php separate
This commit is contained in:
21
TODO.md
Normal file
21
TODO.md
Normal file
@@ -0,0 +1,21 @@
|
||||
TODO:
|
||||
|
||||
Downstream scripts need to be updated to use the `newcountyoutages` table instead of the old `countyoutages` table.
|
||||
|
||||
Key changes:
|
||||
1. Table name is now `newcountyoutages`.
|
||||
2. The `update` column has been renamed to `fetch_time`.
|
||||
|
||||
Affected scripts:
|
||||
- `powersum.py`: Needs to be updated to query `newcountyoutages` and use `fetch_time`.
|
||||
- `powercounty.py`: This script is deprecated and should be removed. All its functionality is replaced by `newpower.py`.
|
||||
|
||||
---
|
||||
|
||||
The point-based scraper `power2.py` has been refactored into `newpower2.py`.
|
||||
|
||||
Key changes:
|
||||
1. The `power` table is now deprecated in favor of the `newpower` table.
|
||||
2. The `start` column has been renamed to `start_time`.
|
||||
3. The `lastchange` column has been renamed to `last_change`.
|
||||
4. A new `fetch_time` column has been added to track when the record was scraped.
|
||||
138
main.php
Normal file
138
main.php
Normal file
@@ -0,0 +1,138 @@
|
||||
<?php
|
||||
/**
|
||||
* Unified API Gateway
|
||||
*
|
||||
* This file serves as a single entry point for all API services.
|
||||
* Each route is identified by the 'service' parameter.
|
||||
*
|
||||
* Available routes:
|
||||
* - service=cams: Returns active camera information
|
||||
* - service=camapi: Returns camera API endpoints with filtering
|
||||
* - service=camlist: Returns camera list with active status
|
||||
* - service=admin: Admin operations for cameras
|
||||
* - service=camcircle: Returns camera coverage circles
|
||||
* - service=db: Returns weather station data
|
||||
* - service=fire: Returns fire information
|
||||
* - service=individualcam: Returns individual camera images
|
||||
* - service=lsr: Returns local storm reports
|
||||
* - service=nws: Returns NWS personnel stats
|
||||
* - service=powerapi: Returns power outage information
|
||||
* - service=searchapi: Returns search results for power outages
|
||||
* - service=ohgo: Returns Ohio traffic information
|
||||
* - service=power: Returns power outage information
|
||||
* - service=stormdata: Returns storm data
|
||||
* - service=warntrack: Returns warning tracking data
|
||||
* - service=ver: Returns version information
|
||||
* - service=update_field: Updates table fields
|
||||
* - service=mp4: Returns MP4 video information
|
||||
* - service=camobs: Returns camera observations with radius and bbox filtering
|
||||
* - service=single: Returns single camera information by camid
|
||||
* - service=powerapitest: Returns extended power outage testing data
|
||||
*
|
||||
* Migration instructions:
|
||||
* To migrate from the old scripts to the new unified API:
|
||||
*
|
||||
* Old endpoint: cam.php
|
||||
* New endpoint: main.php?service=cams
|
||||
*
|
||||
* Old endpoint: camapi.php?cams&lat1=...&lon1=...&lat2=...&lon2=...
|
||||
* New endpoint: main.php?service=camapi&cams&lat1=...&lon1=...&lat2=...&lon2=...
|
||||
*
|
||||
* Old endpoint: camlist.php
|
||||
* New endpoint: main.php?service=camlist
|
||||
*
|
||||
* Old endpoint: admin.php?action=...
|
||||
* New endpoint: main.php?service=admin&action=...
|
||||
*
|
||||
* Old endpoint: camcircle.php
|
||||
* New endpoint: main.php?service=camcircle
|
||||
*
|
||||
* Old endpoint: camobs.php
|
||||
* New endpoint: main.php?service=camobs
|
||||
*
|
||||
* Old endpoint: single.php?camid=...
|
||||
* New endpoint: main.php?service=single&camid=...
|
||||
*
|
||||
* Old endpoint: powerapitest.php
|
||||
* New endpoint: main.php?service=powerapitest
|
||||
*
|
||||
* Old endpoint: db.php
|
||||
* New endpoint: main.php?service=db
|
||||
*
|
||||
* Old endpoint: db.php?outside
|
||||
* New endpoint: main.php?service=db&outside
|
||||
*
|
||||
* Old endpoint: fire.php
|
||||
* New endpoint: main.php?service=fire
|
||||
*
|
||||
* Old endpoint: individualcam.php?camid=...
|
||||
* New endpoint: main.php?service=individualcam&camid=...
|
||||
*
|
||||
* Old endpoint: lsr.php
|
||||
* New endpoint: main.php?service=lsr
|
||||
*
|
||||
* Old endpoint: lsr.php?ohgo
|
||||
* New endpoint: main.php?service=lsr&ohgo
|
||||
*
|
||||
* Old endpoint: nws.php
|
||||
* New endpoint: main.php?service=nws
|
||||
*
|
||||
* Old endpoint: nws.php?officestats
|
||||
* New endpoint: main.php?service=nws&officestats
|
||||
*
|
||||
* Old endpoint: powerapi.php
|
||||
* New endpoint: main.php?service=powerapi
|
||||
*
|
||||
* Old endpoint: powerapi.php?states
|
||||
* New endpoint: main.php?service=powerapi&states
|
||||
*
|
||||
* Old endpoint: searchapi.php
|
||||
* New endpoint: main.php?service=searchapi
|
||||
*
|
||||
* Old endpoint: ohgo.php
|
||||
* New endpoint: main.php?service=ohgo
|
||||
*
|
||||
* Old endpoint: power.php
|
||||
* New endpoint: main.php?service=power
|
||||
*
|
||||
* Old endpoint: stormdata.php
|
||||
* New endpoint: main.php?service=stormdata
|
||||
*
|
||||
* Old endpoint: warntrack.php
|
||||
* New endpoint: main.php?service=warntrack
|
||||
*
|
||||
* Old endpoint: ver.php
|
||||
* New endpoint: main.php?service=ver
|
||||
*
|
||||
* Old endpoint: update_field.php?table=...&field=...&value=...&where=...
|
||||
* New endpoint: main.php?service=update_field&table=...&field=...&value=...&where=...
|
||||
*
|
||||
* Old endpoint: mp4.php?camid=...
|
||||
* New endpoint: main.php?service=mp4&camid=...
|
||||
*/
|
||||
|
||||
// Determine which service to dispatch to (defaults to an invalid value so
// the error branch below runs when no service parameter is given).
$service = $_GET['service'] ?? 'default';

// Whitelist of routable services. The include target must NOT be derived
// from the raw request string alone: building a path directly from user
// input allows path traversal (e.g. service=../../something) and inclusion
// of arbitrary .php files. Only known service names are routed.
$allowedServices = [
    'cams', 'camapi', 'camlist', 'admin', 'camcircle',
    'db', 'fire', 'individualcam', 'lsr', 'nws',
    'powerapi', 'searchapi', 'ohgo', 'power',
    'stormdata', 'warntrack', 'ver', 'update_field', 'mp4',
    'camobs', 'single', 'powerapitest',
];

// Route to the appropriate service file under ./php/.
$serviceFile = __DIR__ . '/php/' . $service . '.php';

if (in_array($service, $allowedServices, true) && file_exists($serviceFile)) {
    require_once $serviceFile;
} else {
    // Default behavior - list the available services when no valid service
    // was provided.
    header('Content-Type: application/json');
    http_response_code(400);
    echo json_encode([
        'error' => 'Invalid service parameter. Please provide a valid service.',
        'available_services' => $allowedServices,
        'documentation' => 'See main.php file for detailed documentation on each service.',
    ]);
}
?>
|
||||
44
php/admin.php
Normal file
44
php/admin.php
Normal file
@@ -0,0 +1,44 @@
|
||||
<?php
/**
 * Admin operations for cameras.
 *
 * Routed on $_GET['action']:
 *  - checkurl: POSTed `url` -> JSON row telling whether a camera with that
 *    URL already exists in the `cams` table.
 *  - newcam:   POSTed camera metadata -> inserts a new `cams` row with
 *    fixed defaults (interval=10, active=t, keephours=240), echoes the
 *    pg result status, then triggers the geometry rebuild script.
 */
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// Unknown or missing action falls through both branches and does nothing.
$action = $_GET['action'] ?? '';

if ($action == 'checkurl') {
    $url = $_POST['url'] ?? '';
    // Parameterized query: the previous version interpolated the raw POST
    // value into the SQL text, which allowed SQL injection.
    $query = 'SELECT exists (SELECT 1 FROM cams WHERE url = $1)';
    $result = pg_query_params($dbconn, $query, array($url)) or die('Query failed: ' . pg_last_error());

    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}

if ($action == 'newcam') {
    $url = $_POST['url'] ?? '';
    $lat = $_POST['lat'] ?? '';
    $lon = $_POST['lon'] ?? '';
    $desc = $_POST['description'] ?? '';
    $method = $_POST['method'] ?? '';
    $permission = $_POST['permission'] ?? '';
    $owner = $_POST['owner'] ?? '';
    $email = $_POST['email'] ?? '';

    // Already parameterized; connection now passed explicitly (calling
    // pg_query_params() without a connection is deprecated in PHP 8.1).
    $query = "INSERT into cams (url,lat,lon,description,interval,method,active,permission,owner,email,keephours) values ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)";
    $result = pg_query_params($dbconn, $query, Array($url,$lat,$lon,$desc,'10',$method,'t',$permission,$owner,$email,'240')) or die('Query failed: ' . pg_last_error());

    $status = pg_result_status($result);
    echo json_encode($status);
    pg_free_result($result);

    // Fixed path, no user input in the command line.
    shell_exec('python3 /var/www/html/work/runallgeom.py');
}

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
80
php/camapi.php
Normal file
80
php/camapi.php
Normal file
@@ -0,0 +1,80 @@
|
||||
<?php
/**
 * Camera API endpoints, selected by query-string flag:
 *  - ?cams       active RTSP RLX cams inside a lat/lon bounding box and
 *                elevation band (camid, url, description).
 *  - ?camstatic  active RTSP RLX cams within a radius of a point (full rows).
 *  - ?camdb      row count of the camdb image table.
 * All results are JSON-encoded arrays ordered by elevation (desc).
 */
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// cams endpoint: requires all six bbox/elevation parameters. The flattened
// guard replaces the previous six-deep nesting and avoids undefined-index
// notices from reading $_GET keys before checking them.
if (isset($_GET['cams'])
    && !empty($_GET['lat1']) && !empty($_GET['lon1'])
    && !empty($_GET['lat2']) && !empty($_GET['lon2'])
    && !empty($_GET['elevbottom']) && !empty($_GET['elevtop'])) {

    $lat1 = getParam('lat1');
    $lon1 = getParam('lon1');
    $lat2 = getParam('lat2');
    $lon2 = getParam('lon2');
    $elevbottom = getParam('elevbottom');
    $elevtop = getParam('elevtop');

    $result = pg_query_params($dbconn,
        "select camid,url,description from cams where method = 'rtsp' and active = true and cwa = 'RLX' and elevation > $5 and elevation < $6 and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and lat < $1 and lat > $2 and lon < $3 and lon > $4 order by elevation desc",
        array($lat1,$lat2,$lon1,$lon2,$elevbottom,$elevtop)) or die('Query failed: ' . pg_last_error());

    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}

// camstatic endpoint: point + radius search. The radius arrives in miles;
// dividing by 70 approximates degrees (1 degree latitude ~ 69-70 miles).
if (isset($_GET['camstatic'])
    && !empty($_GET['lat1']) && !empty($_GET['lon1']) && !empty($_GET['radius'])) {

    $lat1 = floatval(getParam('lat1'));
    $lon1 = floatval(getParam('lon1'));
    $radius = floatval(getParam('radius')) / 70;

    // Parameterized query: the previous version concatenated the values
    // directly into the SQL text.
    $query = "select * from cams where method = 'rtsp' and active = true and cwa = 'RLX' and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and st_dwithin(geom, ST_SetSRID(ST_Point($1, $2), 4326), $3) order by elevation desc";
    $result = pg_query_params($dbconn, $query, array($lon1, $lat1, $radius)) or die('Query failed: ' . pg_last_error());

    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}

// camdb endpoint: total number of stored camera images.
if (isset($_GET['camdb'])) {
    $result = pg_query($dbconn,
        "SELECT COUNT(*) FROM camdb") or die('Query failed: ' . pg_last_error());
    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
36
php/camcircle.php
Normal file
36
php/camcircle.php
Normal file
@@ -0,0 +1,36 @@
|
||||
<?php
/**
 * Camera coverage circles: emits a GeoJSON FeatureCollection of 8000 m
 * buffers around every active camera that reported within the last hour.
 */
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// The FeatureCollection is built entirely in SQL; each feature is an
// 8000 m geographic buffer around a recently-active camera location.
$query = "WITH subquery_points AS (
    SELECT geom AS point_geometry
    FROM cams
    WHERE active = true and lastsuccess IS NOT NULL AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < 60
)
SELECT jsonb_build_object(
    'type', 'FeatureCollection',
    'features', jsonb_agg(jsonb_build_object(
        'type', 'Feature',
        'geometry', ST_AsGeoJSON(ST_Buffer(point_geometry::geography, 8000))::jsonb
    ))
) AS feature_collection
FROM subquery_points";

// Connection passed explicitly: relying on the implicit "default
// connection" in pg_query() is deprecated as of PHP 8.1.
$result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error());

// The query returns exactly one row; guard against an empty result set
// instead of indexing a possibly-undefined array as before.
$row = pg_fetch_array($result, null, PGSQL_ASSOC);
if ($row !== false) {
    print_r($row['feature_collection']);
}

// Free resultset
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
57
php/camlist.php
Normal file
57
php/camlist.php
Normal file
@@ -0,0 +1,57 @@
|
||||
<?php
/**
 * Camera list endpoint: returns every active camera as a JSON envelope
 * {status, data, count}, with lat/lon/elevation cast to floats and the
 * PostgreSQL boolean `active` normalised to a real boolean.
 */
require_once __DIR__ . '/common.php';

header('Content-Type: application/json; charset=utf-8');

// Response envelope filled in by either the success or the error path.
$response = [];

try {
    $dbconn = getDBConnection();

    $result = pg_query(
        $dbconn,
        "SELECT url, lat, lon, elevation, county, state, active, aspect, bloomsky, source, method FROM cams where active = true"
    );

    if (!$result) {
        throw new Exception('Query failed: ' . pg_last_error());
    }

    $data = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        // Numeric columns arrive as strings from pgsql; cast them.
        $line['lat'] = floatval($line['lat']);
        $line['lon'] = floatval($line['lon']);
        $line['elevation'] = floatval($line['elevation']);
        // PostgreSQL booleans arrive as 't'/'f' strings.
        $line['active'] = $line['active'] === 't';

        $data[] = $line;
    }

    $response = [
        'status' => 'success',
        'data' => $data,
        'count' => count($data),
    ];

    pg_free_result($result);

} catch (Exception $e) {
    http_response_code(500);
    $response = [
        'status' => 'error',
        'message' => $e->getMessage(),
    ];
}

// Output JSON
echo json_encode($response, JSON_PRETTY_PRINT | JSON_NUMERIC_CHECK);

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
72
php/camobs.php
Normal file
72
php/camobs.php
Normal file
@@ -0,0 +1,72 @@
|
||||
<?php
/**
 * Camera observation endpoints, selected by $_GET['camstatic']:
 *  - camstatic=radius: active RLX cams within a radius of a point
 *    (radius arrives in miles; /70 approximates degrees).
 *  - camstatic=bbox:   active RLX cams inside a lat/lon bounding box and
 *    an elevbottom..elevtop elevation band.
 * Both emit a JSON array of full rows ordered by elevation (desc).
 */
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// radius mode: flattened guard replaces the previous deep nesting.
if (($_GET['camstatic'] ?? null) == 'radius'
    && !empty($_GET['lat1']) && !empty($_GET['lon1']) && !empty($_GET['radius'])) {

    $lat1 = floatval(getParam('lat1'));
    $lon1 = floatval(getParam('lon1'));
    $radius = floatval(getParam('radius')) / 70;

    // Parameterized query: the previous version concatenated the values
    // into the SQL text.
    $query = "select * from cams where active = true and cwa = 'RLX' and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and st_dwithin(geom, ST_SetSRID(ST_Point($1, $2), 4326), $3) order by elevation desc";
    $result = pg_query_params($dbconn, $query, array($lon1, $lat1, $radius)) or die('Query failed: ' . pg_last_error());

    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}

// bbox mode: requires all six bbox/elevation parameters.
if (($_GET['camstatic'] ?? null) == 'bbox'
    && !empty($_GET['lat1']) && !empty($_GET['lon1'])
    && !empty($_GET['lat2']) && !empty($_GET['lon2'])
    && !empty($_GET['elevbottom']) && !empty($_GET['elevtop'])) {

    $lat1 = getParam('lat1');
    $lon1 = getParam('lon1');
    $lat2 = getParam('lat2');
    $lon2 = getParam('lon2');
    $elevbottom = getParam('elevbottom');
    $elevtop = getParam('elevtop');

    $result = pg_query_params($dbconn,
        "select * from cams where active = true and cwa = 'RLX' and elevation > $5 and elevation < $6 and (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) and lat < $1 and lat > $2 and lon < $3 and lon > $4 order by elevation desc",
        array($lat1,$lat2,$lon1,$lon2,$elevbottom,$elevtop)) or die('Query failed: ' . pg_last_error());

    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
28
php/cams.php
Normal file
28
php/cams.php
Normal file
@@ -0,0 +1,28 @@
|
||||
<?php
/**
 * Active camera feed: returns a JSON list of cams that are active and have
 * reported recently (lastsuccess within `interval` + 20 minutes), ordered
 * by elevation, highest first.
 */
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// Performing SQL query. Connection passed explicitly: relying on the
// implicit "default connection" in pg_query() is deprecated in PHP 8.1.
$query = "SELECT cwa,lat,lon,lastimage,county,elevation,camid,state,description,hydro,airport FROM cams WHERE active <> false AND lastsuccess IS NOT NULL AND (EXTRACT(EPOCH FROM (current_timestamp - lastsuccess ))/60) < (interval + 20) order by elevation desc";
$result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error());

$array = array();
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    // PostgreSQL booleans arrive as 't'/'f' strings; normalise to real
    // booleans so json_encode emits true/false.
    $line['hydro'] = ($line['hydro'] === 't' || $line['hydro'] === true);
    $line['airport'] = ($line['airport'] === 't' || $line['airport'] === true);
    $array[] = $line;
}
echo json_encode($array);

// Free resultset
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
396
php/common.php
Normal file
396
php/common.php
Normal file
@@ -0,0 +1,396 @@
|
||||
<?php
|
||||
// Database connection function
|
||||
/**
 * Return a shared PostgreSQL connection, opening it on first use.
 *
 * The handle is memoised in a static local, so every caller within the
 * request shares a single connection. Terminates the script with a
 * diagnostic if the connection cannot be established.
 */
function getDBConnection() {
    static $dbconn = null;
    if ($dbconn !== null) {
        return $dbconn;
    }
    $dbconn = pg_connect("host=localhost dbname=nws user=nws password=nws");
    if ($dbconn === false) {
        die('Could not connect: ' . pg_last_error());
    }
    return $dbconn;
}
|
||||
|
||||
// Function to safely get and escape string parameters
|
||||
/**
 * Fetch a GET parameter and escape it for safe embedding in SQL string
 * literals.
 *
 * @param string $key     Name of the $_GET parameter.
 * @param mixed  $default Returned (unescaped, may be null) when absent.
 * @return string|null Escaped value, or $default escaped when non-null.
 */
function getParam($key, $default = null) {
    $value = $_GET[$key] ?? $default;
    if ($value !== null) {
        // Pass the connection explicitly: calling pg_escape_string()
        // without a connection argument is deprecated as of PHP 8.1.
        return pg_escape_string(getDBConnection(), $value);
    }
    return $value;
}
|
||||
|
||||
// Function to safely get and validate integer parameters
|
||||
/**
 * Fetch a GET parameter coerced to an integer.
 *
 * @param string $key     Name of the $_GET parameter.
 * @param mixed  $default Fallback when the parameter is absent; cast to
 *                        int unless it is null (null passes through).
 * @return int|null
 */
function getIntParam($key, $default = null) {
    if (!isset($_GET[$key])) {
        // Parameter missing: use the caller-supplied default, preserving
        // a null default as-is.
        return $default === null ? null : (int)$default;
    }
    return (int)$_GET[$key];
}
|
||||
|
||||
// Helper functions for stormdata service
|
||||
/**
 * Emit a JSON error response and terminate the request.
 *
 * @param int         $http_code   HTTP status code to send.
 * @param string      $message     Client-facing error text.
 * @param string|null $log_message Optional server-side log line; when
 *                                 omitted, 5xx responses still log a
 *                                 generic entry.
 */
function send_error(int $http_code, string $message, ?string $log_message = null): void {
    if ($log_message) {
        error_log($log_message);
    } elseif ($http_code >= 500) {
        // Server-side failures are always logged even without an explicit
        // log message.
        error_log("Server Error (" . $http_code . "): " . $message);
    }
    http_response_code($http_code);
    header('Content-Type: application/json; charset=utf-8');
    echo json_encode(['error' => $message]);
    exit;
}
|
||||
|
||||
/**
 * Wrap the given features in a GeoJSON FeatureCollection, emit it with the
 * geo+json content type, and terminate the request.
 *
 * @param array $features List of GeoJSON Feature arrays.
 */
function send_geojson(array $features): void {
    header('Content-Type: application/geo+json; charset=utf-8');
    echo json_encode([
        'type' => 'FeatureCollection',
        'features' => $features,
    ]);
    exit;
}
|
||||
|
||||
/**
 * Return active cameras inside an area together with their images taken in
 * a time window.
 *
 * Expects in $data: 'start_time', 'end_time' (strtotime-parseable strings)
 * and 'area_geojson' (a JSON string encoding a Polygon or MultiPolygon).
 * Responds with a JSON array of camera rows, each augmented with a decoded
 * 'geometry' object and an 'images' list ({timestamp, url}) aggregated from
 * the camdb table; sends a 400/500 error (via send_error, which exits) on
 * bad input or query failure. Always terminates the request.
 *
 * @param resource|\PgSql\Connection $dbconn Open pgsql connection.
 * @param array                      $data   Decoded request payload.
 */
function handle_cam_request($dbconn, array $data): void {
    error_log("Handling 'camera image' request.");

    $start_time_str = $data['start_time'] ?? null;
    $end_time_str = $data['end_time'] ?? null;
    $geojson_str = $data['area_geojson'] ?? null;

    if ($start_time_str === null || $end_time_str === null || $geojson_str === null) {
        send_error(400, 'Missing required parameters for camera request: start_time, end_time, area_geojson');
    }

    // Validate the timestamps are parseable before handing them to SQL.
    if (strtotime($start_time_str) === false) {
        send_error(400, 'Invalid start_time format.');
    }
    if (strtotime($end_time_str) === false) {
        send_error(400, 'Invalid end_time format.');
    }

    // Decode only to validate shape; the raw string is what gets passed to
    // ST_GeomFromGeoJSON below.
    $geojson_obj = json_decode($geojson_str);
    if (json_last_error() !== JSON_ERROR_NONE) {
        send_error(400, 'Invalid area_geojson provided: Contains invalid JSON string.', 'GeoJSON Decode Error: ' . json_last_error_msg());
    }
    if (!is_object($geojson_obj) || !isset($geojson_obj->type) || !in_array($geojson_obj->type, ['Polygon', 'MultiPolygon'])) {
        send_error(400, 'Invalid area_geojson provided: Decoded JSON must be a Polygon or MultiPolygon object.');
    }

    // LEFT JOIN keeps cameras with zero images in the window; COALESCE
    // turns their missing aggregate into an empty JSON list.
    $query = "
        SELECT
            c.*,
            ST_AsGeoJSON(c.geom) as geometry_geojson,
            COALESCE(img_agg.images, '[]'::jsonb) AS images
        FROM
            cams c
        LEFT JOIN (
            SELECT
                camid,
                jsonb_agg(
                    jsonb_build_object(
                        'timestamp', dateutc,
                        'url', filepath -- Assuming filepath is the relative URL path
                    ) ORDER BY dateutc ASC -- Order images chronologically
                ) AS images
            FROM
                camdb
            WHERE
                dateutc >= $1::timestamp -- start_time
                AND dateutc <= $2::timestamp -- end_time
            GROUP BY
                camid
        ) AS img_agg ON c.camid = img_agg.camid
        WHERE
            c.active = TRUE -- Only active cameras
            AND ST_Within(c.geom, ST_GeomFromGeoJSON($3)) -- Camera location within area
        ORDER BY
            c.camid; -- Optional: Order cameras by ID
    ";

    $params = array(
        $start_time_str, // $1: start_time
        $end_time_str, // $2: end_time
        $geojson_str // $3: area_geojson string
    );

    $result = pg_query_params($dbconn, $query, $params);

    if (!$result) {
        send_error(500, 'Database query failed for camera data.', 'Camera Query Failed: ' . pg_last_error($dbconn) . " | Query: " . $query . " | Params: " . print_r($params, true));
    }

    $cameras_output = [];
    while ($row = pg_fetch_assoc($result)) {
        // Re-decode the JSON columns produced by the query; a decode
        // failure is logged but does not abort the whole response.
        $geometry = json_decode($row['geometry_geojson']);
        if (json_last_error() !== JSON_ERROR_NONE) {
            error_log('Failed to decode geometry for camid ' . ($row['camid'] ?? 'N/A') . ': ' . json_last_error_msg());
            $geometry = null;
        }

        $images = json_decode($row['images']);
        if (json_last_error() !== JSON_ERROR_NONE) {
            error_log('Failed to decode images JSON for camid ' . ($row['camid'] ?? 'N/A') . ': ' . json_last_error_msg());
            $images = [];
        }

        // Strip the raw geometry columns and replace them with the decoded
        // objects.
        $camera_data = $row;
        unset($camera_data['geometry_geojson']);
        unset($camera_data['geom']);
        $camera_data['geometry'] = $geometry;
        $camera_data['images'] = $images;

        $cameras_output[] = $camera_data;
    }
    pg_free_result($result);
    error_log("Found " . count($cameras_output) . " cameras matching criteria.");

    header('Content-Type: application/json');
    echo json_encode($cameras_output, JSON_PRETTY_PRINT | JSON_UNESCAPED_SLASHES);
    exit;
}
|
||||
|
||||
/**
 * Return weather-underground observations whose station lies within any of
 * the supplied polygons, restricted to a time window.
 *
 * Expects in $data: 'polygons' (list of WKT polygon strings), and optional
 * 'start_time'/'end_time' (default to a fixed 2025-01-01..2025-01-02
 * window). Responds with a JSON array of wuobs rows; responds 500 with an
 * error body when no polygons are supplied or the query fails.
 * NOTE(review): an empty 'polygons' arguably merits 400, not 500 — kept
 * as-is here since this is a documentation-only pass.
 *
 * @param resource|\PgSql\Connection $dbconn Open pgsql connection.
 * @param array                      $data   Decoded request payload.
 */
function handle_wu_request_poly($dbconn, array $data): void {
    $polygons = $data['polygons'] ?? [];
    $start_time = $data['start_time'] ?? '2025-01-01 00:00:00';
    $end_time = $data['end_time'] ?? '2025-01-02 00:00:00';

    if (empty($polygons)) {
        http_response_code(500);
        echo json_encode(['error' => 'No polygons provided']);
        exit;
    }

    // Build one ST_GeomFromText($n, 4326) placeholder per polygon; each
    // WKT string is passed as a bound parameter, never interpolated.
    $polygon_placeholders = [];
    $params = [];
    $param_index = 1;

    foreach ($polygons as $polygon) {
        $polygon_placeholders[] = "ST_GeomFromText(\$$param_index, 4326)";
        $params[] = $polygon;
        $param_index++;
    }

    // After the loop $param_index is count(polygons)+1, which is exactly
    // the position of start_time just appended; end_time follows it.
    $params[] = $start_time;
    $params[] = $end_time;
    $start_time_placeholder = "\$$param_index";
    $param_index++;
    $end_time_placeholder = "\$$param_index";

    $polygon_sql = implode(', ', $polygon_placeholders);

    // The && bounding-box test is a fast pre-filter; ST_Within is the
    // exact containment check.
    $sql = "
        SELECT wo.*
        FROM wuobs wo
        JOIN wusites ws ON wo.stationid = ws.stationid
        WHERE ws.geom && ST_Union(ARRAY[$polygon_sql])::geometry
        AND ST_Within(ws.geom, ST_Union(ARRAY[$polygon_sql])::geometry)
        AND wo.observation_time BETWEEN $start_time_placeholder AND $end_time_placeholder
    ";

    $result = pg_query_params($dbconn, $sql, $params);

    if ($result === false) {
        http_response_code(500);
        echo json_encode(['error' => pg_last_error($dbconn)]);
        exit;
    }

    $results = [];
    while ($row = pg_fetch_assoc($result)) {
        $results[] = $row;
    }

    pg_free_result($result);

    header('Content-Type: application/json');
    echo json_encode($results);
}
|
||||
|
||||
/**
 * Return OHGO traffic events inside an area and time window as GeoJSON.
 *
 * Expects in $data: 'start_time', 'end_time', and 'area_geojson' (a JSON
 * string encoding a Polygon or MultiPolygon). Emits a FeatureCollection
 * (via send_geojson, which exits) of ohgo rows whose `start` falls in
 * (start_time, end_time) and whose geometry lies within the area; sends a
 * 400/500 error (and exits) on bad input or query failure.
 *
 * @param resource|\PgSql\Connection $dbconn Open pgsql connection.
 * @param array                      $data   Decoded request payload.
 */
function handle_ohgo_request($dbconn, array $data): void {
    error_log("Handling 'ohgo' request.");
    $start = $data['start_time'] ?? null;
    $geojson_str = $data['area_geojson'] ?? null;
    $end = $data['end_time'] ?? null;

    if ($start === null || $geojson_str === null || $end === null) {
        send_error(400, 'Missing required parameters for ohgo request: start, geojson, end');
    }

    // Decode only to validate; the raw string is bound to the query below.
    $geojson_obj = json_decode($geojson_str);
    if (json_last_error() !== JSON_ERROR_NONE) {
        send_error(400, 'Invalid GeoJSON provided: Not valid JSON.', 'GeoJSON Decode Error: ' . json_last_error_msg());
    }
    if (!isset($geojson_obj->type) || !in_array($geojson_obj->type, ['Polygon', 'MultiPolygon'])) {
        send_error(400, 'Invalid GeoJSON provided: Type must be Polygon or MultiPolygon.');
    }


    // NOTE: placeholder order is $1=start, $2=area_geojson, $3=end — the
    // params array below is built in that same order.
    $query = "SELECT ST_AsGeoJSON(geom) AS geometry, category, roadstatus, county, state, location, routename, description, start AS start_timestamp, endtime AS end_timestamp, lastupdate FROM ohgo WHERE start > $1::timestamp AND start < $3::timestamp AND ST_Within(geom, ST_GeomFromGeoJSON($2)) ORDER BY start ASC";
    $params = array($start, $geojson_str, $end);
    $result = pg_query_params($dbconn, $query, $params);
    if (!$result) {
        send_error(500, 'Database query failed for ohgo data.', 'OHGO Query Failed: ' . pg_last_error($dbconn));
    }

    $features = [];
    while ($line = pg_fetch_assoc($result)) {
        // A row whose geometry fails to decode is logged and skipped
        // rather than aborting the response.
        $geometry = json_decode($line['geometry']);
        if (json_last_error() !== JSON_ERROR_NONE) {
            error_log('Failed to decode geometry for ohgo row: ' . json_last_error_msg());
            continue;
        }
        $properties = $line;
        unset($properties['geometry']);
        $features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
    }
    pg_free_result($result);
    error_log("Found " . count($features) . " features for ohgo request.");

    send_geojson($features);
}
|
||||
|
||||
/**
 * Return power outages inside an area and (buffered) time window as GeoJSON.
 *
 * Expects in $data: 'start_time', 'end_time', 'area_geojson' (JSON string,
 * Polygon or MultiPolygon) and optional 'buffer' (hours, default 0) that
 * extends the end of the window. Emits a FeatureCollection (via
 * send_geojson, which exits) of `power` rows matched by derivedstart and
 * ST_Within; sends 400/500 errors (and exits) on bad input or query failure.
 *
 * @param resource|\PgSql\Connection $dbconn Open pgsql connection.
 * @param array                      $data   Decoded request payload.
 */
function handle_power_request($dbconn, array $data): void {
    error_log("Handling 'power' request.");
    $start = $data['start_time'] ?? null;
    $geojson_str = $data['area_geojson'] ?? null;
    $end = $data['end_time'] ?? null;
    $buffer_hours = $data['buffer'] ?? 0;

    if ($start === null || $geojson_str === null || $end === null || $buffer_hours === null) {
        send_error(400, 'Missing required parameters for power request: start_time, area_geojson, end_time, buffer_hours');
    }
    // Side effect: the valid float value is captured into
    // $buffer_hours_float inside the condition.
    if (!is_numeric($buffer_hours) || ($buffer_hours_float = floatval($buffer_hours)) < 0) {
        send_error(400, 'Invalid buffer_hours provided: Must be a non-negative number.');
    }
    // make_interval() below takes whole hours, so the float is truncated.
    $buffer_hours_int = (int)$buffer_hours_float;
    $geojson_obj = json_decode($geojson_str);
    if (json_last_error() !== JSON_ERROR_NONE) {
        send_error(400, 'Invalid area_geojson provided: Contains invalid JSON string.', 'GeoJSON Decode Error: ' . json_last_error_msg());
    }
    if (!is_object($geojson_obj) || !isset($geojson_obj->type) || !in_array($geojson_obj->type, ['Polygon', 'MultiPolygon'])) {
        send_error(400, 'Invalid area_geojson provided: Decoded JSON must be a Polygon or MultiPolygon object.');
    }

    // Window: derivedstart in [start, end + buffer hours); area test uses
    // the outage's realgeom against the supplied GeoJSON area.
    $query = "SELECT ST_AsGeoJSON(realgeom) AS geometry, derivedstart AS start_timestamp, cause, peakoutage, lastchange AS end_timestamp FROM power WHERE derivedstart >= $1::timestamp AND derivedstart < ($3::timestamp + make_interval(hours => $4::integer)) AND ST_Within(realgeom, ST_GeomFromGeoJSON($2)) ORDER BY derivedstart ASC";
    $params = array(
        $start, // $1: start_time from JSON
        $geojson_str, // $2: area_geojson STRING from JSON
        $end, // $3: end_time from JSON
        $buffer_hours_int // $4: buffer_hours from JSON (as integer)
    );
    $result = pg_query_params($dbconn, $query, $params);
    if (!$result) {
        send_error(500, 'Database query failed for power data.', 'Power Query Failed: ' . pg_last_error($dbconn) . " | Query: " . $query . " | Params: " . print_r($params, true));
    }

    $features = [];
    while ($line = pg_fetch_assoc($result)) {
        // A row whose geometry fails to decode is logged and skipped.
        $geometry = json_decode($line['geometry']);
        if (json_last_error() !== JSON_ERROR_NONE) {
            error_log('Failed to decode geometry for power row: ' . json_last_error_msg());
            continue;
        }
        $properties = $line;
        unset($properties['geometry']);
        $features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
    }
    pg_free_result($result);
    error_log("Found " . count($features) . " features for power request.");

    send_geojson($features);
}
|
||||
|
||||
/**
 * Return OHGO events for the RLX CWA in a time window as GeoJSON, shaped
 * like a local-storm-report feed (typetext/source/remark/valid fields).
 *
 * Expects in $data: 'start_time' and 'end_time'. No polygon filter — the
 * area is fixed to rows with cwa = 'RLX'. Emits a FeatureCollection via
 * send_geojson (which exits); sends 400/500 errors (and exits) on missing
 * parameters or query failure.
 *
 * @param resource|\PgSql\Connection $dbconn Open pgsql connection.
 * @param array                      $data   Decoded request payload.
 */
function handle_ohgo_request_no_poly($dbconn, array $data): void {
    error_log("Handling 'ohgo' request no poly.");
    $start = $data['start_time'] ?? null;
    $end = $data['end_time'] ?? null;

    if ($start === null || $end === null) {
        send_error(400, 'Missing required parameters for ohgo request: start, end');
    }

    // Columns are aliased to match the LSR-style consumer schema
    // (st/city/wfo/typetext/source/remark/valid).
    $query = "SELECT ST_AsGeoJSON(geom) AS geometry, county, state AS st, location, routename AS city, upper(cwa) AS wfo, 'FLOOD' AS typetext, 'Department of Highways' AS source, description AS remark,
        TO_CHAR(start, 'YYYY-MM-DD\"T\"HH24:MI:SS\"Z\"') AS valid
        FROM ohgo
        WHERE start > $1::timestamp
        AND start < $2::timestamp
        AND cwa = 'RLX'
        ORDER BY start ASC";
    $params = array($start, $end);
    $result = pg_query_params($dbconn, $query, $params);
    if (!$result) {
        send_error(500, 'Database query failed for ohgo data.', 'OHGO Query Failed: ' . pg_last_error($dbconn));
    }

    $features = [];
    while ($line = pg_fetch_assoc($result)) {
        // A row whose geometry fails to decode is logged and skipped.
        $geometry = json_decode($line['geometry']);
        if (json_last_error() !== JSON_ERROR_NONE) {
            error_log('Failed to decode geometry for ohgo row: ' . json_last_error_msg());
            continue;
        }
        $properties = $line;
        unset($properties['geometry']);
        $features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
    }
    pg_free_result($result);
    error_log("Found " . count($features) . " features for ohgo request.");

    send_geojson($features);
}
|
||||
|
||||
function handle_power_request_no_poly($dbconn, array $data): void {
    // Return point power outages inside the RLX CWA as GeoJSON features using
    // the LSR-style property schema.
    //   $data reads: start_time, end_time (required), outage_threshold (default 9),
    //   buffer (extra hours past end_time, default 0).
    error_log("Handling 'power' request no poly.");
    $start = $data['start_time'] ?? null;
    $end = $data['end_time'] ?? null;
    $outage_threshold = $data['outage_threshold'] ?? 9; // minimum peakoutage to include
    $buffer_hours = $data['buffer'] ?? 0;               // hours added past end_time

    // FIX: dropped the dead "$buffer_hours === null" test (it defaults to 0
    // above, so it can never be null) and corrected the 400 message, which
    // named keys ('buffer_hours') that are not actually read from the payload.
    if ($start === null || $end === null) {
        send_error(400, 'Missing required parameters for power request: start_time, end_time');
    }
    if (!is_numeric($buffer_hours) || ($buffer_hours_float = floatval($buffer_hours)) < 0) {
        send_error(400, 'Invalid buffer_hours provided: Must be a non-negative number.');
    }
    $buffer_hours_int = (int)$buffer_hours_float;
    $outage_thresh = (float)$outage_threshold;

    // NOTE(review): per TODO.md the `power` table is deprecated in favor of
    // `newpower` (start -> start_time, lastchange -> last_change, new fetch_time).
    // This query only touches derivedstart/peakoutage/cause/realgeom, so it is
    // left on `power` until `newpower` is confirmed to carry those columns.
    $query = "SELECT ST_AsGeoJSON(realgeom) AS geometry,
        TO_CHAR(derivedstart, 'YYYY-MM-DD\"T\"HH24:MI:SS\"Z\"') AS valid,
        ('Power Outage affecting ' || peakoutage || ' customers caused by ' || COALESCE(cause, 'unknown')) AS remark,
        'Utility Company' as source,
        'POWER OUTAGE' as typetext,
        'U' as type,
        (ROUND(ST_Y(realgeom)::numeric, 3) || ', ' || ROUND(ST_X(realgeom)::numeric, 3)) AS city,
        county as county,
        state as state,
        state as st
        FROM power
        WHERE derivedstart >= $1::timestamp
        AND derivedstart < ($2::timestamp + make_interval(hours => $3::integer))
        and peakoutage > $4
        AND ST_Within(realgeom, (SELECT geom FROM public.cwa WHERE cwa = 'RLX'))
        ORDER BY derivedstart ASC";

    $params = array(
        $start,            // $1: start_time from JSON
        $end,              // $2: end_time from JSON
        $buffer_hours_int, // $3: buffer hours (as integer)
        $outage_thresh     // $4: minimum peakoutage
    );
    $result = pg_query_params($dbconn, $query, $params);
    if (!$result) {
        send_error(500, 'Database query failed for power data.', 'Power Query Failed: ' . pg_last_error($dbconn) . " | Query: " . $query . " | Params: " . print_r($params, true));
    }

    $features = [];
    while ($line = pg_fetch_assoc($result)) {
        $geometry = json_decode($line['geometry']);
        if (json_last_error() !== JSON_ERROR_NONE) {
            error_log('Failed to decode geometry for power row: ' . json_last_error_msg());
            continue;
        }
        $properties = $line;
        unset($properties['geometry']);
        $features[] = ['type' => 'Feature', 'geometry' => $geometry, 'properties' => $properties];
    }
    pg_free_result($result);
    error_log("Found " . count($features) . " features for power request.");

    send_geojson($features);
}
|
||||
?>
|
||||
150
php/db.php
Normal file
150
php/db.php
Normal file
@@ -0,0 +1,150 @@
|
||||
<?php
|
||||
require_once __DIR__ . '/common.php';
|
||||
|
||||
$dbconn = getDBConnection();
|
||||
|
||||
//no gets, current point outage info
|
||||
// Bare request (only the routing 'service' key): return the latest observation
// per active RLX station, restricted to obs from the last 30 minutes, as a
// plain JSON array of row objects.
if (empty(array_diff_key($_GET, array('service' => '')))) {
    try {
        // DISTINCT ON (stationid) inside the subquery keeps one row per station;
        // the outer ORDER BY lastob DESC sorts stations newest-ob first.
        $query = "
            SELECT
                stationid,
                lat,
                lon,
                tempf,
                dewpt,
                preciptotal,
                winddir,
                windspd,
                windgust,
                elev,
                adm1,
                adm2,
                neighborhood,
                maxt,
                mint,
                pressure,
                lastob,
                county,
                rain24,
                rain3,
                rain6,
                windmax,
                cwa
            FROM (
                SELECT DISTINCT ON (stationid) *
                FROM wusites
                WHERE active = TRUE
                AND cwa = 'RLX'
                AND lastob BETWEEN timezone('utc', NOW()) - INTERVAL '0.5 hours'
                AND timezone('utc', NOW())
            ) p
            ORDER BY lastob DESC
        ";

        $result = pg_query($dbconn, $query);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $results[] = $line;
        }

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        // Surface any failure as a 500 JSON payload and stop the script.
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
// ?outsideold : deprecated variant of ?outside -- latest ob per active station
// (any CWA) from the last 30 minutes, emitted as a JSON array.
if (isset($_GET['outsideold'])) {
    $query = "SELECT stationid, lat, lon, tempf, dewpt,preciptotal,winddir,windspd,windgust,elev,adm1,adm2,neighborhood,maxt,mint,pressure,lastob,county,rain24,rain3,rain6,windmax,cwa FROM (SELECT DISTINCT ON (stationid) * FROM wusites WHERE (active = TRUE) and lastob BETWEEN timezone('utc', now()) - INTERVAL '.5 HOURS'AND timezone('utc', now())) p ORDER BY lastob desc;";
    // FIX: pass the connection explicitly (pg_query was relying on the implicit
    // last-opened connection) and initialise $array so an empty result set
    // serialises to [] instead of json-encoding an undefined variable (null).
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));

    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    // FIX: declare the response content type like the other handlers in this file.
    header('Content-Type: application/json');
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
// ?outside : same latest-ob-per-station feed as the bare request, but without
// the cwa = 'RLX' restriction (all active stations, any CWA).
if (isset($_GET['outside'])) {
    try {
        // DISTINCT ON (stationid) keeps one most-recent row per station.
        $query = "
            SELECT
                stationid,
                lat,
                lon,
                tempf,
                dewpt,
                preciptotal,
                winddir,
                windspd,
                windgust,
                elev,
                adm1,
                adm2,
                neighborhood,
                maxt,
                mint,
                pressure,
                lastob,
                county,
                rain24,
                rain3,
                rain6,
                windmax,
                cwa
            FROM (
                SELECT DISTINCT ON (stationid) *
                FROM wusites
                WHERE active = TRUE
                AND lastob BETWEEN timezone('utc', NOW()) - INTERVAL '0.5 hours'
                AND timezone('utc', NOW())
            ) p
            ORDER BY lastob DESC
        ";

        $result = pg_query($dbconn, $query);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $results[] = $line;
        }

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        // Surface any failure as a 500 JSON payload and stop the script.
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
// Close database connection now that every handler above has completed.
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
19
php/fire.php
Normal file
19
php/fire.php
Normal file
@@ -0,0 +1,19 @@
|
||||
<?php
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// Bare request: return active wildfires (type 'WF', not fully contained,
// modified in the last 36 hours) as a GeoJSON FeatureCollection built in SQL.
if(empty(array_diff_key($_GET, array('service' => '')))) {
    $result = pg_query_params($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(geom)::json,'properties',json_build_object('incname',incname,'discovery',discovery,'modified',modified,'age',age,'dailyacres',dailyacres,'type',type,'contained',contained,'personnel',personnel))order by modified asc)) FROM fire where type = $1 and contained <> 100 and modified > now() - interval '36 hours'",
        array('WF')) or die('Query failed: ' . pg_last_error($dbconn));
    $resultArray = pg_fetch_all($result);
    // FIX: declare the content type and guard the fetch result -- pg_fetch_all
    // can return false, and the old code indexed into it unconditionally.
    header('Content-Type: application/json');
    if ($resultArray && isset($resultArray[0]['json_build_object'])) {
        echo($resultArray[0]['json_build_object']);
    } else {
        echo json_encode(['error' => 'No results found']);
    }
    pg_free_result($result);
}

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
45
php/individualcam.php
Normal file
45
php/individualcam.php
Normal file
@@ -0,0 +1,45 @@
|
||||
<?php
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// Return the most recent images for one camera as a JSON array of
// {camid, filepath, dateutc} rows, newest first.
//   camid     - camera identifier (required)
//   dtg       - optional cutoff: only images strictly before this timestamp
//   camimages - optional row limit (default 20)
$camid = getParam('camid');

// FIX: camid, dtg and camimages were interpolated straight into the SQL text
// (SQL injection); they are now bound via pg_query_params, the limit is forced
// to a positive integer, and the query runs on the explicit connection.
$camimages = 20;
if (isset($_GET['camimages']) && (int)$_GET['camimages'] > 0) {
    $camimages = (int)$_GET['camimages'];
}

if (getParam('dtg')) {
    $endtime = getParam('dtg');
    $query = "SELECT camid, filepath, date_trunc('second', dateutc) as dateutc FROM camdb where camid = $1 and dateutc < $2 order by dateutc desc limit $3";
    $params = array($camid, $endtime, $camimages);
} else {
    $query = "SELECT camid, filepath, date_trunc('second', dateutc) as dateutc FROM camdb where camid = $1 order by dateutc desc limit $2";
    $params = array($camid, $camimages);
}

$result = pg_query_params($dbconn, $query, $params) or die('Query failed: ' . pg_last_error($dbconn));

// FIX: initialise $array so an empty result set emits [] instead of null.
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
echo json_encode($array);

// Free resultset
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
769
php/lsr.php
Normal file
769
php/lsr.php
Normal file
@@ -0,0 +1,769 @@
|
||||
<?php
|
||||
require_once __DIR__ . '/common.php';
|
||||
|
||||
$dbconn = getDBConnection();
|
||||
|
||||
// ?ohgo : active OHGO events (no endtime yet, updated in the last 2 hours) as a
// GeoJSON FeatureCollection assembled entirely inside PostgreSQL.
if (isset($_GET['ohgo'])) {
    $query = "SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features', jsonb_agg(
            jsonb_build_object(
                'type', 'Feature',
                'geometry', ST_AsGeoJSON(geom)::jsonb,
                'properties', jsonb_build_object(
                    'lat', lat,
                    'lon', lon,
                    'description', description,
                    'roadstatus', roadstatus,
                    'start', start,
                    'lastupdate', lastupdate
                )
            )
        )
    ) as geojson
    FROM public.ohgo
    WHERE endtime IS NULL
    AND lastupdate > NOW() - INTERVAL '2 hours'";

    // Prepare and execute the query
    $result = pg_query($dbconn, $query);
    if (!$result) {
        header('Content-Type: application/json');
        echo json_encode(['error' => 'Query failed: ' . pg_last_error()]);
        exit;
    }

    // Fetch the result (single row holding the aggregated geojson text)
    $resultArray = pg_fetch_all($result);

    // Check if we got results
    if ($resultArray && isset($resultArray[0]['geojson'])) {
        header('Content-Type: application/json');
        echo $resultArray[0]['geojson']; // Direct output since it's already JSON from jsonb_build_object
    } else {
        header('Content-Type: application/json');
        echo json_encode(['error' => 'No results found']);
    }

    // Free result and close connection
    pg_free_result($result);
}
|
||||
|
||||
// ?ohgotable : tabular OHGO listing (events still open or ended within 48h,
// started within the last 144h) for the review table UI.
if (isset($_GET['ohgotable'])) {
    // Performing SQL query
    $query = "SELECT CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
        CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
        ROUND(ST_Y(geom)::numeric, 3) AS lat,
        ROUND(ST_X(geom)::numeric, 3) AS lon,
        id, category, roadstatus, cwa, county, state, location, routename,
        description,
        TO_CHAR(start, 'YYYY-MM-DD HH24:MI') AS start,
        TO_CHAR(endtime, 'YYYY-MM-DD HH24:MI') AS endtime,
        TO_CHAR(lastupdate, 'YYYY-MM-DD HH24:MI') AS lastupdate
        FROM ohgo
        WHERE (endtime IS NULL OR endtime > NOW() - INTERVAL '48 hours') and start > now() - interval '144 hours'
        ORDER BY start ASC
        ";
    // FIX: pass the connection explicitly instead of relying on the implicit
    // last-opened connection.
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));

    // Printing results in JSON
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
// ?lsrohgo=true|false&id=<int> : toggle the 'lsr' flag on one OHGO event.
if (isset($_GET['lsrohgo'])) {
    // FIX: strict comparison, and null-coalesce 'id' so a missing parameter no
    // longer raises an undefined-index warning before the (int) cast.
    $lsrflag = ($_GET['lsrohgo'] === 'true') ? 'true' : 'false';
    $id = (int) ($_GET['id'] ?? 0);
    $query = "UPDATE ohgo SET lsr = $1 WHERE id = $2";
    $result = pg_query_params($dbconn, $query, array($lsrflag, $id)) or die('Query failed: ' . pg_last_error($dbconn));
    pg_free_result($result);
}
|
||||
|
||||
// ?ohgohide=true|false&id=<int> : toggle the 'hide' flag on one OHGO event.
if (isset($_GET['ohgohide'])) {
    // FIX: strict comparison, and null-coalesce 'id' so a missing parameter no
    // longer raises an undefined-index warning before the (int) cast.
    $lsrflag = ($_GET['ohgohide'] === 'true') ? 'true' : 'false';
    $id = (int) ($_GET['id'] ?? 0);
    $query = "UPDATE ohgo SET hide = $1 WHERE id = $2";
    $result = pg_query_params($dbconn, $query, array($lsrflag, $id)) or die('Query failed: ' . pg_last_error($dbconn));
    pg_free_result($result);
}
|
||||
|
||||
//take vtec, return start, end, polygon, outages in polygon, outages in buffer, warntype, polygon pop
|
||||
if (isset($_GET['vtec'])) {
|
||||
$vtec = $_GET['vtec'];
|
||||
|
||||
$query = "
|
||||
SELECT json_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', json_agg(
|
||||
json_build_object(
|
||||
'type', 'Feature',
|
||||
'geometry', ST_AsGeoJSON(nwspoly)::json,
|
||||
'properties', json_build_object(
|
||||
'id', warnindex,
|
||||
'issue', issue,
|
||||
'endtime', endtime,
|
||||
'warntype', warntype,
|
||||
'issue', issue,
|
||||
'outagesvalid', outagesvalid,
|
||||
'outagesbuffer', outagesbuffer,
|
||||
'polygonpop', polygonpop,
|
||||
'lat', st_y(st_centroid(nwspoly)),
|
||||
'lon', st_x(st_centroid(nwspoly)),
|
||||
'vtec', vtec
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
FROM svr
|
||||
WHERE vtec = $1;
|
||||
";
|
||||
|
||||
// Prepare and execute the query using pg_query_params
|
||||
$result = pg_query_params($dbconn, $query, array($vtec))
|
||||
or die('Query failed: ' . pg_last_error());
|
||||
|
||||
// Fetch the result
|
||||
$resultArray = pg_fetch_all($result);
|
||||
|
||||
// Output the JSON object
|
||||
echo($resultArray[0]['json_build_object']);
|
||||
|
||||
// Free result
|
||||
pg_free_result($result);
|
||||
}
|
||||
|
||||
//Get reports pre-flagged with the vtec
|
||||
//Get reports pre-flagged with the vtec
if (isset($_GET['preflagreports'])) {
    $vtec = $_GET['preflagreports'];
    $query = "SELECT * from reports WHERE severe = $1";
    $result = pg_query_params($dbconn, $query, array($vtec)) or die('Query failed: ' . pg_last_error());
    // FIX: initialise $array so an empty result emits [] instead of null
    // (json_encode of an undefined variable).
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}
|
||||
|
||||
//Get reports within polygon for arbitary time after end time
|
||||
//Get reports within polygon for arbitrary time after issue time (default 6h)
if (isset($_GET['reports'])) {
    $vtec = $_GET['reports'];
    // FIX: reject non-numeric 'hours' (it is bound into INTERVAL arithmetic)
    // and fall back to the documented default instead of a SQL error.
    if (isset($_GET['hours']) && is_numeric($_GET['hours'])) {
        $hours = $_GET['hours'];
    } else {
        $hours = 6;
    }

    $query = "SELECT * from reports,svr where ST_Contains(svr.nwspoly, reports.geom) and vtec = $1 and reports.initialdtg AT TIME ZONE 'America/New_York' > svr.issue AND reports.initialdtg AT TIME ZONE 'America/New_York' < svr.issue + (INTERVAL '1 h' * $2)";
    $result = pg_query_params($dbconn, $query, array($vtec,$hours)) or die('Query failed: ' . pg_last_error());
    // FIX: initialise $array so an empty result emits [] instead of null.
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}
|
||||
|
||||
//Get point power outages within polygon + arb time, default 60 minutes
|
||||
//Get point power outages within polygon + arb time, default 60 minutes
if (isset($_GET['outages'])) {
    $vtec = $_GET['outages'];
    if (isset($_GET['hours'])) {
        $hours = $_GET['hours'];
    } else {
        $hours = 1;
    }

    // NOTE(review): TODO.md deprecates the `power` table in favor of `newpower`
    // (lastchange -> last_change, start -> start_time); migrate this query once
    // the new table is confirmed to carry these columns.
    $query = "SELECT power.lat,power.lon,power.peakoutage,power.cause,power.derivedstart,power.lastchange from power,svr where ST_Contains(svr.nwspoly, power.realgeom) and vtec = $1 and derivedstart > svr.issue AND derivedstart < svr.issue + (INTERVAL '1 h' * $2)";
    $result = pg_query_params($dbconn, $query, array($vtec,$hours)) or die('Query failed: ' . pg_last_error());
    // FIX: initialise $array so an empty result emits [] instead of null.
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}
|
||||
|
||||
//no gets, current point outage info
|
||||
// Bare request (only the routing 'service' key): recent storm reports (those
// after the hard-coded 2024-06-07 cutoff) as a GeoJSON FeatureCollection.
// NOTE(review): the old comment here said "current point outage info" -- that
// was copied from db.php; this block actually serves the reports table.
if(empty(array_diff_key($_GET, array('service' => '')))) {
    $result = pg_query_params($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(geom)::json,'properties',json_build_object('id',id,'time',initialdtg,'county',county,'state',state,'issue',issue,'rawemail',rawemail,'place',place,'comments',comments)) order by initialdtg desc)) FROM reports where initialdtg > $1 ",
        array('2024-06-07')) or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
    pg_free_result($result);
}
|
||||
|
||||
//For real time mapping
|
||||
//For real time mapping: every processed report carrying a severe flag.
if(isset($_GET['verify'])) {
    $query = "select id, lat::Numeric(16,3), lon::Numeric(16,3),issue,to_char(initialdtg, 'yyyy/mm/dd hh24:mi') as initialdtg,rawemail,concat(county,' ',state,'\n',place) as place,comments,lsr::text,severe from reports where severe is not null and processed = true order by initialdtg desc";
    // FIX: pass the connection explicitly and initialise $array so an empty
    // result set serialises to [] rather than null.
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
//no gets, current point outage info
|
||||
// ?rtcad[&hours=N] : real-time feed of located reports from the last N hours
// (default 6) as a GeoJSON FeatureCollection, newest first.
if (isset($_GET['rtcad'])) {
    if (isset($_GET['hours'])) {
        $hours = $_GET['hours'];
    } else {
        $hours = 6;
    }

    // FeatureCollection is assembled inside PostgreSQL; one row comes back.
    $query = "
        SELECT json_build_object(
            'type', 'FeatureCollection',
            'features', json_agg(
                json_build_object(
                    'type', 'Feature',
                    'geometry', ST_AsGeoJSON(geom)::json,
                    'properties', json_build_object(
                        'id', id,
                        'time', utcinitialdtg,
                        'county', county,
                        'state', state,
                        'issue', issue,
                        'rawemail', rawemail,
                        'place', place,
                        'comments', comments
                    )
                ) ORDER BY initialdtg DESC
            )
        )
        FROM reports
        WHERE lat is not null and utcinitialdtg >= NOW() - INTERVAL '1 hour' * $1;
    ";

    // Prepare and execute the query using pg_query_params
    $result = pg_query_params($dbconn, $query, array($hours))
        or die('Query failed: ' . pg_last_error());

    // Fetch the result
    $resultArray = pg_fetch_all($result);

    // Output the JSON object
    echo($resultArray[0]['json_build_object']);

    // Free result
    pg_free_result($result);
}
|
||||
|
||||
//Stats
|
||||
//Stats: per-county report counts and the most recent email time (WV/VA/KY/OH).
if (isset($_GET['stats'])) {
    $query = "SELECT county, state, MAX(emailtime), count(*) FROM reports where county is not null and (state = 'WV' or state = 'VA' or state = 'KY' or state = 'OH') GROUP BY county, state";
    // FIX: pass the connection explicitly and initialise $array so an empty
    // result set serialises to [] rather than null.
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
//Get METAR Array for Jelly Bean
|
||||
//Get METAR Array for Jelly Bean: obs between ?start and ?end, ordered west to east.
if (isset($_GET['metars'])) {
    // FIX: null-coalesce start/end -- previously a missing parameter produced
    // an undefined-variable notice and NULL bound into the query.
    $start = $_GET['start'] ?? null;
    $end = $_GET['end'] ?? null;

    $query = "SELECT icao,temp,dewp,wx,precip1,precip3,precip6,raw,obtime,stationname,lat,lon from metars where obtime - interval '45 minutes' > $1 and obtime < $2 order by lon asc";
    $result = pg_query_params($dbconn, $query, array($start,$end)) or die('Query failed: ' . pg_last_error($dbconn));
    // FIX: initialise $array so an empty result emits [] instead of null.
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}
|
||||
|
||||
// ?news : news items from the last 18 hours, bucketed by keyword relevance
// (2 = weather-related, 0 = known noise, 1 = everything else).
if (isset($_GET['news'])) {
    //$query = "SELECT headline, summary, imageurl, source, storylink, updated from news where notrelevant is not true";
    // FIX: two commas were missing inside the relevance ARRAY (after
    // '%emergency%' and '%derecho%'). PostgreSQL concatenates adjacent string
    // literals separated by a newline, so those pairs silently merged into
    // patterns like '%emergency%%wind%' that never matched as intended.
    // Also fixed the stray leading space in ' %crypto%'.
    $query = "
        SELECT
            *,
            CASE
                WHEN concat(summary, ' ', headline) ILIKE ANY (ARRAY[
                    '%weather%', '%flood%', '%fire%', '%fog%', '%snow%', '%emergency%',
                    '%wind%', '%ice%', '%rain%', '%power%', '%explosion%',
                    '%drown%', '%stream%', '%river%', '%air%', '%wind%',
                    '%river%', '%ice%', '%creek%', '%crash%', '%thunder%',
                    '%fog%', '%spill%', '%pileup%', '%pile-up%', '%gust%',
                    '%fatal%', '%injury%', '%sleet%', '%injured%', '%frost%',
                    '%culvert%', '%slippery%', '%wildfire%', '%tornado%',
                    '%thunderstorm%', '%downburst%', '%microburst%', '%crash%', '%heatstroke%', '%derecho%',
                    '%lightning%', '%hypothermia%', '%slide%', '%flow%', '%ski%', '%water%', '%innundation%'
                ]) THEN 2
                WHEN concat(summary, ' ', headline) ILIKE ANY (ARRAY[
                    '%legislative%','%history%','%budget%','%birthday%','%banning%','%academic%','%tuna%','%Service Forecast%', '%DOGE%','%demonstrators%','%forum%','%health%','%fraud%','%birthday%', '%egg%', '%eggs%', '%collector%', '%church%', '%crypto%'
                ]) THEN 0
                ELSE 1
            END AS relevance_level
        FROM news WHERE timeutc > NOW() - INTERVAL '18 hours'
        ORDER BY relevance_level DESC, timeutc DESC
    ";
    // FIX: pass the connection explicitly and initialise $array.
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
// ?news2 : news from the last 18 hours with a per-row count of matched weather
// keywords (whole-word regex match), ordered by the stored nlpscore.
if (isset($_GET['news2'])) {
    $query = "
        SELECT
            *,
            (SELECT COUNT(*)
                FROM unnest(ARRAY[
                    'weather', 'flood', 'fire', 'fog', 'snow', 'emergency',
                    'wind', 'ice', 'rain', 'power', 'explosion', 'warmer', 'colder',
                    'drown', 'stream', 'river', 'air', 'wind', 'destroyed', 'rime', 'glaze',
                    'river', 'ice', 'creek', 'crash', 'thunder', 'spinup', 'black ice', 'aircraft',
                    'fog', 'spill', 'pileup', 'pile-up', 'gust', 'frozen', 'funnel', 'rainfall',
                    'fatal', 'injury', 'sleet', 'injured', 'frost', 'dead', 'death', 'landslide',
                    'culvert', 'slippery', 'wildfire', 'tornado', 'blizzard', 'creek', 'hail',
                    'thunderstorm', 'downburst', 'microburst', 'crash', 'heatstroke', 'derecho',
                    'lightning', 'hypothermia', 'slide', 'flow', 'ski', 'water', 'inundation', 'victim',
                    'victims', 'flooding','flooded','snowing','freezing rain','clouds','cloud','storm'
                ]) AS pattern
                WHERE concat(summary, ' ', headline) ~* ('\y' || pattern || '\y')) AS match_count
        FROM news
        WHERE timeutc > NOW() - INTERVAL '18 hours'
        ORDER BY nlpscore DESC, timeutc DESC
    ";
    // FIX: pass the connection explicitly and initialise $array so an empty
    // result set serialises to [] rather than null.
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
// ?news3old : deprecated variant of ?news3 ranked by the older nlpscore column.
if (isset($_GET['news3old'])) {
    $query = "
        SELECT * FROM news WHERE (timeutc > NOW() - INTERVAL '24 hours' and nlpscore > 0.1) or (timeutc > NOW() - INTERVAL '6 hours') ORDER BY nlpscore DESC, timeutc DESC";
    // FIX: pass the connection explicitly and initialise $array so an empty
    // result set serialises to [] rather than null.
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
// ?news3 : top 50 news items -- high-impact from the last 24h plus everything
// from the last 6h, ranked by impact_score (nulls last).
if (isset($_GET['news3'])) {
    $query = "
        SELECT * FROM news
        WHERE (timeutc > NOW() - INTERVAL '24 hours' AND impact_score > 25)
        OR (timeutc > NOW() - INTERVAL '6 hours') ORDER BY impact_score DESC NULLS LAST, timeutc DESC limit 50";

    // FIX: pass the connection explicitly instead of relying on the implicit
    // last-opened connection.
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));

    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }

    echo json_encode($array);

    // Free resultset
    pg_free_result($result);
}
|
||||
|
||||
// --- FIXED SECTION: newsarchive ---
|
||||
// ?newsarchive : keyword search over the news archive.
//   key   - comma-separated search terms (ILIKE substring match)
//   start - optional lower bound on timeutc
//   end   - optional upper bound on timeutc
// Rows come back ranked by how many of the terms they match.
if (isset($_GET['newsarchive'])) {
    $start = isset($_GET['start']) ? $_GET['start'] : null;
    $end = isset($_GET['end']) ? $_GET['end'] : null;
    $keys = isset($_GET['key']) ? $_GET['key'] : [];
    // Convert keys to an array if it's a comma-separated string
    if (is_string($keys)) {
        $keys = explode(',', $keys);
    }

    $patterns = array_map(function($term) {
        return trim($term);
    }, $keys);

    if (empty($patterns)) {
        // FIX: the no-term query needs a WHERE clause (the date filters below
        // are appended with " AND ...") and a match_count column (the final
        // ORDER BY references it); previously either condition made the
        // generated SQL invalid.
        $query = "SELECT *, 0 AS match_count FROM news WHERE TRUE";
        $params = [];
    } else {
        // Build one positional placeholder per pattern ($1::text, $2::text, ...)
        $placeholders = [];
        for ($i = 1; $i <= count($patterns); $i++) {
            $placeholders[] = '$' . $i . '::text';
        }
        $placeholder_string = implode(',', $placeholders);

        $query = "
            SELECT
                n.*,
                (
                    SELECT COUNT(*)
                    FROM unnest(ARRAY[{$placeholder_string}]::text[]) AS pattern
                    WHERE concat(n.summary, ' ', n.headline) ILIKE pattern
                ) AS match_count
            FROM news n
            WHERE concat(summary, ' ', headline) ILIKE ANY (ARRAY[{$placeholder_string}]::text[])
        ";
        $params = array_map(function($term) { return "%{$term}%"; }, $patterns);
    }

    // Append date filters; numbering continues after the pattern parameters.
    $param_count = count($patterns);
    if ($start) {
        $param_count++;
        $query .= " AND timeutc >= $" . $param_count;
        $params[] = $start;
    }
    if ($end) {
        $param_count++;
        $query .= " AND timeutc <= $" . $param_count;
        $params[] = $end;
    }

    $query .= " ORDER BY match_count DESC, timeutc desc";

    // Execute query
    $result = pg_query_params($dbconn, $query, $params) or die('Query failed: ' . pg_last_error($dbconn));

    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }

    echo json_encode($array);

    // FIX: the result set was never freed in this handler.
    pg_free_result($result);
}
|
||||
|
||||
// ?wv511 : WV511 events updated in the last 2 hours as a GeoJSON
// FeatureCollection assembled inside PostgreSQL.
if (isset($_GET['wv511'])) {
    $query = "SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features', jsonb_agg(
            jsonb_build_object(
                'type', 'Feature',
                'geometry', ST_AsGeoJSON(geom)::jsonb,
                'properties', jsonb_build_object(
                    'type', name,
                    'reported', first_seen,
                    'end', last_seen_in_feed,
                    'county', county,
                    'state', st,
                    'remark', latest_description,
                    'lat', st_y(st_centroid(geom)),
                    'lon', st_x(st_centroid(geom))
                )
            )
        )
    ) as geojson
    FROM public.wv511
    WHERE last_updated > NOW() - INTERVAL '2 hours'";

    // Prepare and execute the query
    $result = pg_query($dbconn, $query);
    if (!$result) {
        header('Content-Type: application/json');
        echo json_encode(['error' => 'Query failed: ' . pg_last_error()]);
        exit;
    }

    // Fetch the result (single row holding the aggregated geojson text)
    $resultArray = pg_fetch_all($result);

    // Check if we got results
    if ($resultArray && isset($resultArray[0]['geojson'])) {
        header('Content-Type: application/json');
        echo $resultArray[0]['geojson']; // Direct output since it's already JSON from jsonb_build_object
    } else {
        header('Content-Type: application/json');
        echo json_encode(['error' => 'No results found']);
    }

    // Free result and close connection
    pg_free_result($result);
}
|
||||
|
||||
// ?ky511 : KY511 events updated in the last 2 hours as a GeoJSON
// FeatureCollection (same shape as ?wv511, but no 'type' property).
if (isset($_GET['ky511'])) {
    $query = "SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features', jsonb_agg(
            jsonb_build_object(
                'type', 'Feature',
                'geometry', ST_AsGeoJSON(geom)::jsonb,
                'properties', jsonb_build_object(
                    'reported', first_seen,
                    'end', last_seen_in_feed,
                    'county', county,
                    'state', st,
                    'remark', latest_description,
                    'lat', st_y(st_centroid(geom)),
                    'lon', st_x(st_centroid(geom))
                )
            )
        )
    ) as geojson
    FROM ky511.ky511
    WHERE last_updated > NOW() - INTERVAL '2 hours'";

    // Prepare and execute the query
    $result = pg_query($dbconn, $query);
    if (!$result) {
        header('Content-Type: application/json');
        echo json_encode(['error' => 'Query failed: ' . pg_last_error()]);
        exit;
    }

    // Fetch the result (single row holding the aggregated geojson text)
    $resultArray = pg_fetch_all($result);

    // Check if we got results
    if ($resultArray && isset($resultArray[0]['geojson'])) {
        header('Content-Type: application/json');
        echo $resultArray[0]['geojson']; // Direct output since it's already JSON from jsonb_build_object
    } else {
        header('Content-Type: application/json');
        echo json_encode(['error' => 'No results found']);
    }

    // Free result and close connection
    pg_free_result($result);
}
|
||||
|
||||
// ?getCombinedTable : one review table merging OHGO, WV511 and KY511 events
// (recent / still-open, started within 144h), normalised to a common schema.
if (isset($_GET['getCombinedTable'])) {
    // Combined SQL query using UNION ALL with CAST for 'id'
    $query = "
        SELECT * FROM (
            -- OHGO Query
            SELECT
                'ohgo' AS source,
                CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
                CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
                ROUND(ST_Y(geom)::numeric, 3) AS lat,
                ROUND(ST_X(geom)::numeric, 3) AS lon,
                CAST(id AS TEXT) AS id, -- Cast id to TEXT
                category,
                roadstatus,
                cwa,
                county,
                state,
                location,
                routename,
                description,
                TO_CHAR(start, 'YYYY-MM-DD HH24:MI') AS start,
                TO_CHAR(endtime, 'YYYY-MM-DD HH24:MI') AS endtime,
                TO_CHAR(lastupdate, 'YYYY-MM-DD HH24:MI') AS lastupdate
            FROM ohgo
            WHERE (endtime IS NULL OR endtime > NOW() - INTERVAL '24 hours') AND start > now() - interval '144 hours'

            UNION ALL

            -- WV511 Query
            SELECT
                'wv511' AS source,
                CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
                CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
                ROUND(ST_Y(geom)::numeric, 3) AS lat,
                ROUND(ST_X(geom)::numeric, 3) AS lon,
                CAST(id AS TEXT) AS id, -- Cast id to TEXT
                wv511.name as category,
                NULL AS roadstatus,
                cwa,
                county,
                st as state,
                'Map Link' AS location,
                NULL AS routename,
                latest_description as description,
                TO_CHAR(first_seen, 'YYYY-MM-DD HH24:MI') AS start,
                TO_CHAR(last_seen_in_feed, 'YYYY-MM-DD HH24:MI') AS endtime,
                TO_CHAR(last_updated, 'YYYY-MM-DD HH24:MI') AS lastupdate
            FROM wv511
            WHERE (last_seen_in_feed IS NULL OR last_seen_in_feed > NOW() - INTERVAL '24 hours') AND first_seen > now() - interval '144 hours'
            and wv511.name !~ 'Crash' and
            wv511.name !~ 'Vehicle' and wv511.name !~ 'Dead Animal' and wv511.name !~ 'Debris in Roadway' and wv511.name !~ 'Congestion-Delay' and
            wv511.name !~ 'Pot hole' and wv511.name !~ 'Debris On Bridge' and wv511.name !~ 'Attenuator' and wv511.name !~ 'Pedestrian' and
            wv511.name !~ 'Bridge Closed' and wv511.name !~ 'Truck on escape' and wv511.name !~ 'Bridge Incident' and wv511.name !~ 'Escape Ramp' AND
            wv511.name !~ 'Signal'
            UNION ALL

            -- KY511 Query
            SELECT
                'ky511.ky511' AS source,
                CASE WHEN COALESCE(lsr, FALSE) THEN 'true' ELSE 'false' END AS lsr,
                CASE WHEN COALESCE(hide, FALSE) THEN 'true' ELSE 'false' END AS hide,
                ROUND(ST_Y(geom)::numeric, 3) AS lat,
                ROUND(ST_X(geom)::numeric, 3) AS lon,
                CAST(id AS TEXT) AS id, -- Cast id to TEXT
                'Weather' as category,
                NULL AS roadstatus,
                cwa,
                county,
                st as state,
                'Map Link' AS location,
                NULL AS routename,
                latest_description as description,
                TO_CHAR(first_seen, 'YYYY-MM-DD HH24:MI') AS start,
                TO_CHAR(last_seen_in_feed, 'YYYY-MM-DD HH24:MI') AS endtime,
                TO_CHAR(last_updated, 'YYYY-MM-DD HH24:MI') AS lastupdate
            FROM ky511.ky511
            WHERE (last_seen_in_feed IS NULL OR last_seen_in_feed > NOW() - INTERVAL '24 hours') AND first_seen > now() - interval '144 hours'
        ) AS combined_data
        ORDER BY start ASC;
    ";

    // Execute the query
    $result = pg_query($dbconn, $query);

    // Set header before any output
    header('Content-Type: application/json');

    if (!$result) {
        // Output error as JSON
        echo json_encode(['error' => 'Combined query failed: ' . pg_last_error($dbconn)]);
        exit;
    }

    // Fetch results into an array
    $dataArray = [];
    while ($row = pg_fetch_assoc($result)) {
        $dataArray[] = $row;
    }
    // FIX: removed the dead "$dataArray === false" branch -- the fetch loop
    // above always leaves $dataArray as an array, so it could never fire.

    // Output the combined results as JSON
    echo json_encode($dataArray);

    // Free result memory
    pg_free_result($result);

    exit; // Stop script execution
}
|
||||
|
||||
if (isset($_GET['updater'])) {
    // Tables a client is allowed to touch; anything else is rejected outright.
    $allowedTables = ['wv511', 'ky511.ky511', 'ohgo'];

    // Work out which boolean flag this request toggles. Both actions share the
    // same shape (flag + id + table), so they funnel into one update path.
    $action = null;
    if (isset($_GET['lsr']) && isset($_GET['id']) && isset($_GET['table'])) {
        $action = ['column' => 'lsr', 'label' => 'LSR', 'raw' => $_GET['lsr']];
    } elseif (isset($_GET['hide']) && isset($_GET['id']) && isset($_GET['table'])) {
        $action = ['column' => 'hide', 'label' => 'Hide', 'raw' => $_GET['hide']];
    }

    if ($action !== null) {
        $table = $_GET['table'];
        // NOTE(review): the id is forwarded unvalidated (it is still bound as a
        // query parameter below, so no injection risk) — consider type checks.
        $id = $_GET['id'];

        if (!in_array($table, $allowedTables)) {
            http_response_code(400);
            die("Error ({$action['label']}): Invalid table specified.");
        }

        // Anything other than the literal string 'true' clears the flag.
        $flag = ($action['raw'] === 'true') ? 'true' : 'false';

        // The table name comes from the allowlist above, so interpolating it is
        // safe; flag and id go through bound parameters.
        $query = "UPDATE {$table} SET {$action['column']} = $1 WHERE id = $2";
        $result = pg_query_params($dbconn, $query, array($flag, $id));

        if ($result) {
            $affectedRows = pg_affected_rows($result);
            echo "{$action['label']} Update successful for table '{$table}'. {$affectedRows} row(s) affected for ID {$id}.";
        } else {
            http_response_code(500);
            error_log("{$action['label']} Query failed for table '{$table}', ID {$id}: " . pg_last_error($dbconn));
            die("Error: The {$action['label']} update query failed.");
        }
    } else {
        // Neither complete action was supplied; report exactly what is missing.
        http_response_code(400); // Bad Request

        $missing = [];
        if (!isset($_GET['id'])) {
            $missing[] = '"id"';
        }
        if (!isset($_GET['table'])) {
            $missing[] = '"table"';
        }
        if (!isset($_GET['lsr']) && !isset($_GET['hide'])) {
            $missing[] = 'action ("lsr" or "hide")';
        }

        if (!empty($missing)) {
            die('Error: Missing required parameter(s): ' . implode(', ', $missing) . '. Please provide a valid action ("lsr" or "hide"), "id", and "table".');
        } else {
            // Fallback: an action was supplied but the request was still malformed.
            die('Error: Invalid request parameters. Please provide action ("lsr" or "hide"), "id", and "table".');
        }
    }
}
|
||||
|
||||
// All routes have run; release the shared database connection if it exists.
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
37
php/mp4.php
Normal file
37
php/mp4.php
Normal file
@@ -0,0 +1,37 @@
|
||||
<?php
require_once __DIR__ . '/common.php';

/**
 * Builds an animated GIF from a POSTed list of image paths via ImageMagick's
 * `convert` and echoes the result base64-encoded.
 *
 * POST parameters:
 *   data      array - ordered list of input images for `convert`
 *   images    int   - image count (kept for compatibility; not otherwise used)
 *   delay     int   - frame delay for every frame except the last
 *   lastdelay int   - frame delay for the final frame
 *   maxh/maxv int   - maximum output width/height (defaults 500x400)
 */
$elements  = (isset($_POST['data']) && is_array($_POST['data'])) ? $_POST['data'] : [];
$numimages = isset($_POST['images']) ? $_POST['images'] : 0;
$delay     = isset($_POST['delay']) ? $_POST['delay'] : 0;
$lastdelay = isset($_POST['lastdelay']) ? $_POST['lastdelay'] : 0;
$maxh      = isset($_POST['maxh']) ? $_POST['maxh'] : null;
$maxv      = isset($_POST['maxv']) ? $_POST['maxv'] : null;

// Security fix: every one of these values is interpolated into a shell
// command below, so the numeric ones are clamped to integers and the file
// names are shell-escaped. Previously raw $_POST text reached shell_exec(),
// which is a command-injection hole.
if (!is_numeric($maxh)) {
    $maxh = 500;
}
if (!is_numeric($maxv)) {
    $maxv = 400;
}
if (!is_numeric($delay)) {
    $delay = 0;
}
if (!is_numeric($lastdelay)) {
    $lastdelay = 0;
}
$maxh      = (int) $maxh;
$maxv      = (int) $maxv;
$delay     = (int) $delay;
$lastdelay = (int) $lastdelay;

// Robustness fix: with no input frames the original still invoked `convert`
// with an empty argument list; bail out instead.
if (empty($elements)) {
    exit;
}

$numimages = $numimages - 1;

// Build the `convert` argument list. Bug fix: the original detected the
// "last" frame by comparing values, so any duplicate of the final image also
// received the long delay; compare by key instead so only the true final
// frame gets $lastdelay.
$inputfiles = '';
$lastKey = array_key_last($elements);
foreach ($elements as $key => $value) {
    $frameDelay = ($key === $lastKey) ? $lastdelay : $delay;
    $inputfiles .= " -delay {$frameDelay} " . escapeshellarg($value);
}

// `\>` makes -resize shrink-only; shell sees an escaped '>'.
$gif = shell_exec("convert {$inputfiles} -resize {$maxh}x{$maxv}\> -layers Optimize gif:-");

echo base64_encode($gif === null ? '' : $gif);
exit;
?>
|
||||
385
php/nws.php
Normal file
385
php/nws.php
Normal file
@@ -0,0 +1,385 @@
|
||||
<?php
|
||||
require_once __DIR__ . '/common.php';
|
||||
|
||||
$dbconn = getDBConnection();
|
||||
|
||||
// No extra GET parameters beyond the routing key: dump the entire nws table
// as JSON, oldest lastupdate first.
if(empty(array_diff_key($_GET, array('service' => '')))) {
    $query = "SELECT * FROM nws order by lastupdate asc";
    // Bug fix: run on the explicit connection instead of relying on the
    // implicit "last opened connection" default of pg_query().
    $result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));

    // Bug fix: initialise the accumulator so an empty table encodes as []
    // (valid JSON array) rather than json_encode(null) from an undefined var.
    $array = [];
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);

    pg_free_result($result);
}
|
||||
|
||||
if (isset($_GET['officestats11'])) {
    // Per-office headcount stats for EVERY distinct date present in the data:
    // one end-of-day snapshot per unique lastupdate date, oldest first.
    $date_query = "SELECT DISTINCT DATE(lastupdate) as unique_date
                   FROM nws
                   WHERE status = 'active'
                   AND office ~ 'WFO'
                   ORDER BY unique_date ASC";

    $date_result = pg_query($dbconn, $date_query)
        or die('Date query failed: ' . pg_last_error());

    $datetime_points = [];
    while ($row = pg_fetch_array($date_result, null, PGSQL_ASSOC)) {
        $dt = DateTime::createFromFormat('Y-m-d', $row['unique_date']);
        // Bug fix: createFromFormat() returns false for a NULL/malformed date,
        // and the original then fataled on ->setTime(); skip and log instead.
        if ($dt === false) {
            error_log("Invalid date skipped: " . $row['unique_date']);
            continue;
        }
        $dt->setTime(23, 59, 59);
        $datetime_points[] = $dt->format('Y-m-d H:i:s');
    }

    pg_free_result($date_result);

    if (empty($datetime_points)) {
        echo json_encode(['error' => 'No valid datetime points found in database']);
        exit;
    }

    // Debug: Log the datetime points
    error_log("Processed datetime points: " . implode(', ', $datetime_points));

    // For each snapshot time: take each person's record closest to that time
    // (within -3 days .. +1 day), then count unique people and job titles per
    // WFO office.
    $query = "WITH latest_records AS (
        SELECT *,
            ROW_NUMBER() OVER (PARTITION BY personid ORDER BY ABS(EXTRACT(EPOCH FROM (lastupdate - CAST($1 AS TIMESTAMP)))) ASC) AS rn
        FROM nws
        WHERE status = 'active'
        AND lastupdate <= CAST($1 AS TIMESTAMP) + INTERVAL '1 day'
        AND lastupdate >= CAST($1 AS TIMESTAMP) - INTERVAL '3 days'
        AND office ~ 'WFO'
    ),
    otitle_counts AS (
        SELECT
            office,
            otitle,
            COUNT(*) AS otitle_count
        FROM latest_records
        WHERE rn = 1
        GROUP BY office, otitle
    )
    SELECT
        lr.office,
        COUNT(DISTINCT lr.personid) AS unique_person_count,
        (SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
         FROM (SELECT DISTINCT jsonb_build_object(
                   'otitle', tc2.otitle,
                   'count', tc2.otitle_count
               ) AS json_obj
               FROM otitle_counts tc2
               WHERE tc2.office = lr.office) AS subquery
        ) AS title_counts_array
    FROM latest_records lr
    WHERE lr.rn = 1
    GROUP BY lr.office
    ORDER BY unique_person_count DESC";

    $results_array = [];

    // Execute the snapshot query once per datetime point.
    foreach ($datetime_points as $datetime) {
        $result = pg_query_params($dbconn, $query, array($datetime))
            or die('Query failed: ' . pg_last_error());

        $office_data = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $office_data[] = $line;
        }

        $results_array[] = [
            'provided_datetime' => $datetime,
            'data' => $office_data
        ];

        pg_free_result($result);
    }

    // Debug: Log before output
    error_log("Final results: " . json_encode($results_array));

    echo json_encode($results_array);
}
|
||||
|
||||
if (isset($_GET['officestats'])) {
    if (isset($_GET['datetime'])) {
        // Accept either a repeated-array parameter or a comma-separated list.
        $rawDates = is_array($_GET['datetime'])
            ? $_GET['datetime']
            : explode(',', $_GET['datetime']);

        // De-duplicate the raw input, then normalise every m-d-Y date to an
        // end-of-day timestamp; unparseable entries are logged and dropped.
        $rawDates = array_values(array_unique($rawDates));
        $datetime_points = [];
        foreach ($rawDates as $candidate) {
            $parsed = DateTime::createFromFormat('m-d-Y', trim($candidate));
            if ($parsed === false) {
                error_log("Invalid date skipped: " . trim($candidate));
                continue;
            }
            $parsed->setTime(23, 59, 59);
            $datetime_points[] = $parsed->format('Y-m-d H:i:s');
        }

        $datetime_points = array_values(array_unique($datetime_points));

        // Debug trace of the final snapshot times.
        error_log("Processed datetime points: " . implode(', ', $datetime_points));

        if (empty($datetime_points)) {
            echo json_encode(['error' => 'No valid datetime points provided']);
            exit;
        }

        // Per snapshot time: each person's record closest to that time (within
        // -3 days .. +1 day), then unique people and title counts per office.
        $query = "WITH latest_records AS (
            SELECT *,
                ROW_NUMBER() OVER (PARTITION BY personid ORDER BY ABS(EXTRACT(EPOCH FROM (lastupdate - CAST($1 AS TIMESTAMP)))) ASC) AS rn
            FROM nws
            WHERE status = 'active'
            AND lastupdate <= CAST($1 AS TIMESTAMP) + INTERVAL '1 day'
            AND lastupdate >= CAST($1 AS TIMESTAMP) - INTERVAL '3 days'
            AND office ~ 'WFO'
        ),
        otitle_counts AS (
            SELECT
                office,
                otitle,
                COUNT(*) AS otitle_count
            FROM latest_records
            WHERE rn = 1
            GROUP BY office, otitle
        )
        SELECT
            lr.office,
            COUNT(DISTINCT lr.personid) AS unique_person_count,
            (SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
             FROM (SELECT DISTINCT jsonb_build_object(
                       'otitle', tc2.otitle,
                       'count', tc2.otitle_count
                   ) AS json_obj
                   FROM otitle_counts tc2
                   WHERE tc2.office = lr.office) AS subquery
            ) AS title_counts_array
        FROM latest_records lr
        WHERE lr.rn = 1
        GROUP BY lr.office
        ORDER BY unique_person_count DESC";

        $results_array = [];

        foreach ($datetime_points as $point) {
            $result = pg_query_params($dbconn, $query, array($point))
                or die('Query failed: ' . pg_last_error());

            $office_data = [];
            while ($row = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $office_data[] = $row;
            }

            $results_array[] = [
                'provided_datetime' => $point,
                'data' => $office_data
            ];

            pg_free_result($result);
        }

        error_log("Final results: " . json_encode($results_array));

        echo json_encode($results_array);
    }
}
|
||||
|
||||
if (isset($_GET['regionstats'])) {
    if (isset($_GET['datetime'])) {
        // Accept either a repeated-array parameter or a comma-separated list.
        $rawDates = is_array($_GET['datetime'])
            ? $_GET['datetime']
            : explode(',', $_GET['datetime']);

        // De-duplicate the raw input, then normalise every m-d-Y date to an
        // end-of-day timestamp; unparseable entries are logged and dropped.
        $rawDates = array_values(array_unique($rawDates));
        $datetime_points = [];
        foreach ($rawDates as $candidate) {
            $parsed = DateTime::createFromFormat('m-d-Y', trim($candidate));
            if ($parsed === false) {
                error_log("Invalid date skipped: " . trim($candidate));
                continue;
            }
            $parsed->setTime(23, 59, 59);
            $datetime_points[] = $parsed->format('Y-m-d H:i:s');
        }

        $datetime_points = array_values(array_unique($datetime_points));

        // Debug trace of the final snapshot times.
        error_log("Processed datetime points: " . implode(', ', $datetime_points));

        if (empty($datetime_points)) {
            echo json_encode(['error' => 'No valid datetime points provided']);
            exit;
        }

        // Same snapshot logic as the office stats, but aggregated by NWS
        // region (ER/WR/PR/AR/SR/CR) parsed out of the office string.
        $query = "WITH latest_records AS (
            SELECT *,
                ROW_NUMBER() OVER (PARTITION BY personid ORDER BY ABS(EXTRACT(EPOCH FROM (lastupdate - CAST($1 AS TIMESTAMP)))) ASC) AS rn,
                SUBSTRING(office FROM 'NWS/([EWPASC]R)') AS region
            FROM nws
            WHERE lastupdate <= CAST($1 AS TIMESTAMP) + INTERVAL '1 day' - INTERVAL '1 second'
            AND lastupdate >= CAST($1 AS TIMESTAMP) - INTERVAL '3 days'
            AND office ~ 'NWS/[EWPASC]R'
            and status = 'active'
        ),
        otitle_counts AS (
            SELECT
                region,
                otitle,
                COUNT(*) AS otitle_count
            FROM latest_records
            WHERE rn = 1
            GROUP BY region, otitle
        )
        SELECT
            lr.region,
            COUNT(DISTINCT lr.personid) AS unique_person_count,
            (SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
             FROM (SELECT DISTINCT jsonb_build_object(
                       'otitle', tc2.otitle,
                       'count', tc2.otitle_count
                   ) AS json_obj
                   FROM otitle_counts tc2
                   WHERE tc2.region = lr.region) AS subquery
            ) AS title_counts_array
        FROM latest_records lr
        WHERE lr.rn = 1
        GROUP BY lr.region
        ORDER BY unique_person_count DESC";

        $results_array = [];

        foreach ($datetime_points as $point) {
            $result = pg_query_params($dbconn, $query, array($point))
                or die('Query failed: ' . pg_last_error());

            $region_data = [];
            while ($row = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $region_data[] = $row;
            }

            $results_array[] = [
                'provided_datetime' => $point,
                'data' => $region_data
            ];

            pg_free_result($result);
        }

        echo json_encode($results_array);
    }
}
|
||||
|
||||
if (isset($_GET['drilldown'])) {
    if (isset($_GET['datetime'])) {
        // Accept either a repeated-array parameter or a comma-separated list.
        $input_dates = is_array($_GET['datetime'])
            ? $_GET['datetime']
            : explode(',', $_GET['datetime']);

        // Normalise each m-d-Y date to an end-of-day timestamp.
        $datetime_points = [];
        foreach ($input_dates as $date) {
            $dt = DateTime::createFromFormat('m-d-Y', trim($date));
            if ($dt === false) {
                // Consistency fix: log skipped dates the way the other stats
                // routes do instead of discarding them silently.
                error_log("Invalid date skipped: " . trim($date));
                continue;
            }
            $dt->setTime(23, 59, 59);
            $datetime_points[] = $dt->format('Y-m-d H:i:s');
        }
        $datetime_points = array_unique($datetime_points);
        $datetime_points = array_values($datetime_points);

        // Robustness fix: report an error (as officestats/regionstats do)
        // instead of silently echoing [] when every supplied date is invalid.
        if (empty($datetime_points)) {
            echo json_encode(['error' => 'No valid datetime points provided']);
            exit;
        }

        // Point-in-time snapshot: each person's single latest active record as
        // of $1. Unlike the windowed stats queries there is deliberately no
        // lower bound on lastupdate.
        $query = "WITH latest_records AS (
            SELECT *,
                ROW_NUMBER() OVER (PARTITION BY personid ORDER BY lastupdate DESC) AS rn
            FROM nws
            WHERE status = 'active'
            AND lastupdate <= $1
        ),
        otitle_counts AS (
            SELECT
                office,
                otitle,
                COUNT(*) AS otitle_count
            FROM latest_records
            WHERE rn = 1
            GROUP BY office, otitle
        )
        SELECT
            lr.office,
            COUNT(DISTINCT lr.personid) AS unique_person_count,
            (SELECT ARRAY_AGG(json_obj ORDER BY json_obj->>'otitle' ASC)
             FROM (SELECT DISTINCT jsonb_build_object(
                       'otitle', tc2.otitle,
                       'count', tc2.otitle_count
                   ) AS json_obj
                   FROM otitle_counts tc2
                   WHERE tc2.office = lr.office) AS subquery
            ) AS title_counts_array
        FROM latest_records lr
        WHERE lr.rn = 1
        GROUP BY lr.office
        ORDER BY unique_person_count DESC";

        $results_array = [];

        // Execute query for each provided datetime.
        foreach ($datetime_points as $datetime) {
            $result = pg_query_params($dbconn, $query, array($datetime))
                or die('Query failed: ' . pg_last_error());

            $office_data = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $office_data[] = $line;
            }

            $results_array[] = [
                'provided_datetime' => $datetime,
                'data' => $office_data
            ];

            pg_free_result($result);
        }

        echo json_encode($results_array);
    }
}
|
||||
|
||||
// All routes have run; release the database connection if it exists.
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
23
php/ohgo.php
Normal file
23
php/ohgo.php
Normal file
@@ -0,0 +1,23 @@
|
||||
<?php
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// Recent OHGO incidents: still open (endtime IS NULL) or closed within the
// last 48 hours; timestamps truncated to the minute, oldest start first.
$query = "SELECT lat,lon,id,category,roadstatus,cwa,county,state,location,routename,description,lsr,date_trunc('minute', start) as start, date_trunc('minute', endtime) as endtime,date_trunc('minute', lastupdate) as lastupdate from ohgo where endtime is null or endtime > now() - interval '48 hours' order by start asc;";
// Bug fix: run on the explicit connection instead of the implicit
// "last opened connection" default of pg_query().
$result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));

// Bug fix: initialise the accumulator so an empty result set encodes as []
// rather than json_encode(null) from an undefined variable.
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
echo json_encode($array);

// Free resultset
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
23
php/power.php
Normal file
23
php/power.php
Normal file
@@ -0,0 +1,23 @@
|
||||
<?php
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// Active outage points (lat/lon + customers out) for the RLX CWA.
// NOTE(review): the `power` table is reportedly being replaced by `newpower`
// — confirm before extending this query.
$query = "SELECT lat,lon,outagen FROM power WHERE active = true and cwa = 'RLX'";
// Bug fix: run on the explicit connection instead of the implicit
// "last opened connection" default of pg_query().
$result = pg_query($dbconn, $query) or die('Query failed: ' . pg_last_error($dbconn));

// Bug fix: initialise the accumulator so an empty result set encodes as []
// rather than json_encode(null) from an undefined variable.
$array = [];
while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
    $array[] = $line;
}
echo json_encode($array);

// Free resultset
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
684
php/powerapi.php
Normal file
684
php/powerapi.php
Normal file
@@ -0,0 +1,684 @@
|
||||
<?php
|
||||
require_once __DIR__ . '/common.php';
|
||||
|
||||
$dbconn = getDBConnection();
|
||||
|
||||
//no gets, current point outage info
if (empty(array_diff_key($_GET, array('service' => '')))) {
    try {
        // Active RLX outage points as a GeoJSON FeatureCollection, ordered by
        // estimated outage start; area geometry falls back to the point.
        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'time', startguess,
                            'county', county,
                            'state', state,
                            'outage', outagen,
                            'lastchange', lastchange,
                            'cause', cause,
                            'area_geometry', ST_AsGeoJSON(COALESCE(realareageom, realgeom))::json
                        )
                    )
                    ORDER BY startguess ASC
                )
            )
            FROM power
            WHERE cwa = $1 AND active = true
        ";

        $result = pg_query_params($dbconn, $query, array('RLX'));
        if ($result === false) {
            // Fix: report the error from this connection explicitly.
            throw new Exception('Query failed: ' . pg_last_error($dbconn));
        }

        $resultArray = pg_fetch_all($result);

        // Fix: set the JSON content type before EITHER branch emits output;
        // previously the empty-collection fallback was sent without it.
        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        http_response_code(500);
        die('Query execution failed: ' . $e->getMessage());
    }
}
|
||||
|
||||
if (isset($_GET['states'])) {
    try {
        // GeoJSON FeatureCollection of the six-state footprint, reprojected to
        // WGS84; every column except the raw geometry becomes a property.
        $query = "
            SELECT jsonb_build_object(
                'type', 'FeatureCollection',
                'features', jsonb_agg(features.feature)
            )
            FROM (
                SELECT jsonb_build_object(
                    'type', 'Feature',
                    'geometry', ST_AsGeoJSON(ST_Transform(geom, 4326))::jsonb,
                    'properties', to_jsonb(properties) - 'geom'
                ) AS feature
                FROM (
                    SELECT *
                    FROM states
                    WHERE state IN ('WV', 'VA', 'KY', 'MD', 'PA', 'OH')
                ) AS properties
            ) AS features
        ";

        $stateResult = pg_query($dbconn, $query);
        if ($stateResult === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $rows = pg_fetch_all($stateResult);

        // Emit the aggregated document, or an empty collection when no rows.
        header('Content-Type: application/json');
        echo ($rows && isset($rows[0]['jsonb_build_object']))
            ? $rows[0]['jsonb_build_object']
            : json_encode(['type' => 'FeatureCollection', 'features' => []]);

        pg_free_result($stateResult);
    } catch (Exception $e) {
        http_response_code(500);
        header('Content-Type: application/json');
        echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
if (isset($_GET['max'])) {
    if (isset($_GET['start']) && isset($_GET['end'])) {
        try {
            $starttime = getParam('start');
            $endtime = getParam('end');

            // Peak simultaneous outage per county/state over the window.
            // Fix: "update" is a RESERVED word in PostgreSQL and cannot be used
            // as a column reference unquoted; quote it everywhere.
            // Also dropped the redundant outer DISTINCT ON — GROUP BY
            // county, state already yields one row per pair.
            // NOTE(review): countyoutages/"update" are slated for replacement
            // by newcountyoutages/fetch_time — confirm before further changes.
            $query = "
                SELECT
                    max(outage) as max_outage,
                    county,
                    state
                FROM (
                    SELECT DISTINCT ON (county, state, \"update\")
                        county,
                        state,
                        SUM(outages) as outage,
                        \"update\" as time,
                        SUM(served) as served
                    FROM countyoutages
                    WHERE \"update\" > $2
                    AND \"update\" < $3
                    AND cwa = $1
                    GROUP BY county, state, \"update\"
                ) as subquery
                GROUP BY county, state
            ";

            $result = pg_query_params(
                $dbconn,
                $query,
                ['RLX', $starttime, $endtime]
            );

            if ($result === false) {
                throw new Exception('Query failed: ' . pg_last_error());
            }

            $results = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $results[] = $line;
            }

            header('Content-Type: application/json');
            echo json_encode($results);

            pg_free_result($result);
        } catch (Exception $e) {
            header('Content-Type: application/json');
            http_response_code(500);
            echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
            exit;
        }
    } else {
        header('Content-Type: application/json');
        http_response_code(400);
        echo json_encode(['error' => 'Both start and end parameters are required']);
    }
}
|
||||
|
||||
if (isset($_GET['county'])) {
    try {
        // Current per-county outage totals at the most recent fetch time.
        // Fix: "update" is a RESERVED word in PostgreSQL and must be quoted to
        // be used as a column reference.
        // NOTE(review): countyoutages/"update" are slated for replacement by
        // newcountyoutages/fetch_time — confirm before further changes.
        $query = "
            WITH latest_update AS (
                SELECT MAX(\"update\") as max_update FROM countyoutages
            )
            SELECT
                county,
                state,
                SUM(outages) as outage,
                MAX(\"update\") as time,
                SUM(served) as served,
                ROUND(
                    CASE
                        WHEN SUM(served) > 0 THEN (SUM(outages)::FLOAT / SUM(served)) * 100
                        ELSE 0
                    END::NUMERIC, 2
                ) as perout
            FROM countyoutages
            JOIN latest_update ON countyoutages.\"update\" = latest_update.max_update
            WHERE cwa = $1
            GROUP BY county, state
            ORDER BY county, state;
        ";

        $result = pg_query_params($dbconn, $query, ['RLX']);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $results = [];
        while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $results[] = $line;
        }

        header('Content-Type: application/json');
        echo json_encode($results);

        pg_free_result($result);
    } catch (Exception $e) {
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
if (isset($_GET['countyarchive'])) {
    if (isset($_GET['start']) && isset($_GET['end'])) {
        try {
            $starttime = getParam('start');
            $endtime = getParam('end');

            // Time series of per-county outage totals across the window.
            // Fix: "update" is a RESERVED word in PostgreSQL and must be
            // quoted to be used as a column reference.
            // NOTE(review): countyoutages/"update" are slated for replacement
            // by newcountyoutages/fetch_time — confirm before further changes.
            $query = "
                SELECT DISTINCT ON (county, state, \"update\")
                    county,
                    state,
                    SUM(outages) as outage,
                    \"update\" as time,
                    SUM(served) as served
                FROM countyoutages
                WHERE \"update\" > $2
                AND \"update\" < $3
                AND cwa = $1
                GROUP BY county, state, \"update\"
            ";

            $result = pg_query_params($dbconn, $query, ['RLX', $starttime, $endtime]);
            if ($result === false) {
                throw new Exception('Query failed: ' . pg_last_error());
            }

            $results = [];
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $results[] = $line;
            }

            header('Content-Type: application/json');
            echo json_encode($results);

            pg_free_result($result);
        } catch (Exception $e) {
            header('Content-Type: application/json');
            http_response_code(500);
            echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
            // Fix: only free a genuine result resource (false would TypeError).
            if (!empty($result)) {
                pg_free_result($result);
            }
            exit;
        }
    } else {
        header('Content-Type: application/json');
        http_response_code(400);
        echo json_encode(['error' => 'Both start and end parameters are required']);
    }
}
|
||||
|
||||
if (isset($_GET['archivepoint'])) {
    try {
        if (!isset($_GET['start']) || !isset($_GET['end'])) {
            throw new Exception('Both start and end parameters are required');
        }

        $windowStart = getParam('start');
        $windowEnd = getParam('end');

        // Outage points (no area geometry) whose estimated start falls after
        // the window start and whose last change precedes the window end,
        // packaged as a GeoJSON FeatureCollection.
        $query = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(realgeom)::json,
                        'properties', json_build_object(
                            'time', startguess,
                            'county', county,
                            'state', state,
                            'outage', outagen,
                            'lastchange', lastchange,
                            'cause', cause
                        )
                    )
                    ORDER BY startguess ASC
                )
            )
            FROM power
            WHERE cwa = $1
            AND startguess > $2
            AND lastchange < $3
        ";

        $result = pg_query_params($dbconn, $query, ['RLX', $windowStart, $windowEnd]);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $rows = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($rows && isset($rows[0]['json_build_object'])) {
            echo $rows[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        header('Content-Type: application/json');
        // Missing-parameter errors are client errors; everything else is 500.
        http_response_code(strpos($e->getMessage(), 'required') !== false ? 400 : 500);
        echo json_encode(['error' => $e->getMessage()]);
        if (isset($result)) {
            pg_free_result($result);
        }
        exit;
    }
}
|
||||
|
||||
// Currently active severe warnings as a GeoJSON FeatureCollection.
if(@$_GET['svr'] =='current') {
    $result = pg_query($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error($dbconn));

    $resultArray = pg_fetch_all($result);

    // Fix: set the JSON content type (siblings do) and guard against an empty
    // result set — the original indexed $resultArray[0] unconditionally.
    header('Content-Type: application/json');
    if ($resultArray && isset($resultArray[0]['json_build_object'])) {
        echo $resultArray[0]['json_build_object'];
    } else {
        echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
    }

    pg_free_result($result);
}
|
||||
|
||||
if (isset($_GET['svr']) && $_GET['svr'] === 'archive') {
    try {
        $result = null;

        // Both request variants return the same GeoJSON FeatureCollection and
        // differed only in their WHERE clause, so the SELECT skeleton is
        // shared (consistency fix: the two copies had begun to drift).
        $select = "
            SELECT json_build_object(
                'type', 'FeatureCollection',
                'features', json_agg(
                    json_build_object(
                        'type', 'Feature',
                        'geometry', ST_AsGeoJSON(nwspoly)::json,
                        'properties', json_build_object(
                            'issue', issue,
                            'end', endtime,
                            'vtec', vtec,
                            'type', warntype
                        )
                    )
                )
            )
            FROM svr
        ";

        if (isset($_GET['start']) && isset($_GET['end'])) {
            // Explicit window: warnings issued after start and ended before end.
            $starttime = getParam('start');
            $endtime = getParam('end');
            $result = pg_query_params(
                $dbconn,
                $select . " WHERE issue > $1 AND endtime < $2",
                [$starttime, $endtime]
            );
        } elseif (!isset($_GET['start']) && !isset($_GET['end'])) {
            // Default window: warnings overlapping the last 24 hours.
            $result = pg_query(
                $dbconn,
                $select . " WHERE issue < NOW() - INTERVAL '24 hours' AND endtime > NOW() - INTERVAL '24 hours'"
            );
        } else {
            throw new Exception('Both start and end parameters are required together');
        }

        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            echo $resultArray[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);
    } catch (Exception $e) {
        // Fix: only free a genuine result resource; the original could call
        // pg_free_result(false) here, which is a TypeError on PHP 8.
        if (!empty($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'required') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
if (isset($_GET['svrpolys'])) {
    try {
        // Severe-warning polygon metadata for warnings whose end time falls
        // within the last 60 days.
        $query = "
            SELECT
                vtec,
                outagesvalid,
                polygonpop,
                outagesbuffer,
                lsrids
            FROM svr
            WHERE EXTRACT(EPOCH FROM (CURRENT_TIMESTAMP - endtime)) / 60 / 60 / 24 < 60
        ";

        $result = pg_query($dbconn, $query);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $rows = [];
        while ($row = pg_fetch_array($result, null, PGSQL_ASSOC)) {
            $rows[] = $row;
        }

        header('Content-Type: application/json');
        echo json_encode($rows);

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
if (isset($_GET['powerids'])) {
    try {
        $idsRaw = $_GET['powerids'];

        if (empty($idsRaw)) {
            throw new Exception('No power IDs provided');
        }

        // Comma-separated list -> integers; non-numeric entries collapse to 0
        // under intval and are dropped by the filter.
        $ids = array_filter(array_map('intval', explode(',', $idsRaw)));

        if (empty($ids)) {
            throw new Exception('Invalid power ID format');
        }

        // One numbered placeholder per id: $1, $2, ...
        $placeholders = implode(',', array_map(
            static function ($n) { return '$' . $n; },
            range(1, count($ids))
        ));

        // Point details plus the LSR time converted to Eastern local time.
        $query = "
            SELECT
                lat,
                lon,
                lastchange,
                startguess,
                peakoutage,
                cause,
                lsrtime,
                lsrref,
                (lsrtime AT TIME ZONE 'America/New_York')::timestamp as lsrlocal
            FROM power
            WHERE id IN ($placeholders)
        ";

        $result = pg_query_params($dbconn, $query, $ids);
        if ($result === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $rows = pg_fetch_all($result) ?: [];

        header('Content-Type: application/json');
        echo json_encode($rows);

        pg_free_result($result);
    } catch (Exception $e) {
        if (isset($result)) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        http_response_code(strpos($e->getMessage(), 'Invalid') !== false ? 400 : 500);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
// Route: ?poweridsgeojson=1,2,3 — the same outage records as ?powerids, but
// packaged server-side by Postgres as a GeoJSON FeatureCollection.
if (isset($_GET['poweridsgeojson'])) {
    try {
        $rawIds = $_GET['poweridsgeojson'];

        if (empty($rawIds)) {
            throw new Exception('No power IDs provided');
        }

        // Sanitize: cast every comma-separated token to int, discard falsy ones.
        $idList = array_filter(array_map('intval', explode(',', $rawIds)));

        if (empty($idList)) {
            throw new Exception('Invalid power ID format');
        }

        // Build the $1..$N placeholder list for the IN clause.
        $slots = [];
        for ($n = 1, $total = count($idList); $n <= $total; $n++) {
            $slots[] = '$' . $n;
        }
        $placeholders = implode(',', $slots);

        $sql = "
SELECT json_build_object(
'type', 'FeatureCollection',
'features', json_agg(
json_build_object(
'type', 'Feature',
'geometry', ST_AsGeoJSON(realgeom)::json,
'properties', json_build_object(
'id', id,
'time', (startguess AT TIME ZONE 'UTC')::timestamp,
'county', county,
'state', state,
'cause', cause,
'outage', peakoutage,
'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
)
) ORDER BY startguess ASC
)
)
FROM power
WHERE id IN ($placeholders)
";

        $res = pg_query_params($dbconn, $sql, $idList);
        if ($res === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $fetched = pg_fetch_all($res);

        header('Content-Type: application/json');
        // The query returns exactly one row holding the whole document.
        if ($fetched && isset($fetched[0]['json_build_object'])) {
            echo $fetched[0]['json_build_object'];
        } else {
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($res);
    } catch (Exception $e) {
        if (isset($res)) {
            pg_free_result($res);
        }
        header('Content-Type: application/json');
        $statusCode = strpos($e->getMessage(), 'Invalid') !== false ? 400 : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
// Route: ?polygongeojson=<GeoJSON> — outages whose geometry lies within the
// supplied Polygon/MultiPolygon, as a GeoJSON FeatureCollection.
// Fixes:
//  * catch block used is_resource($result); on PHP >= 8.1 pg_query_params()
//    returns a PgSql\Result OBJECT, so the guard never matched and the result
//    was never freed — accept both the object and the legacy resource.
//  * isset($outputData['features']) is false when the value is null, so the
//    null->[] normalization branch was unreachable — use array_key_exists.
if (isset($_GET['polygongeojson'])) {
    $result = null; // Initialize result to null for catch block safety
    try {
        $polygonGeoJsonString = $_GET['polygongeojson'];

        if (empty($polygonGeoJsonString)) {
            throw new Exception('No GeoJSON polygon provided', 400); // Use exception code for status
        }

        // 1. Validate the input is well-formed JSON. The *original string* is
        // what gets handed to PostGIS's ST_GeomFromGeoJSON for robustness.
        $polygonGeoJson = json_decode($polygonGeoJsonString);
        if (json_last_error() !== JSON_ERROR_NONE) {
            throw new Exception('Invalid JSON format: ' . json_last_error_msg(), 400);
        }

        // 2. Basic structural validation; PostGIS performs full validation.
        // Both Polygon and MultiPolygon are accepted since ST_Within handles both.
        if (!is_object($polygonGeoJson) || !isset($polygonGeoJson->type) || !in_array($polygonGeoJson->type, ['MultiPolygon', 'Polygon'])) {
            throw new Exception('Input GeoJSON must be of type Polygon or MultiPolygon.', 400);
        }
        if (!isset($polygonGeoJson->coordinates) || !is_array($polygonGeoJson->coordinates)) {
            throw new Exception('Input GeoJSON must have a coordinates array.', 400);
        }

        // 3. Parse the GeoJSON in Postgres, stamp it with SRID 4326 (WGS84 —
        // the GeoJSON standard), and keep outages whose realgeom falls inside.
        // 'realgeom' should carry a spatial index for performance.
        $query = "
SELECT json_build_object(
'type', 'FeatureCollection',
'features', json_agg(
json_build_object(
'type', 'Feature',
'geometry', ST_AsGeoJSON(realgeom)::json,
'properties', json_build_object(
'id', id,
'time', (startguess AT TIME ZONE 'UTC')::timestamp,
'county', county,
'state', state,
'cause', cause,
'outage', peakoutage,
'lsrtime', (lsrtime AT TIME ZONE 'UTC')::timestamp
)
) ORDER BY startguess ASC -- Optional ordering
)
)
FROM power
WHERE ST_Within(realgeom, ST_SetSRID(ST_GeomFromGeoJSON($1), 4326))
";

        // 4. Execute with the GeoJSON string bound as a parameter.
        $params = [$polygonGeoJsonString];
        $result = pg_query_params($dbconn, $query, $params);

        if ($result === false) {
            // Map PostGIS parse errors on the supplied geometry to a 400.
            $pgError = pg_last_error($dbconn);
            if (strpos($pgError, 'invalid GeoJSON representation') !== false || strpos($pgError, 'ParseException') !== false || strpos($pgError, 'Invalid polygon') !== false) {
                throw new Exception('Invalid GeoJSON geometry data provided: ' . $pgError, 400);
            } else {
                throw new Exception('Query failed: ' . $pgError, 500);
            }
        }

        // 5. Fetch and output results.
        $resultArray = pg_fetch_all($result);

        header('Content-Type: application/json');
        if ($resultArray && isset($resultArray[0]['json_build_object'])) {
            // json_agg yields NULL when no rows match; normalize to [].
            $outputJson = $resultArray[0]['json_build_object'];
            $outputData = json_decode($outputJson, true);
            if (is_array($outputData) && array_key_exists('features', $outputData) && $outputData['features'] === null) {
                $outputData['features'] = [];
                echo json_encode($outputData);
            } else {
                echo $outputJson; // Output the JSON directly from Postgres
            }
        } else {
            // Fallback when the result row itself is absent.
            echo json_encode(['type' => 'FeatureCollection', 'features' => []]);
        }

        pg_free_result($result);

    } catch (Exception $e) {
        // 6. Error handling — free the result across PHP versions (see fix note).
        if (isset($result) && ($result instanceof \PgSql\Result || is_resource($result))) {
            pg_free_result($result);
        }
        header('Content-Type: application/json');
        // Use exception code for status if provided (>=400), default to 500
        $statusCode = ($e->getCode() >= 400 && $e->getCode() < 600) ? $e->getCode() : 500;
        http_response_code($statusCode);
        echo json_encode(['error' => $e->getMessage()]);
        exit; // Stop script execution after error
    }
}
|
||||
|
||||
// Close database connection when needed
// NOTE(review): $dbconn is assumed to have been opened by earlier route
// handlers in this file — confirm against the file header (outside this view).
if (isset($dbconn)) {
pg_close($dbconn);
}
?>
|
||||
147
php/powerapitest.php
Normal file
147
php/powerapitest.php
Normal file
@@ -0,0 +1,147 @@
|
||||
<?php
|
||||
require_once __DIR__ . '/common.php';
|
||||
|
||||
$dbconn = getDBConnection();
|
||||
|
||||
//no gets, current point outage info
// Emits a GeoJSON FeatureCollection of active point outages for CWA RLX,
// assembled entirely by Postgres via json_build_object/json_agg.
// NOTE(review): $resultArray[0] is read without checking pg_fetch_all()'s
// return value; aggregate-only selects return one row, but confirm.
if(empty(array_diff_key($_GET, array('service' => '')))) {
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
array('RLX')) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
|
||||
|
||||
//county/state max
// Per-county peak outage between ?start and ?end for CWA RLX.
// Fix: the GET values are passed straight to pg_query_params(), which binds
// them safely; the previous pg_escape_string() pass double-escaped the bound
// values (escaping is for string-interpolated SQL, not parameters).
if($_GET['max'] ?? null) {
    if($_GET['start'] ?? null) {
        $starttime = $_GET['start'];
        if($_GET['end'] ?? null) {
            $endtime = $_GET['end'];

            $result = pg_query_params($dbconn,
                "select distinct on (county,state) max(outage),county,state from (select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $2 and update < $3 and cwa = $1 group by county,state,update) as potato group by county,state",
                array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());

            // Collect rows; an empty window serializes as [].
            $array = array();
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $array[] = $line;
            }
            echo json_encode($array);
        }
    }
}
|
||||
|
||||
// Route: ?county — the most recent per-county outage totals across seven CWAs,
// with perout as the percentage of served customers currently out (NULL-safe
// when a county serves zero customers).
if (isset($_GET['county'])) {
    try {
        $sql = "
SELECT DISTINCT ON (county, state)
county,
state,
SUM(outages) as outage,
update as time,
SUM(served) as served,
ROUND(
CAST(
CASE
WHEN SUM(served) = 0 THEN NULL
ELSE (SUM(outages)::FLOAT / SUM(served)) * 100
END AS NUMERIC
), 2
) as perout
FROM countyoutages
WHERE update = (SELECT MAX(update) FROM countyoutages)
AND (cwa = $1 OR cwa = $2 OR cwa = $3 OR cwa = $4 OR cwa = $5 OR cwa = $6 OR cwa = $7)
GROUP BY county, state, update
";

        $res = pg_query_params($dbconn, $sql, ['RLX','JKL','ILN','PBZ','MRX','LWX','RNK']);
        if ($res === false) {
            throw new Exception('Query failed: ' . pg_last_error());
        }

        $rows = [];
        while (($row = pg_fetch_array($res, null, PGSQL_ASSOC)) !== false) {
            $rows[] = $row;
        }

        header('Content-Type: application/json');
        echo json_encode($rows);

        pg_free_result($res);
    } catch (Exception $e) {
        header('Content-Type: application/json');
        http_response_code(500);
        echo json_encode(['error' => 'Query execution failed: ' . $e->getMessage()]);
        exit;
    }
}
|
||||
|
||||
//county archive
// Per-county outage snapshots between ?start and ?end across eight CWAs.
// Fix: values are bound via pg_query_params() without the previous
// pg_escape_string() pass, which double-escaped bound parameters.
if($_GET['countyarchive'] ?? null) {
    if($_GET['start'] ?? null) {
        $starttime = $_GET['start'];
        if($_GET['end'] ?? null) {
            $endtime = $_GET['end'];

            $result = pg_query_params($dbconn,
                "select distinct on (county,state,update) county,state,sum(outages) as outage, update as time, sum(served) as served from countyoutages where update > $9 and update < $10 and (cwa = $1 or cwa = $2 or cwa = $3 or cwa = $4 or cwa = $5 or cwa = $6 or cwa = $7 or cwa = $8) group by county,state,update",
                array('RLX','JKL','ILN','PBZ','MRX','LWX','RNK','CTP',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());

            // Collect rows; an empty window serializes as [].
            $array = array();
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $array[] = $line;
            }
            echo json_encode($array);
        }
    }
}
|
||||
|
||||
//Archive point data
// GeoJSON of point outages for CWA RLX within ?start/?end.
// Fixes: start/end are checked with isset() before use (the previous version
// read them unconditionally, raising undefined-index warnings and running the
// query with empty bounds), and the raw values are bound via pg_query_params()
// instead of being pre-escaped with pg_escape_string().
if($_GET['archivepoint'] ?? null) {
    if(isset($_GET['start']) && isset($_GET['end'])) {
        $starttime = $_GET['start'];
        $endtime = $_GET['end'];
        $result = pg_query_params($dbconn,
            "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3",
            array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());
        $resultArray = pg_fetch_all($result);
        echo($resultArray[0]['json_build_object']);
    }
}
|
||||
|
||||
// Svr related functionality
// Route: ?svr=current — severe warnings active right now, as GeoJSON.
// Fix: `$_GET['svr'] ?? null == 'current'` parsed as
// `$_GET['svr'] ?? (null == 'current')` because == binds tighter than ??,
// so ANY truthy ?svr value matched this branch. Parenthesize the coalesce
// so the parameter value itself is compared.
if(($_GET['svr'] ?? null) === 'current') {
    $result = pg_query($dbconn,
        "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()") or die('Query failed: ' . pg_last_error());
    $resultArray = pg_fetch_all($result);
    echo($resultArray[0]['json_build_object']);
}
|
||||
|
||||
// Route: ?svr=archive — warnings in an explicit ?start/?end window, or the
// last 24 hours when no window is supplied.
// Fixes: the `??`/`==` precedence bug (any truthy ?svr matched — see the
// ?svr=current route) and the pg_escape_string() double-escaping of values
// that pg_query_params() already binds safely.
if(($_GET['svr'] ?? null) === 'archive') {
    if(isset($_GET['start']) && isset($_GET['end'])) {
        $starttime = $_GET['start'];
        $endtime = $_GET['end'];

        $result = pg_query_params($dbconn,
            "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
            ,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
        $resultArray = pg_fetch_all($result);
        echo($resultArray[0]['json_build_object']);
    }

    // Fallback: no explicit window — warnings from the last 24 hours.
    if(!isset($_GET['start']) && !isset($_GET['end'])) {
        $result = pg_query($dbconn,
            "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
        $resultArray = pg_fetch_all($result);
        echo($resultArray[0]['json_build_object']);
    }
}
|
||||
|
||||
// NOTE(review): $result here is whatever the last route handler left behind;
// if no handler ran, pg_free_result() operates on an unset variable and
// raises a warning — confirm whether an isset() guard is wanted.
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
pg_close($dbconn);
}
?>
|
||||
97
php/searchapi.php
Normal file
97
php/searchapi.php
Normal file
@@ -0,0 +1,97 @@
|
||||
<?php
|
||||
require_once __DIR__ . '/common.php';
|
||||
|
||||
$dbconn = getDBConnection();
|
||||
|
||||
//no gets, current point outage info
// GeoJSON FeatureCollection of active point outages for CWA RLX.
// NOTE(review): $resultArray[0] is read without checking pg_fetch_all()'s
// return value; aggregate-only selects return one row, but confirm.
if(empty(array_diff_key($_GET, array('service' => '')))) {
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and active = true",
array('RLX')) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
|
||||
|
||||
//county current
// Latest per-county outage/served totals for CWA RLX as a JSON array.
// Fix: $array is initialized before the fetch loop, so an empty result set
// serializes as "[]" instead of raising an undefined-variable warning and
// emitting "null".
if(isset($_GET['county'])) {
    $result = pg_query_params($dbconn,
        "SELECT distinct on (county,state) update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 order by county,state,update desc",
        array('RLX')) or die('Query failed: ' . pg_last_error());

    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
}
|
||||
|
||||
//county archive
// Per-county outage history for CWA RLX between ?start and ?end.
// Fix: $array is initialized before the fetch loop, so an empty window
// serializes as "[]" instead of a warning plus "null".
if(isset($_GET['countyarchive'])) {
    if(isset($_GET['start'])) {
        $starttime = getParam('start');
        if(isset($_GET['end'])) {
            $endtime = getParam('end');

            $result = pg_query_params($dbconn,
                "SELECT county,state, update as time, county, state, outages as outage,served FROM countyoutages where cwa = $1 and update > $2 and update < $3 order by update asc",
                array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());

            $array = array();
            while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
                $array[] = $line;
            }
            echo json_encode($array);
        }
    }
}
|
||||
|
||||
//Archive point data
// GeoJSON of point outages for CWA RLX within ?start/?end.
// getParam() is a common.php helper — presumably a sanitized GET accessor;
// verify its contract there.
if(isset($_GET['archivepoint'])) {
if(isset($_GET['start'])) {
$starttime = getParam('start');
if(isset($_GET['end'])) {
$endtime = getParam('end');
// NOTE(review): $resultArray[0] is read without checking for an empty result —
// aggregate-only selects return one row, but confirm.
$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(realgeom)::json,'properties',json_build_object('time',startguess,'county',county,'state',state,'outage',outagen,'lastchange',lastchange,'cause',cause))order by startguess asc)) FROM power WHERE cwa = $1 and startguess > $2 and lastchange < $3"
,array('RLX',$starttime,$endtime)) or die('Query failed: ' . pg_last_error());

$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
}
|
||||
|
||||
// Route: ?svr=current — severe warnings active right now, as GeoJSON.
if (isset($_GET['svr']) && $_GET['svr'] === 'current') {
    $sql = "SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() and endtime > now()";
    $result = pg_query($dbconn, $sql) or die('Query failed: ' . pg_last_error());
    // Single-row result carrying the whole GeoJSON document.
    $rows = pg_fetch_all($result);
    echo($rows[0]['json_build_object']);
}
|
||||
|
||||
// Route: ?svr=archive — warnings in an explicit ?start/?end window.
if(isset($_GET['svr']) && $_GET['svr'] === 'archive') {
if(isset($_GET['start'])) {
$starttime = getParam('start');
if(isset($_GET['end'])) {
$endtime = getParam('end');

$result = pg_query_params($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue > $1 and endtime < $2"
,array($starttime,$endtime)) or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}

// Fallback: when neither bound is supplied, return warnings overlapping the
// last 24 hours. Note this branch is nested inside the svr=archive check, so
// it only fires for ?svr=archive with no start/end parameters.
if(!isset($_GET['start']) && !isset($_GET['end'])) {
$result = pg_query($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'end',endtime,'vtec',vtec,'type',warntype)))) FROM svr where issue < now() - interval '24 hours' and endtime > now() - interval '24 hours'") or die('Query failed: ' . pg_last_error());
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}
}
|
||||
|
||||
// NOTE(review): $result is whatever the last route handler left behind; if no
// handler ran, pg_free_result() sees an unset variable and raises a warning —
// confirm whether an isset() guard is wanted.
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
pg_close($dbconn);
}
?>
|
||||
43
php/single.php
Normal file
43
php/single.php
Normal file
@@ -0,0 +1,43 @@
|
||||
<?php
// single.php — JSON detail record(s) for one camera, looked up by ?camid.
require_once __DIR__ . '/common.php';

header('Content-Type: application/json');

$dbconn = getDBConnection();

// Bail out early when no camera id was supplied.
if (!isset($_GET['camid'])) {
    echo json_encode(array("error" => "No camid specified"));
    exit;
}

$camid = $_GET['camid'];

// hydro/airport may be NULL in the table; coalesce them to false up front.
$query = "SELECT *, COALESCE(hydro, false) as hydro, COALESCE(airport, false) as airport FROM cams WHERE camid = $1";

// Bind $camid rather than interpolating it into the SQL.
$result = pg_query_params($dbconn, $query, array($camid))
    or die('Query failed: ' . pg_last_error());

$rows = array();
while (($record = pg_fetch_array($result, null, PGSQL_ASSOC)) !== false) {
    // Postgres hands booleans back as 't'/'f'; normalize to real booleans.
    $record['hydro'] = ($record['hydro'] === 't' || $record['hydro'] === true);
    $record['airport'] = ($record['airport'] === 't' || $record['airport'] === true);
    $rows[] = $record;
}

// Output the ORIGINAL full array (including errorcode) to the client.
echo json_encode($rows);

// Free resultset
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
69
php/stormdata.php
Normal file
69
php/stormdata.php
Normal file
@@ -0,0 +1,69 @@
|
||||
<?php
// stormdata.php — JSON-POST router: dispatches on the payload's request_type
// to per-dataset handlers defined in common.php.
require_once __DIR__ . '/common.php';

if ($_SERVER['REQUEST_METHOD'] === 'POST') {
$input_data = null;
$request_type = null;
// Content-Type may arrive under either server key depending on the SAPI.
$contentType = trim(strtolower($_SERVER['HTTP_CONTENT_TYPE'] ?? $_SERVER['CONTENT_TYPE'] ?? ''));

// Only application/json request bodies are accepted.
if (strpos($contentType, 'application/json') === 0) {
$raw_post_data = file_get_contents('php://input');

// NOTE(review): send_error() comes from common.php and is assumed to emit the
// response and terminate the script — confirm, since code below relies on
// not falling through after a failed validation.
if ($raw_post_data === false || $raw_post_data === '') {
send_error(400, 'Received empty request body or could not read input.', "Error: Could not read php://input or it was empty.");
}

$input_data = json_decode($raw_post_data, true);

if (json_last_error() !== JSON_ERROR_NONE) {
send_error(400, 'Invalid JSON payload received.', 'JSON Decode Error: ' . json_last_error_msg() . " | Raw data snippet: " . substr($raw_post_data, 0, 100));
} elseif (!is_array($input_data)) {
send_error(400, 'Invalid JSON payload: Expected a JSON object.', "JSON Decode Warning: Result is not an array. Data: " . print_r($input_data, true));
} else {
$request_type = $input_data['request_type'] ?? null;
}
} else {
send_error(415, 'Unsupported Media Type. This endpoint requires application/json.', "Unsupported Media Type Received: " . $contentType);
}

// Distinguish "field missing from valid JSON" from other failure modes.
if ($request_type === null) {
if (is_array($input_data) && !isset($input_data['request_type'])) {
send_error(400, 'Missing "request_type" field within the request payload.');
} else {
send_error(400, 'Missing required parameter: request_type (or processing error).');
}
}

$dbconn = getDBConnection();

// Dispatch to the matching dataset handler (all defined in common.php).
switch ($request_type) {
case 'ohgo':
handle_ohgo_request($dbconn, $input_data);
break;
case 'ohgonopoly':
handle_ohgo_request_no_poly($dbconn, $input_data);
break;
case 'power':
handle_power_request($dbconn, $input_data);
break;
case 'powernopoly':
handle_power_request_no_poly($dbconn, $input_data);
break;
case 'wupoly':
handle_wu_request_poly($dbconn, $input_data);
break;
case 'campoly':
handle_cam_request($dbconn, $input_data);
break;
default:
send_error(400, 'Invalid request_type specified: ' . htmlspecialchars($request_type));
break;
}
} else {
// Non-POST request: advertise the allowed method and stop.
http_response_code(405);
header('Allow: POST');
header('Content-Type: application/json; charset=utf-8');
echo json_encode(['error' => 'Invalid request method. Only POST is allowed.']);
exit;
}
?>
|
||||
43
php/update_field.php
Normal file
43
php/update_field.php
Normal file
@@ -0,0 +1,43 @@
|
||||
<?php
// update_field.php — toggles a whitelisted boolean column (hydro/airport)
// on a camera record.
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

// Get POST data. Fix: null-coalescing avoids undefined-index warnings on
// PHP 8 when a key is missing; an absent key falls through to the
// validation failure below exactly as an empty value did before.
$camid = $_POST['camid'] ?? '';
$field = $_POST['field'] ?? '';
$value = $_POST['value'] ?? '';

// Validate inputs
if (empty($camid) || empty($field)) {
    echo json_encode(['success' => false, 'message' => 'Invalid input']);
    exit;
}

// Whitelist the column name — $field is interpolated into the SQL below,
// so only known boolean columns are permitted.
if (!in_array($field, ['hydro', 'airport'])) {
    echo json_encode(['success' => false, 'message' => 'Invalid field']);
    exit;
}

// Convert to proper boolean for PostgreSQL.
// JavaScript sends true/false as the strings 'true' or 'false'.
$value_bool = ($value === 'true');

// Update the field value; PostgreSQL accepts 't'/'f' for boolean values.
$query = "UPDATE cams SET $field = $1 WHERE camid = $2";
$result = pg_query_params($dbconn, $query, array($value_bool ? 't' : 'f', $camid));

if ($result) {
    echo json_encode(['success' => true]);
} else {
    $error = pg_last_error($dbconn);
    echo json_encode(['success' => false, 'message' => $error]);
}

// Closing connection
if (isset($dbconn)) {
    pg_close($dbconn);
}
?>
|
||||
82
php/ver.php
Normal file
82
php/ver.php
Normal file
@@ -0,0 +1,82 @@
|
||||
<?php
|
||||
require_once __DIR__ . '/common.php';
|
||||
|
||||
$dbconn = getDBConnection();
|
||||
|
||||
//no gets: emit the pzone polygons for CWA RLX as a GeoJSON FeatureCollection.
// (The original comment said "point outage info", but the query below selects
// from pzone, transformed to WGS84.)
if(empty(array_diff_key($_GET, array('service' => '')))) {
$result = pg_query($dbconn,
"SELECT jsonb_build_object('type', 'FeatureCollection','features', jsonb_agg(features.feature)) FROM (SELECT jsonb_build_object('type', 'Feature','geometry', ST_AsGeoJSON(ST_Transform(geom, 4326))::jsonb,'properties', to_jsonb(properties) - 'geom') AS feature FROM (SELECT *FROM pzone where cwa ='RLX') AS properties) AS features") or die('Query failed: ' . pg_last_error());
// NOTE(review): $resultArray[0] is read without checking pg_fetch_all()'s
// return value; aggregate-only selects return one row, but confirm.
$resultArray = pg_fetch_all($result);
header('Content-Type: application/json; charset=utf-8');
echo($resultArray[0]['jsonb_build_object']);
pg_free_result($result);
}
|
||||
|
||||
// Route: ?lsrslist — dump the simplever tracking table as a JSON array.
// Fix: $array is initialized, so an empty table produces "[]" instead of an
// undefined-variable warning and "null".
if (isset($_GET['lsrslist'])) {
    $result = pg_query($dbconn,"SELECT * from simplever") or die('Query failed: ' . pg_last_error());
    $array = array();
    while ($line = pg_fetch_array($result, null, PGSQL_ASSOC)) {
        $array[] = $line;
    }
    echo json_encode($array);
    pg_free_result($result);
}
|
||||
|
||||
// Route: ?reset — wipe the simplever tracking table.
if (isset($_GET['reset'])) {
$result = pg_query($dbconn,"truncate simplever") or die('Query failed: ' . pg_last_error());
// NOTE(review): TRUNCATE returns no rows, so pg_fetch_all() yields false and
// this echo prints nothing — confirm whether a status payload was intended.
$resultArray = pg_fetch_all($result);
echo($resultArray);
}
|
||||
|
||||
// Route: ?lsrs&zone=Z[&lsr=N][&dir=D] — bump or reset a zone's LSR counter.
// Fix: the inner gate previously re-tested isset($_GET['lsrs']) (always true
// inside this handler) and then read $_GET['dir'] unconditionally, leaving
// $dir undefined with a warning when 'dir' was absent. 'dir' is now checked
// directly and defaults to 0, which takes the same reset branch the old
// undefined value fell into.
if (isset($_GET['lsrs'])) {
    if (isset($_GET['zone'])) {
        $zone = $_GET['zone'];
        // Counter seed for a zone seen for the first time; defaults to 1.
        if (isset($_GET['lsr'])) {
            $lsr = (int) $_GET['lsr'];
        } else {
            $lsr = 1;
        }
        $dir = isset($_GET['dir']) ? $_GET['dir'] : 0;

        if ($dir == 1) {
            // Increment the existing counter (or insert at $lsr on first sight).
            $result = pg_query_params($dbconn,"INSERT into simplever (zone,lsr) values ($1,$2) on conflict (zone) do update set lsr = (simplever.lsr + 1) where simplever.zone = $1", array($zone,$lsr)) or die('Query failed: ' . pg_last_error());
            $resultArray = pg_fetch_all($result);
        } else {
            // Reset the zone's counter to zero.
            $result = pg_query_params($dbconn,"INSERT into simplever (zone,lsr) values ($1,$2) on conflict (zone) do update set lsr = 0 where simplever.zone = $1", array($zone,$lsr)) or die('Query failed: ' . pg_last_error());
            $resultArray = pg_fetch_all($result);
        }
        pg_free_result($result);
    }
}
|
||||
|
||||
// Route: ?inc=true|false&id=N
// NOTE(review): despite the 'inc' name, this updates reports.hide — it is an
// exact duplicate of the ?hide route in this file; confirm whether 'inc' was
// meant to update a different column.
if (isset($_GET['inc'])) {
if ($_GET['inc'] == 'true') {
$hideflag = 'true';
} else {
$hideflag = 'false';
}
$id = (int) $_GET['id'];
$query = "UPDATE reports SET hide = $1 WHERE id = $2";
$result = pg_query_params($dbconn, $query, array($hideflag, $id)) or die('Query failed: ' . pg_last_error());
pg_free_result($result);
}
|
||||
|
||||
// Route: ?hide=true|false&id=N — toggle the hide flag on a storm report.
if (isset($_GET['hide'])) {
    // Anything other than the literal string 'true' is treated as false.
    $hideflag = ($_GET['hide'] == 'true') ? 'true' : 'false';
    $id = (int) $_GET['id'];
    $query = "UPDATE reports SET hide = $1 WHERE id = $2";
    $result = pg_query_params($dbconn, $query, array($hideflag, $id)) or die('Query failed: ' . pg_last_error());
    pg_free_result($result);
}
|
||||
|
||||
// Close database connection when needed
// ($dbconn is opened unconditionally at the top of this file.)
if (isset($dbconn)) {
pg_close($dbconn);
}
?>
|
||||
20
php/warntrack.php
Normal file
20
php/warntrack.php
Normal file
@@ -0,0 +1,20 @@
|
||||
<?php
// warntrack.php — GeoJSON feed of tracked warnings for office KRLX.
require_once __DIR__ . '/common.php';

$dbconn = getDBConnection();

//no gets, curent point outage info
// NOTE(review): comment inherited from the power scripts — this route actually
// returns warning-tracker polygons (NEW-segment warnings for KRLX whose end
// time is within the last 2400 minutes), not outage points. Confirm and fix
// the inherited comment upstream.
if(empty(array_diff_key($_GET, array('service' => '')))) {
$result = pg_query($dbconn,
"SELECT json_build_object('type', 'FeatureCollection','features', json_agg(json_build_object('type','Feature', 'geometry', ST_AsGeoJSON(nwspoly)::json,'properties',json_build_object('issue',issue,'endtime',endtime,'warntype',warntype,'etin',etin,'followups',followups,'followup',followup,'canexp',canexp,'warnexpired',warnexpired,'vtectext',vtectext,'office',office))order by issue desc)) FROM warntracker WHERE office = 'KRLX' and svstype = 'NEW' and EXTRACT(EPOCH FROM (current_timestamp - endtime ))/60 < 2400") or die('Query failed: ' . pg_last_error())*;
$resultArray = pg_fetch_all($result);
echo($resultArray[0]['json_build_object']);
}

// NOTE(review): if the route above did not run, $result is unset here and
// pg_free_result() raises a warning — confirm whether a guard is wanted.
pg_free_result($result);

// Close database connection when needed
if (isset($dbconn)) {
pg_close($dbconn);
}
?>
|
||||
555
power2.py
Normal file
555
power2.py
Normal file
@@ -0,0 +1,555 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
import pandas as pd
|
||||
from requests.packages.urllib3.exceptions import InsecureRequestWarning
|
||||
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
|
||||
|
||||
# Shared database connection used by every insert/update below.
# NOTE(review): credentials are hard-coded; presumably a local-only service.
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()

# Optional HTTP proxy; only referenced by commented-out code below.
proxies = {"http":"http://nws:nws@localhost:9000"}

# Seed tile names for each utility's outage-map quadtree. A tile that reports
# a cluster is subdivided by appending digits 0-3 (see kubra()/kubra_aep()).
aepwvnew = ['0320001','0320003','0320010','0320011','0320012','0320013','0320021','0320030','0320031','0320100','0320102','0320120']
aepohnew = ['0320013','0320010','0320011','0320012','0320003','0320001','0302322','0302233','0302232','0302223','0320102','0320100']
aepkynew = ['0320031','0320030','0320021','0320013','0320012','0320011','0320010','0320003','0320001']
firstenergy = ['030223','030232','032001','032003','032010','032012']
dominionva = ['0320121','0320120','0300103','0320102','0320101','0320100','0320031','0320013','0320011']
baltimore = ['0320011','0320100','0320101','0320013','0320102','0320103']
pepco = ['03201002','03201003','03201020','03201021']

# Metadata / currentState endpoints for each utility feed.
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvkubra = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
aepohkubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
aepkykubra = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/pa/interval_generation_data/metadata.json"
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
domvameta = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/metadata.json'
wvfemeta = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'

# Kubra cluster identifiers and data roots.
aepwvcluster = 'cluster-2'
aepohcluster = 'cluster-1'
aepkycluster = 'cluster-2'
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
aepbasewv = 'https://kubra.io/cluster-data/'
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"

aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
domvabase = 'https://outagemap.dominionenergy.com/resources/data/external/interval_generation_data/'
graysonrecc = 'https://outages.graysonrecc.com/data/outages.json'

kubrabase = 'https://kubra.io/cluster-data/'
firstenergybase = 'https://kubra.io/cluster-data/'
firstenergycluster = 'cluster-4'
firstenergyhex1 = 'f5f94943-5df4-4752-a0a7-8ef4baded880'
firstenergyhex2 = 'e2986f8a-5a69-4d2f-821c-e5db03932b68'

southcentraljson = 'https://outage.southcentralpower.com/data/outages.json'

# Accumulators filled by the scrapers below, then de-duplicated and inserted
# at the bottom of the script.
allcountyoutages = []
allkubraoutages = []
allaepkubracoutages = []
|
||||
|
||||
def remove_external_curly_braces(s):
    """Return the first element of a geometry sequence, or the input on error.

    Despite the name, this simply unwraps the first item (callers pass the
    polyline list from j['geom']['a']). If indexing fails (empty sequence,
    unsubscriptable input) the original value is returned unchanged.

    Fix: the original had ``except Error`` — an undefined name, so any
    indexing failure raised NameError instead of being handled — and
    concatenated the exception object to a str (TypeError).
    """
    try:
        return s[0]
    except Exception as e:
        print('error in curly ' + str(e))
        return s
|
||||
|
||||
|
||||
|
||||
def get_kubra_hexes(url):
    """Fetch a Kubra currentState document and return its two data-path hexes.

    Returns a (hex1, hex2) tuple parsed from the
    'cluster_interval_generation_data' path, or None if the response cannot
    be parsed (the exception is printed).
    """
    response = S.get(url)
    try:
        state = json.loads(response.text)
        data_path = state.get('data').get('cluster_interval_generation_data')
        segments = data_path.split('/')
        return (segments[2], segments[3])
    except Exception as exc:
        print(exc)
|
||||
|
||||
|
||||
|
||||
def kubra(baseurl,cluster,namearray,meta):
    # Recursively walk a Kubra outage-map quadtree, collecting leaf outage
    # records into the module-level allkubraoutages list.
    #
    # baseurl:  cluster-data root URL
    # cluster:  e.g. 'cluster-4'
    # namearray: tile names to fetch on this pass
    # meta:     currentState URL used to resolve the two hex path components
    #
    # Any exception aborts the remaining walk for this feed (printed only).
    try:
        data = get_kubra_hexes(meta)
        #print(data)
        hex1, hex2 = get_kubra_hexes(meta)

        newnamearray = []
        for i in namearray:
            # Tile files are sharded under the reversed last three digits
            # of the tile name.
            dir = str(i)
            dir = dir[-3:]
            dir = str(dir[::-1])
            url = baseurl + dir + '/' + hex1 + '/' + hex2 + '/public/' + cluster + '/' + i + '.json'
            outage = S.get(url)
            if outage.headers.get('Content-Type').startswith('application/json'):
                tempdata = json.loads(outage.text)
                for j in tempdata['file_data']:
                    outageinfo = None
                    try:
                        outageinfo = j.get('desc').get('cluster')
                    except:
                        # Record has no desc dict; skip it.
                        continue
                    if outageinfo == True:
                        # Cluster tile: queue its four children for the next pass.
                        for k in range(4):
                            newnamearray.append(str(i)+ str(k))
                    if outageinfo == False:
                        # Leaf outage record: keep it.
                        allkubraoutages.append(j)

        # Dedupe while preserving order, then recurse on any children found.
        newnamearray = list(dict.fromkeys(newnamearray))
        if len(newnamearray) > 0:
            kubra(baseurl,cluster,newnamearray,meta)
    except Exception as e:
        print(e)
|
||||
|
||||
|
||||
def kubra_aep(baseurl,cluster,namearray,meta):
    # Same quadtree walk as kubra(), but leaf records go to
    # allaepkubracoutages, and there is no outer try/except: exceptions
    # propagate to the caller (the driver code below wraps every call).
    data = get_kubra_hexes(meta)
    #print(data)
    hex1, hex2 = get_kubra_hexes(meta)
    newnamearray = []
    for i in namearray:
        # Shard directory: reversed last three digits of the tile name.
        dir = str(i)
        dir = dir[-3:]
        dir = str(dir[::-1])
        url = baseurl + dir + '/' + hex1 + '/' + hex2 + '/public/' + cluster + '/' + i + '.json'
        outage = S.get(url)

        if outage.headers.get('Content-Type').startswith('application/json'):
            tempdata = json.loads(outage.text)
            for j in tempdata['file_data']:
                outageinfo = None
                try:
                    outageinfo = j.get('desc').get('cluster')
                except:
                    continue
                if outageinfo == True:
                    # Cluster tile: descend into its four children next pass.
                    for k in range(4):
                        newnamearray.append(str(i)+ str(k))
                if outageinfo == False:
                    allaepkubracoutages.append(j)
                    #allkubraoutages.append(j)

    # Dedupe preserving order, then recurse on the newly discovered tiles.
    newnamearray = list(dict.fromkeys(newnamearray))
    if len(newnamearray) > 0:
        kubra_aep(baseurl,cluster,newnamearray,meta)
|
||||
|
||||
def insertkubra(data):
    """Upsert Kubra point-outage records into the power table.

    data: iterable of Kubra 'file_data' outage dicts. Records without a
    customer count, or without exactly one point geometry, are skipped.
    Conflicts on pointgeom update the mutable columns and refresh lastchange.

    Fix (consistency with insert_kubra_aep): j['desc']['cause'] can be absent
    or None, and the original ``.get('cause').get('EN-US')`` chain raised
    AttributeError, aborting the whole batch.
    """
    for j in data:
        try:
            custa = j.get('desc').get('cust_a').get('val')
        except:
            # No customers-affected count -> not a usable outage record.
            continue
        pointgeom = j.get('geom').get('p')
        if len(pointgeom) == 1:
            pointlatlon = polyline.decode(pointgeom[0])
            lat = pointlatlon[0][0]
            lon = pointlatlon[0][1]
        else:
            # Multi-point/empty geometries are not stored.
            continue
        areageom = j.get('geom').get('a')
        if areageom != None:
            areageom = remove_external_curly_braces(areageom)
        else:
            areageom = None
        cause = "Pending Investigation"  # Default to Pending if no cause is found
        cause_dict = j.get('desc').get('cause')
        if cause_dict:
            cause = cause_dict.get('EN-US')
        start = j.get('desc').get('start_time')
        if start != None:
            try:
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                # Some feeds omit the seconds field.
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M%z")
        etr = j.get('desc').get('etr')
        # Sentinel values mean "no estimated restoration time".
        if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
        if etr != None:
            try:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M%z")
        incid = j.get('desc').get('inc_id')
        crew_status = j.get('desc').get('crew_status').get('EN-US')
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
        val = (sql,vals)
        # print(val)
        cursor.execute(sql,vals)
        conn.commit()
|
||||
|
||||
def insert_kubra_aep(data):
    # Upsert AEP Kubra point-outage records into the power table.
    # Same shape as insertkubra(), but the cause field is guarded: AEP records
    # may omit 'cause', in which case "Pending Investigation" is stored.
    for j in data:
        try:
            custa = j.get('desc').get('cust_a').get('val')
        except:
            # No customers-affected count -> skip the record.
            continue
        pointgeom = j.get('geom').get('p')
        if len(pointgeom) == 1:
            pointlatlon = polyline.decode(pointgeom[0])
            lat = pointlatlon[0][0]
            lon = pointlatlon[0][1]
        else:
            # Only single-point geometries are stored.
            continue
        areageom = j.get('geom').get('a')
        if areageom != None:
            areageom = remove_external_curly_braces(areageom)
        else:
            areageom = None
        cause = "Pending Investigation" # Default to Pending if no cause is found
        cause_dict = j.get('desc').get('cause')
        if cause_dict:
            cause = cause_dict.get('EN-US')
        #cause = j.get('desc').get('cause')
        start = j.get('desc').get('start_time')
        if start != None:
            try:
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                # Some feeds omit the seconds field.
                start = datetime.strptime(start,"%Y-%m-%dT%H:%M%z")
        etr = j.get('desc').get('etr')
        # Sentinel values mean "no estimated restoration time".
        if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
        if etr != None:
            try:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%z")
            except ValueError:
                etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M%z")
        incid = j.get('desc').get('inc_id')
        # NOTE(review): crew_status is not guarded like cause — a record with
        # no crew_status dict would raise AttributeError here. Confirm feed.
        crew_status = j.get('desc').get('crew_status').get('EN-US')
        current_timestamp = str(datetime.utcnow())
        sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
        val = (sql,vals)
        #print(val)
        cursor.execute(sql,vals)
        conn.commit()
|
||||
|
||||
|
||||
def remove_dupes(l):
    """Return l with duplicates removed, keeping the LAST occurrence of each.

    Works on unhashable items (the collected records are tuples of mixed
    values), so membership is checked against the remaining tail instead of
    using a set.
    """
    unique = []
    for index, item in enumerate(l):
        if item not in l[index + 1:]:
            unique.append(item)
    return unique
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def newaep(meta,namearray,baseurl):
    # Recursive walker for the older S3-hosted AEP outage maps. Tiles titled
    # 'Area Outage' are subdivided (suffixes 0-3); 'Outage Information'
    # records are leaf outages appended to allkubraoutages.
    newnamearray = []
    metainfo = json.loads(S.get(meta).text)
    metadir = metainfo['directory']
    for i in namearray:
        url = baseurl + metadir + '/outages/' + i + '.json'
        outage = S.get(url)
        if outage.headers.get('Content-Type').startswith('application/octet-stream'):
            tempdata = json.loads(outage.text)
            for j in tempdata['file_data']:
                outageinfo = None
                try:
                    outageinfo = j.get('title')
                except:
                    continue
                if outageinfo == 'Area Outage':
                    for k in range(4):
                        newnamearray.append(str(i)+ str(k))
                if outageinfo == 'Outage Information':
                    allkubraoutages.append(j)

    # Dedupe preserving order, then recurse on discovered child tiles.
    newnamearray = list(dict.fromkeys(newnamearray))
    if len(newnamearray) > 0:
        newaep(meta,newnamearray,baseurl)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def check_bad_offset(offset):
    """Strip the colon from a trailing UTC offset ("+05:00" -> "+0500").

    Timestamps whose offset already lacks a colon, or inputs that cannot be
    sliced (e.g. None), are returned unchanged.

    Fix: the original only returned inside the colon branch, so an offset
    WITHOUT a colon fell off the end of the function and became None.
    """
    try:
        if ":" == offset[-3:-2]:
            offset = offset[:-3]+offset[-2:]
        return offset
    except Exception:
        # Unsliceable input (None, int, ...): hand it back untouched.
        return offset
|
||||
|
||||
def fix_bad_timestamp(timestamp):
    """Parse an arbitrarily-formatted timestamp string via pandas.

    pandas.to_datetime tolerates the inconsistent offset/precision formats
    the co-op feeds emit; the parsed Timestamp is returned as-is.
    """
    return pd.to_datetime(timestamp)
|
||||
|
||||
# Shared HTTP session for all scrapers. Certificate verification is disabled
# (urllib3 InsecureRequestWarning is suppressed at import time above).
S = requests.Session()
S.verify = False
|
||||
|
||||
|
||||
|
||||
def southcentral():
    # Scrape South Central Power's point-outage JSON and upsert each record
    # into the power table. Uses its own session (proxy hookup commented out).
    Sp = requests.Session()
    Sp.verify = False
    #Sp.proxies.update(proxies)
    temp = Sp.get(southcentraljson).text
    outageinfo = json.loads(temp)
    # print(outageinfo)
    if len(outageinfo) != 0:

        for i in outageinfo:
            # outageRecID doubles as the pointgeom conflict key below.
            id = i.get('outageRecID')
            lat = i.get('outagePoint').get('lat')
            lon = i.get('outagePoint').get('lng')
            start = i.get('outageStartTime')
            end = i.get('outageEndTime')
            cause = i.get('cause')
            initial = i.get('customersOutInitially')
            now = i.get('customersOutNow')
            change = i.get('outageModifiedTime')
            crew = i.get('outageWorkStatus')
            # change = check_bad_offset(change)
            # start = check_bad_offset(start)
            # end = check_bad_offset(end)

            # Feed timestamps vary in format; pandas parsing handles them.
            if start != None:
                start = fix_bad_timestamp(start)
            if end != None:
                end = fix_bad_timestamp(end)
            if change != None:
                change = fix_bad_timestamp(change)
            # change = datetime.strptime(change,"%Y-%m-%dT%H:%M:%S%f%z")

            current_timestamp = str(datetime.utcnow())
            sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
            vals = (lat,lon, start, current_timestamp, cause, now, crew, initial,id,change,'True',now,cause,start,crew,change)
            val = (sql,vals)
            cursor.execute(sql,vals)
            conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def grayson():
    # Scrape Grayson RECC's point-outage JSON (same record shape as
    # southcentral()) and upsert each record into the power table.
    outageinfo = json.loads(S.get(graysonrecc).text)
    if len(outageinfo) != 0:

        for i in outageinfo:
            # outageRecID doubles as the pointgeom conflict key below.
            id = i.get('outageRecID')
            lat = i.get('outagePoint').get('lat')
            lon = i.get('outagePoint').get('lng')
            start = i.get('outageStartTime')
            end = i.get('outageEndTime')
            cause = i.get('cause')
            initial = i.get('customersOutInitially')
            now = i.get('customersOutNow')
            change = i.get('outageModifiedTime')
            crew = i.get('outageWorkStatus')
            # change = check_bad_offset(change)
            # start = check_bad_offset(start)
            # end = check_bad_offset(end)

            # Feed timestamps vary in format; pandas parsing handles them.
            if start != None:
                start = fix_bad_timestamp(start)
            if end != None:
                end = fix_bad_timestamp(end)
            if change != None:
                change = fix_bad_timestamp(change)
            # change = datetime.strptime(change,"%Y-%m-%dT%H:%M:%S%f%z")

            current_timestamp = str(datetime.utcnow())
            sql = "INSERT INTO power (lat,lon,start,derivedstart,cause,outagen,crew_status,peakoutage,pointgeom,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s) on conflict (pointgeom) do update set (outagen, cause, start, crew_status,lastchange) = (%s, %s, %s, %s, %s)"
            vals = (lat,lon, start, current_timestamp, cause, now, crew, initial,id,change,'True',now,cause,start,crew,change)
            val = (sql,vals)
            cursor.execute(sql,vals)
            conn.commit()
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def check_outages(meta,namearray,baseurl):
    # Fetch point outages from an S3-hosted outage map and upsert them
    # directly into the power table (unlike the kubra path, which collects
    # first and inserts later). Non-recursive: only the given tile names.
    metainfo = json.loads(S.get(meta).text)
    metadir = metainfo['directory']
    for i in namearray:
        url = baseurl + metadir + '/outages/' + i + '.json'
        outage = S.get(url)
        if outage.headers.get('Content-Type').startswith('application/octet-stream'):
            tempdata = json.loads(outage.text)
            for j in tempdata['file_data']:
                id = j.get('id')
                try:
                    custa = j.get('desc').get('cust_a').get('val')
                except:
                    # No customers-affected count -> skip the record.
                    continue
                pointgeom = j.get('geom').get('p')
                if len(pointgeom) == 1:
                    pointlatlon = polyline.decode(pointgeom[0])
                    lat = pointlatlon[0][0]
                    lon = pointlatlon[0][1]
                else:
                    continue
                areageom = j.get('geom').get('a')
                if areageom != None:
                    areageom = remove_external_curly_braces(areageom)
                else:
                    areageom = None

                cause = j.get('desc').get('cause')
                start = j.get('desc').get('start')
                if start != None:
                    start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%f%z")
                etr = j.get('desc').get('etr')
                # Sentinel values mean "no estimated restoration time".
                if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
                if etr != None:
                    etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%f%z")
                incid = j.get('desc').get('inc_id')
                crew_status = j.get('desc').get('crew_status')
                current_timestamp = str(datetime.utcnow())
                sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,genericid,derivedstart,lastchange,active) values (%s,%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
                vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,id,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
                val = (sql,vals)
                cursor.execute(sql,vals)

    # NOTE(review): single commit after the whole walk — the original paste's
    # indentation is ambiguous here; confirm intended commit scope.
    conn.commit()
|
||||
|
||||
|
||||
|
||||
# ---- Scrape drivers. Each feed is attempted independently so one failing
# ---- source never blocks the others; errors are printed and swallowed.
try:
    southcentral()
except Exception as e:
    print(e)

try:
    grayson()
except Exception as e:
    print(e)
# Legacy S3-based AEP walkers, superseded by the Kubra endpoints below.
#try:
#    newaep(aepwvmeta,aepwvnew,aepwvbase)
#except Exception as e:
#    print(e)
#try:
#    newaep(aepohmeta,aepohnew,aepohbase)
#except Exception as e:
#    print(e)
#try:
#    newaep(aepkymeta,aepkynew,aepkybase)
#except Exception as e:
#    print(e)
try:
    kubra_aep(kubrabase,aepwvcluster,aepwvnew,aepwvkubra)
except Exception as e:
    print(e)
try:
    kubra_aep(kubrabase,aepohcluster,aepohnew,aepohkubra)
except Exception as e:
    print(e)
try:
    kubra_aep(kubrabase,aepkycluster,aepkynew,aepkykubra)
except Exception as e:
    print(e)
try:
    #newaep(firstpowerwvmeta,firstpowerwvnew,firstpowerwvbase)
    kubra(firstenergybase,firstenergycluster,firstenergy,wvfemeta)
except Exception as e:
    print(e)
#try:
#    newaep(domvameta,dominionva,domvabase)
#except Exception as e:
#    print(e)

#kubra(kubrabase,aepwvcluster,aepwvnew,aepwvmeta)

# De-duplicate the collected record lists before insertion.
nodupe = remove_dupes(allcountyoutages)
nodupekubra = remove_dupes(allkubraoutages)
nodupeaepkubra = remove_dupes(allaepkubracoutages)
#print(nodupe)
|
||||
|
||||
def insertaep(data):
    # Debug/dry-run variant of insertkubra for the legacy S3 AEP feed: the
    # SQL statement and execute/commit are commented out, so records are
    # parsed and printed only.
    for j in data:
        try:
            custa = j.get('desc').get('cust_a').get('val')
        except:
            # No customers-affected count -> skip the record.
            continue
        pointgeom = j.get('geom').get('p')
        if len(pointgeom) == 1:
            pointlatlon = polyline.decode(pointgeom[0])
            lat = pointlatlon[0][0]
            lon = pointlatlon[0][1]
        else:
            continue
        areageom = j.get('geom').get('a')
        if areageom != None:
            areageom = remove_external_curly_braces(areageom)
        else:
            areageom = None
        cause = j.get('desc').get('cause')
        start = j.get('desc').get('start')
        if start != None:
            start = datetime.strptime(start,"%Y-%m-%dT%H:%M:%S%f%z")
        etr = j.get('desc').get('etr')
        # Sentinel values mean "no estimated restoration time".
        if etr == 'ETR-NULL' or etr == 'ETR-EXP': etr = None
        if etr != None:
            etr = datetime.strptime(etr,"%Y-%m-%dT%H:%M:%S%f%z")
        incid = j.get('desc').get('inc_id')
        crew_status = j.get('desc').get('crew_status')
        current_timestamp = str(datetime.utcnow())
        #sql = "INSERT INTO power (lat,lon,pointgeom,areageom,start,cause,outagen,crew_status,incidentid,peakoutage,etr,derivedstart,lastchange,active) values (%s,%s, %s, %s, %s, %s, %s, %s, %s ,%s, %s, %s, %s, %s) on conflict (pointgeom) do update set (outagen, cause, start, etr, crew_status,lastchange) = (%s, %s, %s, %s, %s, %s)"
        vals = (lat,lon,pointgeom,areageom, start, cause, custa, crew_status, incid, custa,etr,current_timestamp, current_timestamp,'True',custa, cause, start, etr, crew_status,current_timestamp)
        #val = (sql,vals)
        print(vals)
        #cursor.execute(sql,vals)
        #conn.commit()
|
||||
|
||||
|
||||
# Insert whatever each feed produced (insertaep is currently a dry run).
if len(nodupe) > 0:
    insertaep(nodupe)
if len(nodupekubra) > 0:
    insertkubra(nodupekubra)
if len(nodupeaepkubra) > 0:
    insert_kubra_aep(nodupeaepkubra)

# Post-processing: derive PostGIS geometries and fill in county/CWA/state
# attributes for newly inserted rows.
cursor.execute('UPDATE public.power SET realgeom = ST_SetSRID(ST_MakePoint(lon, lat), 4326) where (lat is not null and lon is not null and realgeom is null)')
cursor.execute('UPDATE public.power SET peakoutage = outagen where outagen > peakoutage')
cursor.execute('update public.power set county = county.countyname from public.county where ST_contains(county.geom,power.realgeom) and power.county is null')
cursor.execute('update public.power set cwa = fzone.cwa from public.fzone where ST_contains(fzone.geom,power.realgeom) and power.cwa is null')
cursor.execute('update public.power set state = county.state from public.county where ST_contains(county.geom,power.realgeom) and power.state is null')
cursor.execute('update public.power set startguess = least(start,derivedstart)')
cursor.execute('update public.power set realareageom = st_linefromencodedpolyline(areageom) where areageom is not null and realareageom is null')
# Stale rows lose their pointgeom so a returning outage re-inserts cleanly.
cursor.execute("update power set pointgeom = NULL where lastchange < now() - interval '2 hours'")
conn.commit()

# An outage is "active" if it was seen within the last half hour.
cursor.execute("update power set active = true where lastchange > now() - interval '30 minutes'")
cursor.execute("update power set active = false where lastchange < now() - interval '30 minutes'")
conn.commit()
#cursor.execute("delete from power where cwa != 'RLX'")
# Keep one year of history.
cursor.execute("delete from power where lastchange < now () - interval '365 days'")
conn.commit()

#print(allkubraoutages)
cursor.close()
conn.close()
|
||||
447
power3.py
Normal file
447
power3.py
Normal file
@@ -0,0 +1,447 @@
|
||||
import requests
|
||||
import polyline
|
||||
import json
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
from datetime import datetime, timezone
|
||||
from geojson import Point, Feature, FeatureCollection, dump
|
||||
import re
|
||||
import logging
|
||||
|
||||
# Configure logging: file (power3.log) plus console.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('power3.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# Shared database connection.
# NOTE(review): credentials are hard-coded; presumably a local-only service.
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
cursor = conn.cursor()
proxies = {"http":"http://nws:nws@localhost:9000"}

# Metadata / currentState endpoints for each utility feed.
aepohmeta = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
aepwvmeta = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json"
#firstpowerwvmeta = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/metadata.json"
aepkymeta = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/metadata.json'
wvfemeta = 'https://kubra.io/stormcenter/api/v1/stormcenters/6c715f0e-bbec-465f-98cc-0b81623744be/views/5ed3ddf1-3a6f-4cfd-8957-eba54b5baaad/currentState?preview=false'
aepwvkubrameta = "https://kubra.io/stormcenter/api/v1/stormcenters/6674f49e-0236-4ed8-a40a-b31747557ab7/views/8cfe790f-59f3-4ce3-a73f-a9642227411f/currentState?preview=false"
aepohkubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/9c0735d8-b721-4dce-b80b-558e98ce1083/views/9b2feb80-69f8-4035-925e-f2acbcf1728e/currentState?preview=false'
aepkykubrameta = 'https://kubra.io/stormcenter/api/v1/stormcenters/23dcd38e-2573-4e20-a463-959b11cae011/views/60f31606-5702-4a1e-a74c-08d866b7a6fa/currentState?preview=false'

# Data roots for the S3-hosted maps and co-op boundary JSON feeds.
aepwvbase = "http://outagemap.appalachianpower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
aepohbase = "http://outagemap.aepohio.com.s3.amazonaws.com/resources/data/external/interval_generation_data/"
#firstpowerwvbase = "https://s3.amazonaws.com/outages.sc4.firstenergycorp.com/resources/data/mdwv/interval_generation_data/"
aepkybase = 'http://outagemap.kentuckypower.com.s3.amazonaws.com/resources/data/external/interval_generation_data/'
graysoncounty = 'https://outages.graysonrecc.com/data/boundaries.json'
flemingjson = 'https://outage.fme.coop/data/boundaries.json'

# Candidate sources not yet integrated (GWT-RPC endpoints):
#buckeye rec
#https://outage.buckeyerec.coop/maps/OutageWebMap/maps/GWT.rpc
#washington
#https://weci.ebill.coop/woViewer/MapWiseWeb/GWT.rpc

# Accumulator of (customers_out, customers_served, county_name, state,
# company) tuples filled by every scraper below.
allcountyoutages = []

# Shared HTTP session (TLS verification left at its default here).
S = requests.Session()
|
||||
|
||||
|
||||
|
||||
|
||||
def fleming():
    """Collect per-county outage counts from Fleming-Mason Energy (KY)."""
    logger.info("Starting fleming()")
    state = 'KY'
    company = 'FLEM'
    try:
        resp = S.get(flemingjson)
        resp.raise_for_status()
        payload = json.loads(resp.text)
        boundaries = payload[0]['boundaries']
        for county in boundaries:
            record = (
                county.get('customersOutNow'),
                county.get('customersServed'),
                county.get('name'),
                state,
                company,
            )
            allcountyoutages.append(record)
        logger.info(f"Successfully processed {len(boundaries)} boundaries from fleming")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for fleming: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for fleming: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in fleming: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def bigsandy():
    """Collect per-county outage counts from Big Sandy RECC (OH)."""
    logger.info("Starting bigsandy()")
    state = 'OH'
    company = 'BS'
    try:
        resp = S.get('https://outagemap.bigsandyrecc.com/data/boundaries.json')
        resp.raise_for_status()
        payload = json.loads(resp.text)
        boundaries = payload[0]['boundaries']
        for county in boundaries:
            record = (
                county.get('customersOutNow'),
                county.get('customersServed'),
                county.get('name'),
                state,
                company,
            )
            allcountyoutages.append(record)
        logger.info(f"Successfully processed {len(boundaries)} boundaries from bigsandy")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for bigsandy: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for bigsandy: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in bigsandy: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
def southcentralpower():
    """Collect per-county outage counts from South Central Power (OH)."""
    logger.info("Starting southcentralpower()")
    company = 'SCP'
    url = 'https://outage.southcentralpower.com/data/boundaries.json'
    Sp = requests.Session()
    # Sp.proxies.update(proxies)
    try:
        response = Sp.get(url)
        response.raise_for_status()
        payload = json.loads(response.text)
        state = 'OH'
        boundaries = payload[0]['boundaries']
        for county in boundaries:
            record = (
                county.get('customersOutNow'),
                county.get('customersServed'),
                county.get('name'),
                state,
                company,
            )
            allcountyoutages.append(record)
        logger.info(f"Successfully processed {len(boundaries)} boundaries from southcentralpower")
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed for southcentralpower: {e}")
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error for southcentralpower: {e}")
    except Exception as e:
        logger.error(f"Unexpected error in southcentralpower: {e}")
|
||||
|
||||
#wv https://kubra.io/data/e2ae0326-9912-436a-9355-eb2687e798b1/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json
|
||||
def ku_get_url():
    # Resolve the current LG&E/KU Kubra report URL. The public stormcenter
    # page embeds an instanceId, which is scraped with a regex, exchanged for
    # the view's currentState document, and turned into the final report
    # JSON URL. Returns the URL string, or None on any failure (logged).
    logger.info("Starting ku_get_url()")
    try:
        url = 'https://stormcenter.lge-ku.com/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40'
        r = requests.get(url)
        r.raise_for_status()
        x = re.search(r"instanceId: '(.*?)',", r.text)
        if not x:
            logger.error("Could not find instanceId in ku_get_url")
            return None
        urlcom = x.group(1)
        urlcom = 'https://kubra.io/stormcenter/api/v1/stormcenters/' + urlcom + '/views/a6cee9e4-312b-4b77-9913-2ae371eb860d/currentState?preview=false'
        stuff = S.get(urlcom)
        stuff.raise_for_status()
        jsonstuff = json.loads(stuff.text)
        interval_data = jsonstuff.get('data').get('interval_generation_data')
        if not interval_data:
            logger.error("Could not find interval_generation_data in ku_get_url")
            return None
        urlcom = 'https://kubra.io/' + interval_data + '/public/reports/1d6f7e68-e192-43c1-bfdc-d809333d8e40_report.json'
        logger.info("Successfully generated URL for ku")
        return urlcom
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in ku_get_url: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in ku_get_url: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in ku_get_url: {e}")
        return None
|
||||
|
||||
|
||||
def county_json(meta,url,jsonname):
    # Resolve an S3 outage map's current data directory from its metadata
    # document, then fetch and return the county report Response object.
    # Returns None on any failure (already logged) — callers must check.
    logger.info(f"Starting county_json for meta: {meta}")
    try:
        response = S.get(meta)
        response.raise_for_status()
        metainfo = json.loads(response.text)
        metadir = metainfo['directory']
        url = url + metadir + jsonname
        outage = S.get(url)
        outage.raise_for_status()
        logger.info(f"Successfully fetched county JSON from {url}")
        return outage
    except requests.exceptions.RequestException as e:
        logger.error(f"Request failed in county_json: {e}")
        return None
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error in county_json: {e}")
        return None
    except KeyError as e:
        logger.error(f"Key error in county_json (missing 'directory'): {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error in county_json: {e}")
        return None
|
||||
|
||||
|
||||
|
||||
def ku():
    """Collect LG&E/KU per-county outage counts into allcountyoutages.

    Pulls the report JSON resolved by ku_get_url() and flattens three county
    groupings out of its nested 'areas' tree. Each appended tuple is
    (customers_out, customers_served, County, state, utility).

    Fix: ku_get_url() returns None on failure (and logs it); the original
    passed that None straight to S.get() and crashed. Also renamed the local
    list, which shadowed this function's own name.
    """
    counties = []
    url = ku_get_url()
    if url is None:
        # ku_get_url already logged the failure; nothing to scrape this run.
        return
    data = S.get(url).text
    tempdata = json.loads(data)
    # NOTE(review): these fixed indexes encode the report's grouping layout —
    # confirm against the live feed if the report format changes.
    temp = tempdata['file_data']['areas'][2]['areas'][0]['areas']
    temp1 = tempdata['file_data']['areas'][2]['areas'][1]['areas']
    temp2 = tempdata['file_data']['areas'][1]['areas'][0]['areas']

    for group in (temp, temp1, temp2):
        counties.extend(group)
    for o in counties:
        outageinfo = o['cust_a']['val'],o['cust_s'],o['name'].capitalize(),o['state'],o['utility']
        allcountyoutages.append(outageinfo)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def grayson():
    """Collect per-county outage counts from Grayson RECC (KY)."""
    company = 'GRE'
    response = S.get(graysoncounty)
    if response.headers.get('Content-Type').startswith('application/json'):
        payload = json.loads(response.text)
        state = 'KY'
        for county in payload[0]['boundaries']:
            record = (
                county.get('customersOutNow'),
                county.get('customersServed'),
                county.get('name'),
                state,
                company,
            )
            allcountyoutages.append(record)
|
||||
|
||||
|
||||
def aep_county_vawv(meta,url,jsonname):
    """Collect AEP Appalachian Power county outage totals for WV and VA.

    Fix: county_json() returns None on any fetch/parse failure; the original
    dereferenced outage.headers unconditionally and raised AttributeError.
    """
    company = 'AEP'
    outage = county_json(meta,url,jsonname)
    if outage is None:
        # county_json already logged the failure.
        return
    if outage.headers.get('Content-Type').startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        # NOTE(review): grouping index [2] assumed to be WV, [1] VA — confirm
        # against the live feed.
        state = 'WV'
        for j in tempdata['file_data']['areas'][0]['areas'][2]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name'), state, company
            allcountyoutages.append(outageinfo)
        state = 'VA'
        for j in tempdata['file_data']['areas'][0]['areas'][1]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
|
||||
|
||||
def aep_county_oh(meta, url, jsonname):
    """Scrape AEP Ohio county outages from a Stormcenter report.

    Appends (outages, served, county, state, company) tuples to the
    module-level allcountyoutages list.
    """
    company = 'AEP'
    state = 'OH'
    outage = county_json(meta, url, jsonname)
    # county_json() returns None on fetch/parse failure; skip this provider
    # instead of raising AttributeError on outage.text.
    if outage is None:
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)
def aep_county_ky(meta, url, jsonname):
    """Scrape AEP Kentucky county outages from a Stormcenter report.

    Appends (outages, served, county, state, company) tuples to the
    module-level allcountyoutages list.
    """
    company = 'AEP'
    state = 'KY'
    outage = county_json(meta, url, jsonname)
    # county_json() returns None on fetch/parse failure; skip this provider
    # instead of raising AttributeError on outage.text.
    if outage is None:
        return
    tempdata = json.loads(outage.text)
    for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
        outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
        allcountyoutages.append(outageinfo)
def firstenergy_county(meta, url, jsonname):
    """Scrape FirstEnergy WV county outages from a Stormcenter report.

    Appends (outages, served, county, state, company) tuples to the
    module-level allcountyoutages list.
    """
    company = 'FE'
    state = 'WV'
    outage = county_json(meta, url, jsonname)
    # county_json() returns None on fetch/parse failure; skip this provider
    # instead of raising AttributeError on outage.headers.
    if outage is None:
        return
    # Default the missing header to '' so .startswith() cannot hit None.
    content_type = outage.headers.get('Content-Type') or ''
    if content_type.startswith('application/octet-stream'):
        tempdata = json.loads(outage.text)
        for j in tempdata['file_data']['areas'][0]['areas'][0]['areas']:
            outageinfo = j.get('cust_a').get('val'), j.get('cust_s'), j.get('area_name').capitalize(), state, company
            allcountyoutages.append(outageinfo)
def get_kubra_hexes(url):
    """Return the two deployment hex tokens from a Kubra metadata endpoint.

    The metadata JSON exposes a path like ".../<hex>/<hex>/..." in
    data.cluster_interval_generation_data; the tokens at positions 2 and 3
    of that path are needed to build report URLs.

    Returns:
        tuple[str, str]: the two hex tokens.

    Raises:
        ValueError: if the endpoint does not answer with JSON.  (Previously
        the function fell off the end and returned None, which surfaced in
        callers as an opaque "NoneType is not subscriptable" TypeError.)
    """
    outage = S.get(url)
    content_type = outage.headers.get('Content-Type') or ''
    if not content_type.startswith('application/json'):
        raise ValueError(f"Kubra metadata endpoint did not return JSON: {url}")
    tempdata = json.loads(outage.text)
    bothhex = tempdata.get('data').get('cluster_interval_generation_data')
    hexes = bothhex.split('/')
    return hexes[2], hexes[3]
def kubra_fe(baseurl1, baseurl2, meta):
    """Scrape FirstEnergy WV county-level outages from a Kubra report.

    Resolves the deployment hex via get_kubra_hexes(), downloads the report,
    and appends one (outages, served, county, state, company) tuple per
    county-keyed area to the module-level allcountyoutages list.
    """
    company = 'FE'
    state = 'WV'
    hexes = get_kubra_hexes(meta)
    response = S.get(f"{baseurl1}{hexes[1]}{baseurl2}")
    if response.headers.get('Content-Type').startswith('application/json'):
        report = json.loads(response.text)
        for area in report['file_data']['areas']:
            if area.get('key') == "county":
                allcountyoutages.append(
                    (
                        area.get('cust_a').get('val'),
                        area.get('cust_s'),
                        area.get('name').capitalize(),
                        state,
                        company,
                    )
                )
def kubra_aep(baseurl1, baseurl2, meta, company='AEP'):
    """Scrape AEP county outages from a Kubra report.

    Resolves the deployment hex via get_kubra_hexes(), downloads the report,
    and delegates record extraction to process_outage_data().
    """
    hexes = get_kubra_hexes(meta)
    report_url = f"{baseurl1}{hexes[1]}{baseurl2}"
    response = S.get(report_url)
    if response.headers.get('Content-Type').startswith('application/json'):
        process_outage_data(json.loads(response.text), company)
def process_outage_data(data, company):
    """
    Identifies the data structure and loops through the county-level data.

    Handles both Kubra report layouts:
      * "state" layout: top-level areas are states, each holding a nested
        county list;
      * "county" layout: the top-level list is already the county list.

    Each county record is appended to the module-level allcountyoutages
    list as an (outages, served, county, state, company) tuple.

    Args:
        data (dict): The parsed JSON data as a Python dictionary.
        company (str): Utility code stored with each record.
    """
    def _append_county(county):
        # Shared per-county handler (previously duplicated in both branches).
        if county.get("key") == "county":
            outageinfo = county.get('cust_a').get('val'), county.get('cust_s'), county.get('name').capitalize(), county.get('state'), company
            allcountyoutages.append(outageinfo)

    # Navigate to the primary list of areas.
    primary_areas = data.get("file_data", {}).get("areas", [])

    # If the list is empty, there's nothing to process.
    if not primary_areas:
        print("No 'areas' data found.")
        return

    # The key of the first item tells us which layout we were given.
    first_item_key = primary_areas[0].get("key")

    if first_item_key == "state":
        # Format 1: loop through each state's nested county list.
        for state_area in primary_areas:
            for county in state_area.get("areas", []):
                _append_county(county)
    elif first_item_key == "county":
        # Format 2: the primary list is already the county list.
        for county in primary_areas:
            _append_county(county)
    else:
        print("Unknown data format. Could not find 'state' or 'county' key.")
# Run every provider scraper in turn; one provider failing must not stop
# the others, so each job is isolated in its own try/except.
scrape_jobs = [
    ("kubra_fe for FE WV", kubra_fe,
     ('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', wvfemeta)),
    ("kubra_aep for AEP WV", kubra_aep,
     ('https://kubra.io/data/', '/public/reports/7929429f-635d-4761-b6c7-78f646cef3c2_report.json', aepwvkubrameta)),
    ("kubra_aep for AEP OH", kubra_aep,
     ('https://kubra.io/data/', '/public/reports/1bc6bd19-2315-4548-980a-6df73b93b355_report.json', aepohkubrameta)),
    ("kubra_aep for AEP KY", kubra_aep,
     ('https://kubra.io/data/', '/public/reports/8c3b0b30-c9e8-4e8f-8b0d-999c568bb085_report.json', aepkykubrameta)),
    ("grayson", grayson, ()),
    ("ku", ku, ()),
    ("southcentralpower", southcentralpower, ()),
    ("bigsandy", bigsandy, ()),
    ("fleming", fleming, ()),
]
for job_label, job_func, job_args in scrape_jobs:
    try:
        logger.info(f"Attempting {job_label}")
        job_func(*job_args)
    except Exception as e:
        logger.error(f"Error in {job_label}: {e}")
# Timestamp stored with every row inserted during this run.
current_timestamp = str(datetime.utcnow())

# 1. Build the full parameter list so a single executemany() can batch the
#    insert. Tuple order must match the SQL placeholders below.
all_values = []
for i in allcountyoutages:
    val = (i[0], i[1], i[2], i[3], current_timestamp, i[4])
    all_values.append(val)

# 2. Define the SQL statement ONCE.
#    "update" is a reserved keyword in PostgreSQL, so the column name must
#    be double-quoted or the INSERT fails with a syntax error.
sql = 'INSERT INTO countyoutages (outages, served, county, state, "update", company) VALUES (%s, %s, %s, %s, %s, %s)'

# 3. Execute the command ONCE with all the data.
if all_values:  # Only execute if there's data to insert
    try:
        cursor.executemany(sql, all_values)
        conn.commit()  # Commit after successful execution
        logger.info(f"Successfully inserted {len(all_values)} records into the database")
    except Exception as e:
        logger.error(f"Database error during insert: {e}")
        conn.rollback()  # Rollback in case of error
else:
    logger.warning("No data to insert into the database")
# Back-fill the forecast-office code (cwa) for any new rows, matching on
# county name + state against the county reference table.
cursor.execute('update countyoutages set cwa = county.cwa from county where county.countyname = countyoutages.county and county.state = countyoutages.state and countyoutages.cwa is null')

conn.commit()

# Drop rows that could not be mapped to a forecast office.
#cursor.execute("delete from countyoutages where cwa != 'RLX'")
cursor.execute("delete from countyoutages where cwa is null")
# NOTE(review): if this retention delete is ever re-enabled, the "update"
# column name must be double-quoted -- it is a reserved word in PostgreSQL.
#cursor.execute("delete from countyoutages where update < now () - interval '365 days'")
conn.commit()

#print(allcountyoutages)

# Release the database handles now that all writes are committed.
cursor.close()
conn.close()
192
providers/additional_functions.py
Normal file
192
providers/additional_functions.py
Normal file
@@ -0,0 +1,192 @@
|
||||
|
||||
|
||||
import psycopg2
|
||||
import psycopg2.extensions
|
||||
import requests
|
||||
from PIL import Image
|
||||
from io import BytesIO
|
||||
import numpy as np
|
||||
import math
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
|
||||
|
||||
# Maps an (R, G, B) pixel from the IEM NEXRAD N0Q composite palette to its
# reflectivity value in dBZ, in 0.5 dBZ steps.
# NOTE(review): (255, 255, 255) appears twice (-80 and 59); Python keeps the
# LAST occurrence, so white resolves to 59 dBZ and the -80 entry is dead --
# confirm which mapping was intended.
color_to_dbz = {
    (0, 0, 0): -60,(255, 255, 255): -80, (133, 113, 143): -32, (133, 114, 143): -31.5, (134, 115, 141): -31,
    (135, 117, 139): -30.5, (135, 118, 139): -30, (136, 119, 137): -29.5, (137, 121, 135): -29,
    (137, 122, 135): -28.5, (138, 123, 133): -28, (139, 125, 132): -27.5, (139, 126, 132): -27,
    (140, 127, 130): -26.5, (141, 129, 128): -26, (141, 130, 128): -25.5, (142, 131, 126): -25,
    (143, 132, 124): -24.5, (143, 133, 124): -24, (144, 135, 123): -23.5, (145, 136, 121): -23,
    (145, 137, 121): -22.5, (146, 139, 119): -22, (147, 141, 117): -21.5, (150, 145, 83): -21,
    (152, 148, 87): -20.5, (155, 151, 91): -20, (157, 154, 96): -19.5, (160, 157, 100): -19,
    (163, 160, 104): -18.5, (165, 163, 109): -18, (168, 166, 113): -17.5, (170, 169, 118): -17,
    (173, 172, 122): -16.5, (176, 175, 126): -16, (178, 178, 131): -15.5, (183, 184, 140): -15,
    (186, 187, 144): -14.5, (189, 190, 148): -14, (191, 193, 153): -13.5, (194, 196, 157): -13,
    (196, 199, 162): -12.5, (199, 202, 166): -12, (202, 205, 170): -11.5, (204, 208, 175): -11,
    (210, 212, 180): -10.5, (207, 210, 180): -10, (201, 204, 180): -9.5, (198, 201, 180): -9,
    (195, 199, 180): -8.5, (192, 196, 180): -8, (189, 193, 180): -7.5, (185, 190, 180): -7,
    (182, 187, 180): -6.5, (179, 185, 180): -6, (176, 182, 180): -5.5, (173, 179, 180): -5,
    (170, 176, 180): -4.5, (164, 171, 180): -4, (160, 168, 180): -3.5, (157, 165, 180): -3,
    (154, 162, 180): -2.5, (151, 160, 180): -2, (148, 157, 180): -1.5, (145, 154, 180): -1,
    (148, 155, 181): -0.5, (144, 152, 180): 0, (140, 149, 179): 0.5, (136, 146, 178): 1,
    (128, 140, 176): 1.5, (124, 137, 175): 2, (120, 134, 174): 2.5, (116, 131, 172): 3,
    (112, 128, 171): 3.5, (108, 125, 170): 4, (103, 121, 169): 4.5, (99, 118, 168): 5,
    (95, 115, 167): 5.5, (91, 112, 166): 6, (87, 109, 164): 6.5, (79, 103, 162): 7,
    (75, 100, 161): 7.5, (71, 97, 160): 8, (67, 94, 159): 8.5, (65, 91, 158): 9,
    (67, 97, 162): 9.5, (69, 104, 166): 10, (72, 111, 170): 10.5,
    (74, 118, 174): 11, (77, 125, 178): 11.5, (79, 132, 182): 12, (81, 139, 187): 12.5,
    (86, 153, 195): 13, (89, 159, 199): 13.5, (91, 166, 203): 14, (94, 173, 207): 14.5,
    (96, 180, 212): 15, (98, 187, 216): 15.5, (101, 194, 220): 16, (103, 201, 224): 16.5,
    (106, 208, 228): 17, (111, 214, 232): 17.5, (104, 214, 215): 18, (89, 214, 179): 18.5,
    (82, 214, 162): 19, (75, 214, 144): 19.5, (67, 214, 126): 20, (60, 214, 109): 20.5,
    (53, 214, 91): 21, (17, 213, 24): 21.5, (17, 209, 23): 22, (16, 205, 23): 22.5,
    (16, 200, 22): 23, (16, 196, 22): 23.5, (15, 188, 21): 24, (15, 183, 20): 24.5,
    (14, 179, 20): 25, (14, 175, 19): 25.5, (14, 171, 19): 26, (13, 166, 18): 26.5,
    (13, 162, 18): 27, (13, 158, 17): 27.5, (12, 153, 17): 28, (12, 149, 16): 28.5,
    (12, 145, 16): 29, (11, 136, 15): 29.5, (11, 132, 14): 30, (10, 128, 14): 30.5,
    (10, 124, 13): 31, (10, 119, 13): 31.5, (9, 115, 12): 32, (9, 111, 12): 32.5,
    (9, 107, 11): 33, (8, 102, 11): 33.5, (8, 98, 10): 34, (9, 94, 9): 34.5,
    (50, 115, 8): 35, (70, 125, 8): 35.5, (91, 136, 7): 36, (111, 146, 7): 36.5,
    (132, 157, 6): 37, (152, 168, 6): 37.5, (173, 178, 5): 38, (193, 189, 5): 38.5,
    (214, 199, 4): 39, (234, 210, 4): 39.5, (255, 226, 0): 40, (255, 216, 0): 40.5,
    (255, 211, 0): 41, (255, 206, 0): 41.5, (255, 201, 0): 42, (255, 196, 0): 42.5,
    (255, 192, 0): 43, (255, 187, 0): 43.5, (255, 182, 0): 44, (255, 177, 0): 44.5,
    (255, 172, 0): 45, (255, 167, 0): 45.5, (255, 162, 0): 46, (255, 153, 0): 46.5,
    (255, 148, 0): 47, (255, 143, 0): 47.5, (255, 138, 0): 48, (255, 133, 0): 48.5,
    (255, 128, 0): 49, (255, 0, 0): 49.5, (248, 0, 0): 50, (241, 0, 0): 50.5,
    (234, 0, 0): 51, (227, 0, 0): 51.5, (213, 0, 0): 52, (205, 0, 0): 52.5,
    (198, 0, 0): 53, (191, 0, 0): 53.5, (184, 0, 0): 54, (177, 0, 0): 54.5,
    (170, 0, 0): 55, (163, 0, 0): 55.5, (155, 0, 0): 56, (148, 0, 0): 56.5,
    (141, 0, 0): 57, (127, 0, 0): 57.5, (120, 0, 0): 58, (113, 0, 0): 58.5,
    (255, 255, 255): 59, (255, 245, 255): 59.5, (255, 234, 255): 60, (255, 223, 255): 60.5,
    (255, 212, 255): 61, (255, 201, 255): 61.5, (255, 190, 255): 62, (255, 179, 255): 62.5,
    (255, 157, 255): 63, (255, 146, 255): 63.5, (255, 117, 255): 64, (252, 107, 253): 64.5,
    (249, 96, 250): 65, (246, 86, 247): 65.5, (243, 75, 244): 66, (240, 64, 241): 66.5,
    (237, 54, 239): 67, (234, 43, 236): 67.5, (231, 32, 233): 68, (225, 11, 227): 68.5,
    (178, 0, 255): 69, (172, 0, 252): 69.5, (164, 0, 247): 70, (155, 0, 244): 70.5,
    (147, 0, 239): 71, (136, 0, 234): 71.5, (131, 0, 232): 72, (121, 0, 226): 72.5,
    (114, 0, 221): 73, (105, 0, 219): 73.5, (5, 236, 240): 74, (5, 235, 240): 74.5,
    (5, 234, 240): 75, (5, 221, 224): 75.5, (5, 220, 224): 76, (5, 219, 224): 76.5,
    (5, 205, 208): 77, (5, 204, 208): 77.5, (4, 189, 192): 78, (4, 188, 192): 78.5,
    (4, 187, 192): 79, (4, 174, 176): 79.5, (4, 173, 176): 80, (4, 158, 160): 80.5,
    (4, 157, 160): 81, (4, 156, 160): 81.5, (3, 142, 144): 82, (3, 141, 144): 82.5,
    (3, 140, 144): 83, (3, 126, 128): 83.5, (3, 125, 128): 84, (3, 111, 112): 84.5,
    (3, 110, 112): 85, (3, 109, 112): 85.5, (2, 95, 96): 86, (2, 94, 96): 86.5,
    (2, 79, 80): 87, (2, 78, 80): 87.5, (2, 77, 80): 88, (2, 63, 64): 88.5,
    (2, 62, 64): 89, (2, 61, 64): 89.5, (1, 48, 48): 90, (1, 47, 48): 90.5, (1, 32, 32): 91,
    (1, 31, 32): 91.5, (1, 30, 32): 92, (58, 103, 181): 92.5, (58, 102, 181): 93,
    (58, 101, 181): 93.5, (58, 100, 181): 94, (58, 99, 181): 94.5, (58, 98, 181): 95

}
def get_radar_data(lat, lon, time):
    """Fetch a 256x256 NEXRAD N0Q reflectivity tile centered on (lat, lon).

    Requests a 0.15-degree box around the point from the IEM WMS-T archive
    for the given timestamp.

    Args:
        lat, lon: Point of interest in decimal degrees.
        time: WMS time string, e.g. "2024-09-27T18:45:00Z".

    Returns:
        (radar_data, image): the tile as a numpy array (height, width,
        channels -- channel count depends on the PNG mode) and the PIL
        Image it was decoded from.

    Raises:
        requests.HTTPError: if the WMS request fails.
    """
    base_url = "https://mesonet.agron.iastate.edu/cgi-bin/wms/nexrad/n0q-t.cgi"
    params = {
        "service": "WMS", "version": "1.1.1", "request": "GetMap", "layers": "nexrad-n0q-wmst",
        "srs": "EPSG:4326", "bbox": f"{lon-0.075},{lat-0.075},{lon+0.075},{lat+0.075}",
        "width": "256", "height": "256", "format": "image/png", "time": time
    }
    response = requests.get(base_url, params=params)
    # Fail loudly on an HTTP error instead of letting PIL choke on an error
    # page with an opaque UnidentifiedImageError.
    response.raise_for_status()
    image = Image.open(BytesIO(response.content))
    radar_data = np.array(image)
    return radar_data, image
def find_max_reflectivity(lat, lon, start_time, end_time):
    """Scan archived radar tiles over a time window for the peak dBZ near a point.

    Steps through the window on the IEM archive's 5-minute cadence, maps
    every tile pixel through color_to_dbz, and tracks the highest value.

    Args:
        lat, lon: Point of interest in decimal degrees.
        start_time, end_time: datetime bounds of the window.

    Returns:
        (max_reflectivity, max_reflectivity_time): the highest dBZ seen and
        the WMS time string of the frame it occurred in.  If no pixel maps
        above 0 dBZ, returns (0, None).
    """
    # Snap outward to the 5-minute grid: one step before the (floored)
    # start, one step after the (floored) end.
    start_dt = start_time - timedelta(minutes=5) - timedelta(minutes=start_time.minute % 5, seconds=start_time.second, microseconds=start_time.microsecond)
    end_dt = end_time + timedelta(minutes=5) - timedelta(minutes=end_time.minute % 5, seconds=end_time.second, microseconds=end_time.microsecond)

    # NOTE(review): the 0 floor means negative-dBZ (clear-air) returns are
    # never reported -- presumably intentional; confirm.
    max_reflectivity = 0
    max_reflectivity_time = None
    current_dt = start_dt
    while current_dt <= end_dt:
        time_str = current_dt.strftime("%Y-%m-%dT%H:%M:%SZ")
        radar_data, _ = get_radar_data(lat, lon, time_str)

        for y in range(radar_data.shape[0]):
            for x in range(radar_data.shape[1]):
                r, g, b = radar_data[y, x, :3]
                # Single dict lookup replaces the membership test + index;
                # "is not None" replaces the non-idiomatic "!= None".
                dbz_value = color_to_dbz.get((r, g, b))
                if dbz_value is not None and dbz_value > max_reflectivity:
                    max_reflectivity = dbz_value
                    max_reflectivity_time = time_str

        current_dt += timedelta(minutes=5)

    return max_reflectivity, max_reflectivity_time
#"update power set vtec = svr.vtec from svr where ST_Contains(svr.nwspoly,power.realgeom) and power.outagen > 50 and power.startguess> svr.issue and power.startguess < svr.endtime + (120 ||'minutes')::interval"
|
||||
#CHECK FOR point in svr polygon within 2 hours of polygon end
|
||||
#if exists, add vtec to vtec
|
||||
#run max radar reflectivity where vtec exists and populate lsrtime and lsrref
|
||||
|
||||
|
||||
conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
|
||||
cursor = conn.cursor()
|
||||
|
||||
sql = "update newpower set vtec = svr.vtec from svr where ST_Contains(svr.nwspoly,newpower.realgeom) and newpower.outagen > 5 and newpower.startguess > svr.issue and newpower.startguess < svr.endtime + (120 ||'minutes')::interval"
|
||||
cursor.execute(sql)
|
||||
conn.commit()
|
||||
|
||||
|
||||
#find potentially verifying reports and add ids to lsrids in svr
# Scratch note left from development (a bare string statement; has no
# runtime effect). The real statement follows below.
"""

UPDATE svr SET lsrids = array_cat( your_array_column, ARRAY[ (SELECT unnest(ARRAY[1, 2, 3]) EXCEPT SELECT unnest(your_array_column)) ] );


"""
# For warnings that ended within the last 3 days, merge the ids of outage
# points sharing the warning's VTEC into svr.lsrids, deduplicated.
sql = """WITH unique_ids AS (
SELECT svr.vtec, array_agg(DISTINCT newpower.id) AS new_ids
FROM svr
JOIN newpower ON svr.vtec = newpower.vtec
WHERE EXTRACT(EPOCH FROM (current_timestamp - svr.endtime ))/60/60/24 < 3
GROUP BY svr.vtec
)
UPDATE svr
SET lsrids = ARRAY(
SELECT DISTINCT unnest(array_cat(svr.lsrids, unique_ids.new_ids))
)
FROM unique_ids
WHERE svr.vtec = unique_ids.vtec
"""

cursor.execute(sql)
conn.commit()
cursor.execute("SELECT r.id, r.lat, r.lon, s.issue, s.endtime FROM newpower r JOIN svr s ON r.vtec = s.vtec WHERE r.vtec IS NOT NULL AND r.lsrref is null and s.issue > '2024-07-25' and outagen > 4 limit 50")
|
||||
|
||||
svrreports = cursor.fetchall()
|
||||
|
||||
for i in svrreports:
|
||||
repid = i[0]
|
||||
lat = i[1]
|
||||
lon = i[2]
|
||||
start_time = i[3]
|
||||
end_time = i[4]
|
||||
max_reflectivity, reflectivity_time = find_max_reflectivity(lat, lon, start_time, end_time)
|
||||
sql = "UPDATE newpower set lsrtime = %s, lsrref = %s where id = %s"
|
||||
vals = (reflectivity_time,max_reflectivity,repid)
|
||||
print(vals)
|
||||
cursor.execute(sql,vals)
|
||||
conn.commit()
|
||||
|
||||
"""
|
||||
|
||||
lat = 38.52513
|
||||
lon = -82.459656
|
||||
start_time = datetime.strptime("2024-09-27T18:48:00Z","%Y-%m-%dT%H:%M:%SZ")
|
||||
end_time = datetime.strptime("2024-09-27T19:15:00Z","%Y-%m-%dT%H:%M:%SZ")
|
||||
max_reflectivity, reflectivity_time = find_max_reflectivity(lat, lon, start_time, end_time)
|
||||
print("Maximum Reflectivity:", max_reflectivity)
|
||||
print("Time of Maximum Reflectivity:", reflectivity_time)
|
||||
|
||||
"""
|
||||
|
||||
|
||||
Reference in New Issue
Block a user