diff --git a/elev.sql b/elev.sql
deleted file mode 100644
index e69de29..0000000
diff --git a/gengeo.py b/gengeo.py
deleted file mode 100644
index ceb70c7..0000000
--- a/gengeo.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import psycopg2
-import json
-
-# Connect to your PostgreSQL database
-conn = psycopg2.connect(
-    host="localhost",
-    database="nws",
-    user="nws",
-    password="nws"
-)
-
-# Create a cursor object
-cur = conn.cursor()
-
-# Execute the SQL query
-cur.execute("""
-SELECT json_build_object(
-    'type', 'FeatureCollection',
-    'features', json_agg(
-        json_build_object(
-            'type', 'Feature',
-            'geometry', ST_AsGeoJSON(geom)::json,
-            'properties', json_build_object(
-                'county', countyname,
-                'state', state,
-                'lat', lat,
-                'lon', lon
-            )
-        )
-    )
-)
-FROM county
-WHERE cwa = 'RLX';
-""")
-
-# Fetch the result
-geojson_result = cur.fetchone()[0]
-
-# Print the GeoJSON result
-outfile = open("rlxtest.json", "w")
-json.dump(geojson_result,outfile, indent=2)
-
-# Close the cursor and connection
-cur.close()
-conn.close()
-
-
diff --git a/geo.py b/geo.py
deleted file mode 100644
index c7dadaf..0000000
--- a/geo.py
+++ /dev/null
@@ -1,48 +0,0 @@
-#You can convert panoid to lat/lon using a free API call https://maps.googleapis.com/maps/api/streetview/metadata?pano=PANOID&key=YOURAPIKEY
-
-
-#https://maps.googleapis.com/maps/api/streetview/metadata?pano=onUr8119UohoEeRXfBNArQ&key=AIzaSyDNmQaLwMoVluAJ8PMIZZyMUfp3hlbsndw
-
-import requests
-import os
-import json
-import webbrowser
-
-
-S = requests.Session()
-apikey = 'AIzaSyDNmQaLwMoVluAJ8PMIZZyMUfp3hlbsndw'
-
-
-
-def geocheat(panoidurl):
-
-    query = requests.utils.urlparse(panoidurl).query
-    params = dict(x.split('=') for x in query.split('&'))
-
-    if 'panoid' in params:
-        panoid = params['panoid']
-
-        url = 'https://maps.googleapis.com/maps/api/streetview/metadata?pano='+panoid+'&key=AIzaSyDNmQaLwMoVluAJ8PMIZZyMUfp3hlbsndw'
-        dataresponse = json.loads(S.get(url).text)
-        #r = requests.get(url, timeout=3)
-        #if r.status_code == 200:
-        lat = dataresponse['location']['lat']
-        lon = dataresponse['location']['lng']
-
-
-        #print(lat,lon)
-        # print(r.content)
-        #print(dataresponse)
-        mapurl = "https://maps.google.com/maps?q=loc:" + str(lat) + "+" + str(lon)
-
-        #os.system("start \"\" + mapurl)
-        webbrowser.open(mapurl, new = 1)
-
-poop = True
-while poop:
-
-    cheatme = input("Enter URL with panoid: ")
-    geocheat(cheatme)
-
-
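For reference, a minimal and slightly more defensive sketch of the panoid-to-lat/lon lookup that the deleted geo.py performed. It assumes the same Street View metadata endpoint that geo.py called; the key comes from a hypothetical GOOGLE_MAPS_API_KEY environment variable instead of being hard-coded, and the request gets the timeout and status check that geo.py left commented out.

import os
import requests

def panoid_to_latlon(panoid, timeout=3):
    # Hypothetical environment variable; geo.py hard-coded the key instead.
    key = os.environ["GOOGLE_MAPS_API_KEY"]
    url = "https://maps.googleapis.com/maps/api/streetview/metadata"
    r = requests.get(url, params={"pano": panoid, "key": key}, timeout=timeout)
    if r.status_code != 200:
        return None
    meta = r.json()
    loc = meta.get("location")
    if not loc:
        return None
    # The metadata response carries the panorama's coordinates, as geo.py read them.
    return loc["lat"], loc["lng"]

# Example: panoid_to_latlon("onUr8119UohoEeRXfBNArQ") -> (lat, lon) or None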
diff --git a/gr2power.py b/gr2power.py
deleted file mode 100644
index 255354c..0000000
--- a/gr2power.py
+++ /dev/null
@@ -1,66 +0,0 @@
-import requests
-import json
-import psycopg2
-import psycopg2.extensions
-from datetime import datetime, timezone
-import re
-from shapely import wkt
-
-def generate_gr2_triangle_placefile(filename="power_outages_gr2.txt"):
-    """
-    Generates a GR2Analyst placefile using Triangles and Text for power outages.
-
-    Args:
-        filename (str): The name of the placefile to create.
-    """
-    try:
-        conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
-        cursor = conn.cursor()
-
-        cursor.execute("SELECT startguess,outagen,lastchange,st_astext(realareageom),lat,lon,cause, outagen FROM power WHERE cwa = 'RLX' and active = true")  # Include outagen in select
-        outages = cursor.fetchall()
-
-        with open(filename, "w") as pf:
-            pf.write("Refresh: 1\n")
-            pf.write("Threshold: 999 nautical_miles\n")
-            pf.write("Title: Power Outages (RLX CWA) - Triangles\n")
-            pf.write("Font: 1, 11, 0, \"Courier New\"\n")
-            pf.write("Color: 255 0 0\n")  # Red for triangles (outage area)
-
-            for outage in outages:
-                startguess, outagen_num_db, lastchange, realareageom_wkt, lat, lon, cause, outagen_display_num = outage  # Get outagen as outagen_display_num
-
-                # Format hover text (optional for triangles, but can add later if needed)
-                hover_text = f"Outage #{outagen_display_num}\\n"  # Use outagen_display_num for display
-                hover_text += f"Cause: {cause}\\n"
-                hover_text += f"Last Update: {lastchange.strftime('%Y-%m-%d %H:%M:%Z UTC')}"
-
-                # Triangle vertices - create a small triangle around the outage point
-                triangle_lat_offset = 0.02  # Adjust for triangle size (latitude offset)
-                triangle_lon_offset = 0.03  # Adjust for triangle size (longitude offset)
-
-                pf.write("Triangles:\n")
-                pf.write(f" {lat - triangle_lat_offset}, {lon - triangle_lon_offset}\n")  # Vertex 1: Southwest
-                pf.write(f" {lat + triangle_lat_offset}, {lon - triangle_lon_offset}\n")  # Vertex 2: Southeast
-                pf.write(f" {lat}, {lon + triangle_lon_offset}\n")  # Vertex 3: North
-                pf.write("End:\n")
-
-                # Blue text label for outage number
-                pf.write("Color: 0 0 255\n")  # Blue for text
-                pf.write(f"Text: {lat}, {lon}, 1, \"Outage #{outagen_display_num}\", \"{hover_text}\"\n")  # Include hover text
-
-            pf.write("End:\n")
-
-        print(f"GR2Analyst Triangle placefile '{filename}' generated successfully.")
-
-    except psycopg2.Error as db_error:
-        print(f"Database error: {db_error}")
-    except IOError as io_error:
-        print(f"File I/O error: {io_error}")
-    finally:
-        if conn:
-            conn.close()
-
-if __name__ == "__main__":
-    generate_gr2_triangle_placefile()
-    # generate_gr2_triangle_placefile("my_outages_triangle.txt")
\ No newline at end of file
diff --git a/ham.php b/ham.php
deleted file mode 100644
index 7e07f72..0000000
--- a/ham.php
+++ /dev/null
@@ -1,24 +0,0 @@
-
diff --git a/hamadmin.html b/hamadmin.html
deleted file mode 100644
index dffed9d..0000000
--- a/hamadmin.html
+++ /dev/null
@@ -1,62 +0,0 @@
-
-
-
- SRD Update
-
-
-
-
-
-
-

Form for updating bands that are currently being monitored/worked

-

Fill these out as pairs, then click Submit; you need to fill in at least one pair:

-
-
- -
- -
-
-
- -
- -
-
-
- -
- -
-
-
- -
- -
-
-
- -
- -
-
-
- -
- -
-
-
-
- -
-
-
-
-
-
-
-
-
diff --git a/ltg.html b/ltg.html
deleted file mode 100644
index 784297b..0000000
--- a/ltg.html
+++ /dev/null
@@ -1,481 +0,0 @@
-
-
-
- RLX Lightning Archive
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-Legend - -
- -
-
-
-
-
- Use the box-drawing tool in the top right so you don't have to enter these values manually
-
-
- -
-
- -
-
- -
-
- -
- -
- - - -
- - -
- -
-
- -
- -
- - - - - -
-
-
-
-
- -
-
- Cam List - Add Camera - WU obs - 5m ASOS obs - Power Outages - CoCoRaHS Remarks - Questions? Comments? - - - -
-
-
-
-
-
-
-
-
-
-
-
diff --git a/ltg.php b/ltg.php
deleted file mode 100644
index 5acf320..0000000
--- a/ltg.php
+++ /dev/null
@@ -1,27 +0,0 @@
-
-
-
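The body of ltg.php was lost in this paste, so the following is only a hedged sketch of the kind of bounding-box lightning query the RLX Lightning Archive page above implies. The table and column names (ltg, ltgtime, lat, lon) are hypothetical; the connection parameters match the other scripts in this diff.

import psycopg2

def query_lightning(minlat, maxlat, minlon, maxlon, start, end):
    # Hypothetical table/columns -- the real schema is not visible in this diff.
    conn = psycopg2.connect(host="localhost", database="nws", user="nws", password="nws")
    cur = conn.cursor()
    cur.execute(
        """SELECT lat, lon, ltgtime FROM ltg
           WHERE lat BETWEEN %s AND %s
             AND lon BETWEEN %s AND %s
             AND ltgtime BETWEEN %s AND %s
           ORDER BY ltgtime""",
        (minlat, maxlat, minlon, maxlon, start, end),
    )
    rows = cur.fetchall()
    cur.close()
    conn.close()
    return rows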
diff --git a/onedb.html b/onedb.html
deleted file mode 100644
index d5b19fb..0000000
--- a/onedb.html
+++ /dev/null
@@ -1,59 +0,0 @@
-
-
-
- Wunderground obs
-
-
-
-
-
-
-
-
-QPE 00L comes directly from WU; the other QPE values are derived and may be off if the time settings on an individual PWS are incorrect
-
-
-
-
-
-
-
-
diff --git a/onerain.py b/onerain.py
deleted file mode 100644
index 5d1c224..0000000
--- a/onerain.py
+++ /dev/null
@@ -1,152 +0,0 @@
-import time
-import requests
-import json
-import geojson
-import psycopg2
-import psycopg2.extensions
-from psycopg2.extras import Json
-conn = psycopg2.connect(host='localhost', database='nws', user='nws', password='nws')
-cursor = conn.cursor()
-
-
-#one rain sensor database with last obs and metadata
-#one rain sensor database with all obs
-
-#metsensor = [30,53,11,10,50,44,40,41]
-metsensor = [(3,'tempf'),(2,'raintotal'),(8,'humidity'),(7,'winddir'),(4,'windspd'),(5,'windgust')]
-rainsensor = [(2,'raintotal')]
-
-
-features = []
-alldata = []
-S = requests.Session()
-
-cursor.execute("SELECT siteid, sitetype from onerainsites")
-#WHERE (Active = True) and ((EXTRACT(EPOCH FROM (current_timestamp - lastob ))/60) > 8 or (lastob ISNULL))"
-allonerainsites = cursor.fetchall()
-
-
-
-def getData(siteid,sensorid):
-    apiget = 'https://wvdhsem.onerain.com/export/flot/?method=sensor&site_id=' + str(siteid) + '&device_id=' + str(sensorid)
-    #print(apiget)
-    dataresponse = json.loads(S.get(apiget).text)
-    #print(dataresponse)
-
-
-    return dataresponse
-
-
-
-
-# sql = 'INSERT INTO wuobs (stationid, dateutc, winddir, windspd, windgust, tempf, dewpt, humidity, pressure, preciptotal) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) ON CONFLICT (stationid,dateutc) DO NOTHING'
-# sql2 = 'UPDATE wusites SET lastob = %s, winddir = %s, windspd = %s, windgust= %s, tempf = %s, dewpt = %s, humidity = %s, pressure = %s, preciptotal = %s WHERE stationid = %s'
-# cursor.execute(sql,ob)
-# cursor.execute(sql2,ob2)
-# conn.commit()
-# setDerived(stationid)
-
-def setDerived(stationid):
-    data1 = (stationid,)
-    data = (stationid,stationid)
-    ago24 = "UPDATE wusites set ago24 = subquery.preciptotal, ago24time = subquery.dateutc from (select preciptotal,stationid,dateutc from wuobs WHERE (dateutc < timezone('utc',current_timestamp) - interval '23 hours') and (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
-    ago3 = "UPDATE wusites set ago3 = subquery.preciptotal, ago3time = subquery.dateutc from (select preciptotal,stationid,dateutc from wuobs WHERE (dateutc < timezone('utc',current_timestamp) - interval '2.5 hours') and (dateutc > timezone('utc',current_timestamp) - interval '3 hours') and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
-    ago6 = "UPDATE wusites set ago6 = subquery.preciptotal, ago6time = subquery.dateutc from (select preciptotal,stationid,dateutc from wuobs WHERE (dateutc < timezone('utc',current_timestamp) - interval '5.2 hours') and (dateutc > timezone('utc',current_timestamp) - interval '6 hours') and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
-    midnight = "UPDATE wusites set lastmidnight = subquery.preciptotal from (select preciptotal,stationid from wuobs WHERE (dateutc < timezone('UTC', current_date::timestamp at time zone 'US/Eastern')) and (dateutc > timezone('UTC', current_date::timestamp at time zone 'US/Eastern' - interval '40 minutes')) and stationid = %s order by dateutc desc limit 1) as subquery where wusites.stationid = %s"
-    windmax = "UPDATE wusites set windmax = subquery.windmax from (SELECT MAX(windgust) as windmax FROM wuobs where (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s) as subquery where wusites.stationid = %s"
-    maxt = "UPDATE wusites set maxt = subquery.maxt from (SELECT MAX(tempf) as maxt FROM wuobs where (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s) as subquery where wusites.stationid = %s"
-    mint = "UPDATE wusites set mint = subquery.mint from (SELECT MIN(tempf) as mint FROM wuobs where (dateutc > timezone('utc',current_timestamp) - interval '24 hours') and stationid = %s) as subquery where wusites.stationid = %s"
-
-
-
-    cursor.execute(ago24,data)
-    cursor.execute(ago6,data)
-    cursor.execute(ago3,data)
-    cursor.execute(windmax,data)
-    cursor.execute(midnight,data)
-    cursor.execute(maxt,data)
-    cursor.execute(mint,data)
-    conn.commit()
-    rain3 = "update wusites set rain3 = (case when (wusites.ago3time < timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal + wusites.lastmidnight - wusites.ago3) when (wusites.ago3time > timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal - wusites.ago3) end) where wusites.stationid = %s"
-    rain6 = "update wusites set rain6 = (case when (wusites.ago6time < timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal + wusites.lastmidnight - wusites.ago6) when (wusites.ago6time > timezone('UTC', current_date::timestamp at time zone 'US/Eastern') and wusites.stationid = %s) then (wusites.preciptotal - wusites.ago6) end) where wusites.stationid = %s"
-    rain24 = "update wusites set rain24 = (wusites.preciptotal + wusites.lastmidnight - wusites.ago24) where wusites.stationid = %s"
-    data2 = (stationid,stationid,stationid)
-    cursor.execute(rain3,data2)
-    cursor.execute(rain6,data2)
-    cursor.execute(rain24,data1)
-    conn.commit()
-
-
-
-for i in allonerainsites:
-    if i[1] == 'MET':
-        for j in metsensor:
-
-            tempdata = getData(i[0],j[0])
-            for p in tempdata['data']:
-                datafield = p[1]
-                datefield = int(p[0])/1000
-                ob = (i[0],datefield,int(j[0]),datafield)
-                sql = 'INSERT INTO onerainobs (siteid, dateutc, sensorid, data1) VALUES (%s,to_timestamp(%s),%s,%s) ON CONFLICT (siteid,sensorid,dateutc,data1) DO NOTHING'
-                cursor.execute(sql,ob)
-                conn.commit()
-    if i[1] == 'Rain':
-        for j in rainsensor:
-            tempdata = getData(i[0],j[0])
-            for p in tempdata['data']:
-                datafield = p[1]
-                datefield = int(p[0])/1000
-                ob = (i[0],datefield,int(j[0]),datafield)
-                sql = 'INSERT INTO onerainobs (siteid, dateutc, sensorid, data1) VALUES (%s,to_timestamp(%s),%s,%s) ON CONFLICT (siteid,sensorid,dateutc,data1) DO NOTHING'
-                cursor.execute(sql,ob)
-
-                conn.commit()
-
-
-
-
-for i in allonerainsites:
-    if i[1] == 'MET':
-        for j in metsensor:
-            sql = 'SELECT data1,dateutc from onerainobs where siteid = %s and sensorid = %s order by dateutc desc limit 1'
-            getob = (i[0],j[0])
-            cursor.execute(sql,getob)
-            #WHERE (Active = True) and ((EXTRACT(EPOCH FROM (current_timestamp - lastob ))/60) > 8 or (lastob ISNULL))"
-            obdata = cursor.fetchall()
-
-            if len(obdata) == 1:
-                for l in obdata:
-
-
-                    sensor = str(j[1])
-                    sensortime = str(j[1])+'time'
-                    sql = 'UPDATE onerainsites set ' + sensor + ' = %s, ' + sensortime + ' = %s where siteid = %s'
-                    updateob = (l[0],l[1],i[0])
-
-
-
-                    cursor.execute(sql,updateob)
-                    conn.commit()
-
-    if i[1] == 'Rain':
-
-        for j in rainsensor:
-            sql = 'SELECT data1,dateutc from onerainobs where siteid = %s and sensorid = %s order by dateutc desc limit 1'
-            getob = (i[0],j[0])
-            cursor.execute(sql,getob)
-            #WHERE (Active = True) and ((EXTRACT(EPOCH FROM (current_timestamp - lastob ))/60) > 8 or (lastob ISNULL))"
-            obdata = cursor.fetchall()
-
-            if len(obdata) == 1:
-                for l in obdata:
-
-
-                    sensor = str(j[1])
-                    sensortime = str(j[1])+'time'
-                    sql = 'UPDATE onerainsites set ' + sensor + ' = %s, ' + sensortime + ' = %s where siteid = %s'
-                    updateob = (l[0],l[1],i[0])
-
-
-
-                    cursor.execute(sql,updateob)
-                    conn.commit()
\ No newline at end of file
diff --git a/oneraindb.php b/oneraindb.php
deleted file mode 100644
index 9ef8c5e..0000000
--- a/oneraindb.php
+++ /dev/null
@@ -1,26 +0,0 @@
-
-
-
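For reference, the rain3/rain6/rain24 SQL in setDerived() above (and the QPE note in onedb.html) boils down to one rule: subtract the cumulative precip total from N hours ago, and if that earlier reading predates local midnight, add back the total recorded just before the midnight reset. A minimal sketch of that arithmetic in plain Python, assuming preciptotal is a daily accumulation that resets at local midnight (which is what the lastmidnight handling implies); the argument names are hypothetical.

def derived_rain(preciptotal, ago_total, ago_time, midnight_time, lastmidnight):
    """Rainfall over a trailing window ending now.

    preciptotal   -- current cumulative total for today
    ago_total     -- cumulative total N hours ago
    ago_time      -- timestamp of that earlier reading
    midnight_time -- most recent local midnight
    lastmidnight  -- total recorded just before the midnight reset
    """
    if ago_time < midnight_time:
        # Window straddles the reset: yesterday's remainder plus today's total.
        return preciptotal + lastmidnight - ago_total
    return preciptotal - ago_total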