264 lines
12 KiB
Python
264 lines
12 KiB
Python
import logging
|
|
import json
|
|
from datetime import datetime, timezone, timedelta
|
|
from urllib.parse import urlparse
|
|
from pyproj import Transformer
|
|
|
|
import get_rpc_config_auto
|
|
from providers.base import BaseProvider, BaseCountyProvider
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
class GwtRpcBaseProvider:
    """Base class for GWT-RPC providers to share common logic like auto-repair.

    Holds the shared request plumbing (session headers/cookies taken from the
    provider config), the GWT-RPC POST logic, and the "auto-repair" flow that
    re-scrapes the live map page for fresh headers/cookies/body when a stored
    request goes stale.
    """

    def __init__(self, config, session):
        """Initialize from a provider config dict and a requests-style session.

        Config keys read here: name, map_url, state_filter, user_agent, cookies.
        """
        self.config = config
        self.session = session
        self.name = config.get('name', 'Unknown')
        self.map_url = config.get('map_url')
        self.state_filter = config.get('state_filter')
        # Minimum time between auto-repair attempts for this provider.
        self.AUTO_UPDATE_COOLDOWN_HOURS = 4

        # Set up session headers and cookies from config.
        self.session.headers.update({
            'User-Agent': config.get('user_agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'),
            'Accept': '*/*',
            'Sec-Fetch-Site': 'same-origin'
        })
        for cookie in config.get('cookies') or []:
            self.session.cookies.set(cookie['name'], cookie['value'],
                                     domain=cookie['domain'], path=cookie['path'])

    def attempt_auto_repair(self):
        """Re-scrape the live map page to refresh headers/cookies/body.

        Returns True when repair succeeded and the in-memory config and session
        were updated (and persisted to disk); False otherwise. Honors a cooldown
        so a persistently broken provider is not hammered.
        """
        if not self.map_url:
            return False

        last_update = self.config.get('last_auto_update')
        if last_update:
            try:
                last_dt = datetime.fromisoformat(last_update)
                # Legacy configs may store naive timestamps; assume UTC.
                if last_dt.tzinfo is None:
                    last_dt = last_dt.replace(tzinfo=timezone.utc)
                if datetime.now(timezone.utc) - last_dt < timedelta(hours=self.AUTO_UPDATE_COOLDOWN_HOURS):
                    logger.info(f"Skipping auto-repair for {self.name} (Cooldown active).")
                    return False
            except ValueError:
                pass  # Unparseable timestamp: treat as "never updated".

        logger.info(f"Attempting Auto-Repair for {self.name}...")
        try:
            # update_provider_config needs to be defined in the main script
            # context to save config. We import it here, inside the method, to
            # avoid circular import errors at startup.
            if isinstance(self, GwtRpcCountyProvider):
                from newpower import update_provider_config
            else:
                from newpower2 import update_provider_config

            _, valid_headers, valid_cookies, valid_body = get_rpc_config_auto.fetch_live_data(self.map_url)
            if valid_headers and valid_body:
                logger.info(f"Repair successful! Updating {self.name}.")
                # Drop hop-by-hop / browser-managed headers that must not be
                # replayed verbatim by a requests session.
                excluded = {'content-length', 'host', 'connection', 'cookie', 'accept-encoding', 'sec-ch-ua', 'sec-ch-ua-mobile', 'sec-ch-ua-platform', 'origin'}
                clean_headers = {k: v for k, v in valid_headers.items() if k.lower() not in excluded}
                clean_headers['Referer'] = self.map_url

                new_settings = {
                    'headers': clean_headers, 'body': valid_body, 'cookies': valid_cookies,
                    'user_agent': valid_headers.get('user-agent'),
                }

                # Update in-memory config for the current run.
                self.config.update(new_settings)
                self.config['last_auto_update'] = datetime.now(timezone.utc).isoformat()

                # Update session for the current run.
                self.session.cookies.clear()
                for cookie in valid_cookies:
                    self.session.cookies.set(cookie['name'], cookie['value'], domain=cookie['domain'], path=cookie['path'])

                # Save to disk for next time.
                update_provider_config(self.name, self.config)
                return True
        except Exception as e:
            logger.error(f"Auto-repair failed: {e}")
        return False

    def _fetch_rpc_data(self, is_retry=False):
        """POST the stored GWT-RPC body and return the decoded payload.

        Returns the JSON-decoded GWT response, or None when the config is
        incomplete or the request fails. On a GWT-level failure ("//EX" marker
        or HTTP 500) it attempts one auto-repair and retries exactly once.
        """
        url = self.config.get('url')
        headers = self.config.get('headers', {})
        body = self.config.get('body')
        if not url or not body:
            return None

        parsed_url = urlparse(url)
        origin = f"{parsed_url.scheme}://{parsed_url.netloc}"
        # GWT servers validate the Referer; fall back to the module base or origin.
        correct_referer = headers.get('Referer') or headers.get('x-gwt-module-base') or origin

        req_headers = headers.copy()
        req_headers['Content-Type'] = 'text/x-gwt-rpc; charset=UTF-8'
        req_headers['Referer'] = correct_referer

        # NOTE(review): verify=False disables TLS certificate checking. Some
        # utility endpoints ship invalid certs, but confirm this is intended.
        resp = self.session.post(url, headers=req_headers, data=body, verify=False)

        if "//EX" in resp.text or resp.status_code == 500:
            logger.error(f"GWT Failure for {self.name}.")
            if is_retry:
                return None
            if self.attempt_auto_repair():
                logger.info("Retrying fetch with new settings...")
                return self._fetch_rpc_data(is_retry=True)
            return None

        if not resp.ok:
            return None
        # Strip only the LEADING "//OK" success marker. The previous
        # `.replace('//OK', '')` removed the substring everywhere, which could
        # corrupt string-table entries that happen to contain "//OK".
        text = resp.text
        if text.startswith('//OK'):
            text = text[4:]
        return json.loads(text)
|
|
|
|
|
|
class GwtRpcCountyProvider(GwtRpcBaseProvider, BaseCountyProvider):
    """County-level outage summaries decoded from a GWT-RPC Region stream."""

    def fetch(self):
        """Fetch and parse county summaries.

        Returns a list of county summary dicts; [] on any failure (errors are
        logged, never raised to the caller).
        """
        try:
            data = self._fetch_rpc_data()
            if data:
                return self._extract_county_summary(data)
            return []
        except Exception as e:
            logger.error(f"County fetch error for {self.name}: {e}")
            return []

    def _extract_county_summary(self, data_list):
        """Decode Region records from a raw GWT-RPC payload.

        A GWT-RPC response carries a string table (the single nested list) plus
        a flat stream of integer tokens that reference it (1-based indices).
        We scan the stream for Region type markers and read the fields that
        follow. Returns a list of
        {'county', 'state', 'company', 'outages', 'served'} dicts.
        """
        try:
            string_table = next((item for item in data_list if isinstance(item, list)), None)
            if not string_table:
                return []

            stream_raw = [item for item in data_list if not isinstance(item, list)]
            # Keep only numeric tokens. int(float(...)) also accepts numeric
            # strings containing a decimal point; the previous plain int(token)
            # raised ValueError on e.g. "1.5" and aborted the whole parse.
            stream = [int(float(token)) for token in stream_raw
                      if isinstance(token, (int, float, str)) and str(token).replace('.', '', 1).isdigit()]

            REGION_SIG = "cc.nisc.oms.clientandserver.v2.pojo.Region/3192921568"
            INTEGER_SIG = "java.lang.Integer/3438268394"
            CATEGORY_KEY = "County"

            def get_index(val):
                # GWT string-table references are 1-based; 0 means "absent".
                try:
                    return string_table.index(val) + 1
                except ValueError:
                    return 0

            region_type_id = get_index(REGION_SIG)
            integer_type_id = get_index(INTEGER_SIG)
            county_type_id = get_index(CATEGORY_KEY)

            if region_type_id == 0:
                return []

            results = []
            i = 0
            while i < len(stream):
                if stream[i] == region_type_id:
                    try:
                        p = i + 1
                        served = 0
                        out = 0

                        # Optional boxed Integer: served customers. Only
                        # advance the pointer if the Integer marker follows.
                        if p + 1 < len(stream) and stream[p + 1] == integer_type_id:
                            served = stream[p]
                            p += 2

                        # Optional boxed Integer: customers out. Only advance
                        # the pointer if the Integer marker follows.
                        if p + 1 < len(stream) and stream[p + 1] == integer_type_id:
                            out = stream[p]
                            p += 2

                        name_idx, cat_idx = stream[p], stream[p + 1]

                        if cat_idx == county_type_id:
                            name = string_table[name_idx - 1] if 0 < name_idx <= len(string_table) else "Unknown"
                            results.append({'county': name, 'state': self.state_filter, 'company': self.name, 'outages': out, 'served': served})
                        i = p + 1  # Advance main loop counter past this processed region
                    except IndexError:
                        pass  # Truncated record at end of stream: skip it.
                i += 1
            return results
        except Exception as e:
            logger.error(f"Could not parse county summary for {self.name}: {e}")
            return []
|
|
|
|
|
|
class GwtRpcProvider(GwtRpcBaseProvider, BaseProvider):
    """Point-level outage provider: decodes individual Outage records from a
    GWT-RPC payload and converts projected coordinates to WGS84 lat/lon."""

    def __init__(self, config, session):
        """Extend the base setup with a coordinate transformer and state bounds.

        config['epsg'], when present, names the projected CRS the provider's
        coordinates arrive in.
        """
        super().__init__(config, session)
        self.transformer = None
        # Coarse bounding boxes used to discard coordinates that land outside
        # the provider's service state (bad transforms or junk records).
        self.STATE_BOUNDS = {
            'WV': {'lat_min': 37.0, 'lat_max': 40.7, 'lon_min': -82.7, 'lon_max': -77.7},
            'OH': {'lat_min': 38.4, 'lat_max': 42.0, 'lon_min': -84.9, 'lon_max': -80.5},
            'KY': {'lat_min': 36.4, 'lat_max': 39.2, 'lon_min': -89.6, 'lon_max': -81.9},
            'IA': {'lat_min': 40.3, 'lat_max': 43.6, 'lon_min': -96.7, 'lon_max': -90.1}
        }
        if config.get('epsg'):
            try:
                # always_xy=True keeps (x, y) -> (lon, lat) axis order.
                self.transformer = Transformer.from_crs(f"EPSG:{config['epsg']}", "EPSG:4326", always_xy=True)
            except Exception:
                # Was a bare `except:`, which would also swallow SystemExit
                # and KeyboardInterrupt.
                logger.error(f"EPSG Error for {self.name}")

    def fetch(self):
        """Fetch and parse point outages.

        Returns a list of outage dicts; [] on any failure (errors are logged,
        never raised to the caller).
        """
        try:
            data = self._fetch_rpc_data()
            if data:
                return self._extract_outages(data)
            return []
        except Exception as e:
            logger.error(f"Fetch error {self.name}: {e}")
            return []

    def _extract_outages(self, data_list):
        """Decode Outage records from a raw GWT-RPC payload.

        The payload carries a string table (the single nested list) and a flat
        stream of integer tokens. Each record starting with the Outage type id
        is followed by: customer count, crew-status string ref, cause string
        ref, the ETR as a (high, low) 32-bit pair, the start time as a
        (high, low) pair, and projected x/y coordinates. Returns a list of
        outage dicts with WGS84 coordinates.
        """
        results = []
        try:
            string_table = next((item for item in data_list if isinstance(item, list)), None)
            if not string_table:
                return []

            stream_raw = [item for item in data_list if not isinstance(item, list)]
            stream = [int(token) for token in stream_raw if isinstance(token, (int, float))]

            # The Outage class signature hash varies by server build; match by keyword.
            OUTAGE_SIG_KEYWORD = ".pojo.Outage/"
            outage_sig_full = next((s for s in string_table if OUTAGE_SIG_KEYWORD in s), None)
            if not outage_sig_full:
                return []

            # String-table references are 1-based.
            outage_type_id = string_table.index(outage_sig_full) + 1

            i = 0
            while i < len(stream):
                if stream[i] == outage_type_id:
                    try:
                        p = i + 1
                        outagen = stream[p]; p += 1          # customers affected
                        crew_status_idx = stream[p]; p += 1  # string-table ref
                        cause_idx = stream[p]; p += 1        # string-table ref
                        etr_high = stream[p]; p += 1
                        etr_low = stream[p]; p += 1; p += 1  # extra token skipped
                        start_high = stream[p]; p += 1
                        start_low = stream[p]; p += 1; p += 1  # extra token skipped
                        coord_x = stream[p]; p += 1
                        coord_y = stream[p]; p += 1

                        lat, lon = None, None
                        if self.transformer and coord_x and coord_y:
                            try:
                                lon, lat = self.transformer.transform(coord_x, coord_y)
                                if not self._is_valid(lat, lon):
                                    lat, lon = None, None
                            except Exception:
                                pass  # Bad coordinates: leave lat/lon as None.

                        # Compare against None explicitly: `if lat and lon`
                        # would wrongly discard an exact-0.0 coordinate.
                        if lat is not None and lon is not None:
                            # Java longs arrive as two 32-bit halves; the low
                            # half can be negative, so mask it before combining
                            # (plain `| low` corrupts the value when negative).
                            start_ms = (start_high << 32) | (start_low & 0xFFFFFFFF)
                            etr_ms = (etr_high << 32) | (etr_low & 0xFFFFFFFF)
                            start_time = datetime.fromtimestamp(start_ms / 1000, tz=timezone.utc) if start_ms > 0 else None
                            etr_time = datetime.fromtimestamp(etr_ms / 1000, tz=timezone.utc) if etr_ms > 0 else None

                            cause = string_table[cause_idx - 1].strip() if 0 < cause_idx <= len(string_table) else "Unknown"
                            crew_status = string_table[crew_status_idx - 1].strip() if 0 < crew_status_idx <= len(string_table) else "Unknown"

                            results.append({
                                'incidentid': f"{self.name}-{lat:.5f}-{lon:.5f}", 'utility': self.name,
                                'lat': lat, 'lon': lon, 'pointgeom': f"{lat:.5f},{lon:.5f}",
                                'start': start_time, 'etr': etr_time, 'outagen': outagen,
                                'cause': cause, 'crew_status': crew_status,
                                'last_change': datetime.now(timezone.utc)
                            })
                    except (IndexError, TypeError):
                        pass  # Truncated/malformed record: skip it.
                i += 1
            return results
        except Exception as e:
            logger.error(f"Could not parse point outages for {self.name}: {e}")
            return []

    def _is_valid(self, lat, lon):
        """Return True when (lat, lon) is plausible for the configured state.

        Permissive by design: a missing filter or unknown state passes everything.
        """
        if not self.state_filter:
            return True
        b = self.STATE_BOUNDS.get(self.state_filter)
        if not b:
            return True
        return b['lat_min'] <= lat <= b['lat_max'] and b['lon_min'] <= lon <= b['lon_max']