fix again

2025-12-07 12:42:43 +00:00
parent a765aa23f4
commit d24587c321
13 changed files with 21 additions and 185 deletions

View File

@@ -16,11 +16,11 @@ from requests.packages.urllib3.exceptions import InsecureRequestWarning
import get_rpc_config_auto
# Import provider classes
-from base import BaseCountyProvider
-from kubra import KubraCountyProvider
-from simple import SimpleCountyJsonProvider
-from nisc import NiscCountyProvider
-from gwt_rpc import GwtRpcCountyProvider
+from providers.base import BaseCountyProvider
+from providers.kubra import KubraCountyProvider
+from providers.simple import SimpleCountyJsonProvider
+from providers.nisc import NiscCountyProvider
+from providers.gwt_rpc import GwtRpcCountyProvider
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)

View File

@@ -19,11 +19,11 @@ from requests.packages.urllib3.exceptions import InsecureRequestWarning
import get_rpc_config_auto
# Import provider classes
-from base import BaseProvider
-from kubra import KubraProvider
-from simple import SimpleJsonProvider
-from gwt_rpc import GwtRpcProvider
-from nisc import NiscHostedProvider
+from providers.base import BaseProvider
+from providers.kubra import KubraProvider
+from providers.simple import SimpleJsonProvider
+from providers.gwt_rpc import GwtRpcProvider
+from providers.nisc import NiscHostedProvider
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
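
Note: the old flat imports (from base import ...) only resolve when the providers directory itself is on sys.path, while the new providers.* form resolves from the repository root. A minimal sketch of the layout these imports assume follows; the providers/__init__.py and the entry-script names (taken from the later "from newpower" / "from newpower2" imports in this diff) are assumptions, not shown in this commit.

# Assumed layout (illustrative only):
#
#   newpower.py             # county-level entry script (name assumed)
#   newpower2.py            # point-level entry script (name assumed)
#   providers/
#       __init__.py         # may be empty; makes providers importable as a package
#       base.py
#       kubra.py
#       simple.py
#       nisc.py
#       gwt_rpc.py
#
# With this layout the entry scripts can use absolute package imports:
from providers.base import BaseProvider, BaseCountyProvider  # resolves from the repo root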

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -5,7 +5,7 @@ from urllib.parse import urlparse
from pyproj import Transformer
import get_rpc_config_auto
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
logger = logging.getLogger(__name__)
@@ -43,13 +43,12 @@ class GwtRpcBaseProvider:
         logger.info(f"Attempting Auto-Repair for {self.name}...")
-        try:
-            # This function needs to be defined in the main script context to save config
-            from newpower import update_provider_config as update_county_config
-        except ImportError:
-            from newpower2 import update_provider_config as update_point_config
-            update_county_config = update_point_config # Fallback
         try:
+            # This function needs to be defined in the main script context to save config.
+            # We import it here, inside the method, to avoid circular import errors at startup.
+            if isinstance(self, GwtRpcCountyProvider):
+                from newpower import update_provider_config
+            else:
+                from newpower2 import update_provider_config
             _, valid_headers, valid_cookies, valid_body = get_rpc_config_auto.fetch_live_data(self.map_url)
             if valid_headers and valid_body:
                 logger.info(f"Repair successful! Updating {self.name}.")
@@ -72,7 +71,7 @@ class GwtRpcBaseProvider:
                 self.session.cookies.set(cookie['name'], cookie['value'], domain=cookie['domain'], path=cookie['path'])
             # Save to disk for next time
-            update_county_config(self.name, self.config)
+            update_provider_config(self.name, self.config)
             return True
         except Exception as e:
             logger.error(f"Auto-repair failed: {e}")
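
Note: this hunk defers the update_provider_config import until the repair actually runs and dispatches on the provider type instead of catching ImportError. A rough sketch of why the import sits inside the method follows; the helper name and attribute are hypothetical, and only the module and function names mirror the diff.

def _save_repaired_config(provider):
    # Hypothetical helper illustrating the deferred-import pattern used above.
    # newpower / newpower2 import the provider classes at module load; importing
    # them back at the top of providers/gwt_rpc.py would create a circular import
    # while those modules are still initialising. Importing at call time avoids
    # the cycle, because both sides are fully loaded by the time auto-repair runs.
    if provider.is_county_level:  # assumption: stands in for isinstance(self, GwtRpcCountyProvider)
        from newpower import update_provider_config
    else:
        from newpower2 import update_provider_config
    update_provider_config(provider.name, provider.config)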

View File

@@ -4,7 +4,7 @@ import logging
import polyline
import mercantile
from datetime import datetime
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
logger = logging.getLogger(__name__)

View File

@@ -1,7 +1,7 @@
import logging
from datetime import datetime, timezone
from pyproj import Transformer
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
logger = logging.getLogger(__name__)

View File

@@ -1,6 +1,6 @@
import logging
from datetime import datetime
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
logger = logging.getLogger(__name__)

test.py (163 deletions)
View File

@@ -1,163 +0,0 @@
import json
import os

def decode_gwt_rpc(payload):
    """
    Decodes a GWT-RPC payload to extract outage data for Counties.
    """
    # 1. Clean the payload
    # GWT responses often start with //OK. We strip that.
    if payload.startswith("//OK"):
        payload = payload[4:]

    # 2. Parse the FULL payload as JSON
    # The GWT payload is structurally a JSON array: [stream_data..., [string_table], flags...]
    try:
        full_data = json.loads(payload)
    except json.JSONDecodeError as e:
        print(f"Error parsing payload JSON: {e}")
        return None

    # 3. Separate Stream and String Table
    # The String Table is a list of strings located near the end of the main array.
    # The "Stream" is everything before that string table.
    string_table = None
    stream_raw = []
    # Iterate through the parsed array to find the string table (which is a list)
    for item in full_data:
        if isinstance(item, list):
            string_table = item
            # Once we find the table, we assume the rest are flags and stop adding to stream
            break
        else:
            stream_raw.append(item)
    if not string_table:
        print("Error: String table not found in payload.")
        return None

    # 4. Normalize the Stream
    # The decoder logic relies on integers (1-based indices).
    # The raw stream might contain floats or strings that we need to cast or filter.
    stream = []
    for token in stream_raw:
        if isinstance(token, int):
            stream.append(token)
        elif isinstance(token, float):
            stream.append(int(token))
        elif isinstance(token, str):
            # Sometimes numeric values are sent as strings in the stream
            try:
                stream.append(int(float(token)))
            except ValueError:
                # If it's a non-numeric string token (like a cache ID), ignore it
                pass

    # 5. Decode Logic
    try:
        # Define the signatures we are looking for in the String Table
        REGION_SIG = "cc.nisc.oms.clientandserver.v2.pojo.Region/3192921568"
        INTEGER_SIG = "java.lang.Integer/3438268394"
        CATEGORY_KEY = "County"

        # Helper to find 1-based index
        def get_index(val):
            try: return string_table.index(val) + 1
            except ValueError: return 0

        region_type_id = get_index(REGION_SIG)
        integer_type_id = get_index(INTEGER_SIG)
        county_type_id = get_index(CATEGORY_KEY)

        if region_type_id == 0:
            print("Error: Region type signature not found in string table.")
            # Debug: Print first few strings to verify if signatures changed
            # print("Available strings:", string_table[:10])
            return None

        results = []
        i = 0
        stream_len = len(stream)
        # Iterate through the stream looking for Region objects
        while i < stream_len:
            if stream[i] == region_type_id:
                try:
                    # We found a Region. The next few integers define its properties.
                    # Pointer 'p' is relative to current index 'i'
                    p = i + 1

                    # --- Field 1: Total Served ---
                    # Logic: Value is valid if followed by Integer Type ID
                    served = 0
                    val1 = stream[p]
                    p += 1
                    if p < stream_len and stream[p] == integer_type_id:
                        served = val1
                        p += 1  # Skip type ID

                    # --- Field 2: Number Out ---
                    out = 0
                    val2 = stream[p]
                    p += 1
                    if p < stream_len and stream[p] == integer_type_id:
                        out = val2
                        p += 1  # Skip type ID

                    # --- Field 3: Name Index ---
                    name_idx = stream[p]
                    p += 1

                    # --- Field 4: Category Index ---
                    cat_idx = stream[p]

                    # Check if this is a County
                    if cat_idx == county_type_id:
                        name = "Unknown"
                        if 0 < name_idx <= len(string_table):
                            name = string_table[name_idx - 1]
                        percent = 0.0
                        if served > 0:
                            percent = (out / served) * 100
                        results.append({
                            "county": name,
                            "served": served,
                            "out": out,
                            "percent": percent
                        })
                except IndexError:
                    pass
            i += 1
        return results
    except Exception as e:
        print(f"Error during stream traversal: {e}")
        return None

if __name__ == "__main__":
    filename = "outage_data.txt"
    if os.path.exists(filename):
        with open(filename, "r", encoding="utf-8") as f:
            raw_content = f.read().strip()
        data = decode_gwt_rpc(raw_content)
        if data:
            # Sort A-Z
            data.sort(key=lambda x: x['county'])
            print(f"{'County':<20} | {'Served':>8} | {'Out':>8} | {'Percent':>8}")
            print("-" * 55)
            for row in data:
                print(f"{row['county']:<20} | {row['served']:>8} | {row['out']:>8} | {row['percent']:>7.2f}%")
        else:
            print("No data found.")
    else:
        print(f"File '{filename}' not found. Please create it and paste the payload.")
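
Note: for reference, the deleted decoder can be exercised against a hand-built payload. The sample below is synthetic (real NISC GWT-RPC responses are longer and their numeric layout can vary), but it has the shape decode_gwt_rpc expects: a stream of integers, then a string table, then trailing flags. The county name is made up for the example.

import json

# Synthetic payload: [stream..., [string table], trailing flags]. Indices into the
# string table are 1-based, so "Walworth" (the 4th entry) is referenced as 4.
sample = "//OK" + json.dumps([
    1, 5200, 2, 317, 2, 4, 3,  # Region marker, served, Integer sig, out, Integer sig, name idx, category idx
    [
        "cc.nisc.oms.clientandserver.v2.pojo.Region/3192921568",
        "java.lang.Integer/3438268394",
        "County",
        "Walworth",            # made-up county name
    ],
    7, 0,                      # trailing GWT flags; the decoder stops at the string table
])

print(decode_gwt_rpc(sample))
# [{'county': 'Walworth', 'served': 5200, 'out': 317, 'percent': 6.096...}]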