diff --git a/newpower.py b/newpower.py
index fca17ee..62de15b 100644
--- a/newpower.py
+++ b/newpower.py
@@ -16,11 +16,11 @@
 from requests.packages.urllib3.exceptions import InsecureRequestWarning
 import get_rpc_config_auto
 
 # Import provider classes
-from base import BaseCountyProvider
-from kubra import KubraCountyProvider
-from simple import SimpleCountyJsonProvider
-from nisc import NiscCountyProvider
-from gwt_rpc import GwtRpcCountyProvider
+from providers.base import BaseCountyProvider
+from providers.kubra import KubraCountyProvider
+from providers.simple import SimpleCountyJsonProvider
+from providers.nisc import NiscCountyProvider
+from providers.gwt_rpc import GwtRpcCountyProvider
 
 requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
diff --git a/newpower2.py b/newpower2.py
index 93ff3b7..f5d3059 100644
--- a/newpower2.py
+++ b/newpower2.py
@@ -19,11 +19,11 @@
 from requests.packages.urllib3.exceptions import InsecureRequestWarning
 import get_rpc_config_auto
 
 # Import provider classes
-from base import BaseProvider
-from kubra import KubraProvider
-from simple import SimpleJsonProvider
-from gwt_rpc import GwtRpcProvider
-from nisc import NiscHostedProvider
+from providers.base import BaseProvider
+from providers.kubra import KubraProvider
+from providers.simple import SimpleJsonProvider
+from providers.gwt_rpc import GwtRpcProvider
+from providers.nisc import NiscHostedProvider
 
 requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
diff --git a/providers/__pycache__/base.cpython-310.pyc b/providers/__pycache__/base.cpython-310.pyc
new file mode 100644
index 0000000..da4bd0a
Binary files /dev/null and b/providers/__pycache__/base.cpython-310.pyc differ
diff --git a/providers/__pycache__/gwt_rpc.cpython-310.pyc b/providers/__pycache__/gwt_rpc.cpython-310.pyc
new file mode 100644
index 0000000..f99c856
Binary files /dev/null and b/providers/__pycache__/gwt_rpc.cpython-310.pyc differ
diff --git a/providers/__pycache__/kubra.cpython-310.pyc b/providers/__pycache__/kubra.cpython-310.pyc
new file mode 100644
index 0000000..c8a252a
Binary files /dev/null and b/providers/__pycache__/kubra.cpython-310.pyc differ
diff --git a/providers/__pycache__/nisc.cpython-310.pyc b/providers/__pycache__/nisc.cpython-310.pyc
new file mode 100644
index 0000000..7de96ef
Binary files /dev/null and b/providers/__pycache__/nisc.cpython-310.pyc differ
diff --git a/providers/__pycache__/simple.cpython-310.pyc b/providers/__pycache__/simple.cpython-310.pyc
new file mode 100644
index 0000000..ba1795c
Binary files /dev/null and b/providers/__pycache__/simple.cpython-310.pyc differ
diff --git a/base.py b/providers/base.py
similarity index 100%
rename from base.py
rename to providers/base.py
diff --git a/gwt_rpc.py b/providers/gwt_rpc.py
similarity index 96%
rename from gwt_rpc.py
rename to providers/gwt_rpc.py
index 99b7958..9ea22f6 100644
--- a/gwt_rpc.py
+++ b/providers/gwt_rpc.py
@@ -5,7 +5,7 @@
 from urllib.parse import urlparse
 from pyproj import Transformer
 import get_rpc_config_auto
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
 
 logger = logging.getLogger(__name__)
 
@@ -43,13 +43,12 @@ class GwtRpcBaseProvider:
 
         logger.info(f"Attempting Auto-Repair for {self.name}...")
         try:
-            # This function needs to be defined in the main script context to save config
-            from newpower import update_provider_config as update_county_config
-        except ImportError:
-            from newpower2 import update_provider_config as update_point_config
-            update_county_config = update_point_config # Fallback
-
-        try:
+            # This function needs to be defined in the main script context to save config.
+            # We import it here, inside the method, to avoid circular import errors at startup.
+            if isinstance(self, GwtRpcCountyProvider):
+                from newpower import update_provider_config
+            else:
+                from newpower2 import update_provider_config
             _, valid_headers, valid_cookies, valid_body = get_rpc_config_auto.fetch_live_data(self.map_url)
             if valid_headers and valid_body:
                 logger.info(f"Repair successful! Updating {self.name}.")
@@ -72,7 +71,7 @@ class GwtRpcBaseProvider:
                     self.session.cookies.set(cookie['name'], cookie['value'], domain=cookie['domain'], path=cookie['path'])
 
             # Save to disk for next time
-            update_county_config(self.name, self.config)
+            update_provider_config(self.name, self.config)
             return True
         except Exception as e:
             logger.error(f"Auto-repair failed: {e}")
diff --git a/kubra.py b/providers/kubra.py
similarity index 99%
rename from kubra.py
rename to providers/kubra.py
index e267400..d2393b5 100644
--- a/kubra.py
+++ b/providers/kubra.py
@@ -4,7 +4,7 @@
 import logging
 import polyline
 import mercantile
 from datetime import datetime
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
 
 logger = logging.getLogger(__name__)
diff --git a/nisc.py b/providers/nisc.py
similarity index 98%
rename from nisc.py
rename to providers/nisc.py
index 3dca4d7..ca1abbc 100644
--- a/nisc.py
+++ b/providers/nisc.py
@@ -1,7 +1,7 @@
 import logging
 from datetime import datetime, timezone
 from pyproj import Transformer
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
 
 logger = logging.getLogger(__name__)
 
diff --git a/simple.py b/providers/simple.py
similarity index 97%
rename from simple.py
rename to providers/simple.py
index 344d912..14bbd47 100644
--- a/simple.py
+++ b/providers/simple.py
@@ -1,6 +1,6 @@
 import logging
 from datetime import datetime
-from base import BaseProvider, BaseCountyProvider
+from providers.base import BaseProvider, BaseCountyProvider
 
 logger = logging.getLogger(__name__)
 
diff --git a/test.py b/test.py
deleted file mode 100644
index 1e69a6a..0000000
--- a/test.py
+++ /dev/null
@@ -1,163 +0,0 @@
-import json
-import os
-
-def decode_gwt_rpc(payload):
-    """
-    Decodes a GWT-RPC payload to extract outage data for Counties.
-    """
-    # 1. Clean the payload
-    # GWT responses often start with //OK. We strip that.
-    if payload.startswith("//OK"):
-        payload = payload[4:]
-
-    # 2. Parse the FULL payload as JSON
-    # The GWT payload is structurally a JSON array: [stream_data..., [string_table], flags...]
-    try:
-        full_data = json.loads(payload)
-    except json.JSONDecodeError as e:
-        print(f"Error parsing payload JSON: {e}")
-        return None
-
-    # 3. Separate Stream and String Table
-    # The String Table is a list of strings located near the end of the main array.
-    # The "Stream" is everything before that string table.
-
-    string_table = None
-    stream_raw = []
-
-    # Iterate through the parsed array to find the string table (which is a list)
-    for item in full_data:
-        if isinstance(item, list):
-            string_table = item
-            # Once we find the table, we assume the rest are flags and stop adding to stream
-            break
-        else:
-            stream_raw.append(item)
-
-    if not string_table:
-        print("Error: String table not found in payload.")
-        return None
-
-    # 4. Normalize the Stream
-    # The decoder logic relies on integers (1-based indices).
-    # The raw stream might contain floats or strings that we need to cast or filter.
-    stream = []
-    for token in stream_raw:
-        if isinstance(token, int):
-            stream.append(token)
-        elif isinstance(token, float):
-            stream.append(int(token))
-        elif isinstance(token, str):
-            # Sometimes numeric values are sent as strings in the stream
-            try:
-                stream.append(int(float(token)))
-            except ValueError:
-                # If it's a non-numeric string token (like a cache ID), ignore it
-                pass
-
-    # 5. Decode Logic
-    try:
-        # Define the signatures we are looking for in the String Table
-        REGION_SIG = "cc.nisc.oms.clientandserver.v2.pojo.Region/3192921568"
-        INTEGER_SIG = "java.lang.Integer/3438268394"
-        CATEGORY_KEY = "County"
-
-        # Helper to find 1-based index
-        def get_index(val):
-            try: return string_table.index(val) + 1
-            except ValueError: return 0
-
-        region_type_id = get_index(REGION_SIG)
-        integer_type_id = get_index(INTEGER_SIG)
-        county_type_id = get_index(CATEGORY_KEY)
-
-        if region_type_id == 0:
-            print("Error: Region type signature not found in string table.")
-            # Debug: Print first few strings to verify if signatures changed
-            # print("Available strings:", string_table[:10])
-            return None
-
-        results = []
-        i = 0
-        stream_len = len(stream)
-
-        # Iterate through the stream looking for Region objects
-        while i < stream_len:
-            if stream[i] == region_type_id:
-                try:
-                    # We found a Region. The next few integers define its properties.
-                    # Pointer 'p' is relative to current index 'i'
-                    p = i + 1
-
-                    # --- Field 1: Total Served ---
-                    # Logic: Value is valid if followed by Integer Type ID
-                    served = 0
-                    val1 = stream[p]
-                    p += 1
-                    if p < stream_len and stream[p] == integer_type_id:
-                        served = val1
-                        p += 1 # Skip type ID
-
-                    # --- Field 2: Number Out ---
-                    out = 0
-                    val2 = stream[p]
-                    p += 1
-                    if p < stream_len and stream[p] == integer_type_id:
-                        out = val2
-                        p += 1 # Skip type ID
-
-                    # --- Field 3: Name Index ---
-                    name_idx = stream[p]
-                    p += 1
-
-                    # --- Field 4: Category Index ---
-                    cat_idx = stream[p]
-
-                    # Check if this is a County
-                    if cat_idx == county_type_id:
-                        name = "Unknown"
-                        if 0 < name_idx <= len(string_table):
-                            name = string_table[name_idx - 1]
-
-                        percent = 0.0
-                        if served > 0:
-                            percent = (out / served) * 100
-
-                        results.append({
-                            "county": name,
-                            "served": served,
-                            "out": out,
-                            "percent": percent
-                        })
-
-                except IndexError:
-                    pass
-            i += 1
-
-        return results
-
-    except Exception as e:
-        print(f"Error during stream traversal: {e}")
-        return None
-
-if __name__ == "__main__":
-    filename = "outage_data.txt"
-    if os.path.exists(filename):
-        with open(filename, "r", encoding="utf-8") as f:
-            raw_content = f.read().strip()
-
-        data = decode_gwt_rpc(raw_content)
-
-        if data:
-            # Sort A-Z
-            data.sort(key=lambda x: x['county'])
-
-            print(f"{'County':<20} | {'Served':>8} | {'Out':>8} | {'Percent':>8}")
-            print("-" * 55)
-            for row in data:
-                print(f"{row['county']:<20} | {row['served']:>8} | {row['out']:>8} | {row['percent']:>7.2f}%")
-        else:
-            print("No data found.")
-    else:
-        print(f"File '{filename}' not found. Please create it and paste the payload.")
-    
\ No newline at end of file