fixed rpc, separated providers

2025-12-07 03:57:20 +00:00
parent 32638df3a9
commit f9f0fc5093
4 changed files with 163 additions and 131 deletions


@@ -3,7 +3,6 @@ import time
 from playwright.sync_api import sync_playwright
-
 def analyze_gwt_response(response_text):
     """Finds potential coordinates to validate response data."""
     candidates = []
     try:
         if response_text.startswith("//OK"):
@@ -17,71 +16,82 @@ def analyze_gwt_response(response_text)
             if abs(val1) > 100000 and abs(val2) > 100000:
                 candidates.append((val1, val2))
                 if len(candidates) > 5: break
-    except:
-        pass
+    except: pass
     return candidates
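
For reference, a GWT-RPC payload is just the "//OK" sentinel (or "//EX" on failure) followed by a JSON array, which is why both the analyzer above and the response handler below strip four characters before calling json.loads. A minimal sketch with a made-up payload (the values are illustrative, not real outage data):

    import json

    # Hypothetical payload; a real NISC response is a far longer array
    # mixing coordinates, counts, and a string table.
    sample = '//OK[41123456,-81123456,7,0,["outage"],1]'
    if sample.startswith("//OK"):
        payload = json.loads(sample[4:])  # drop the 4-char sentinel, parse the array
        print(payload[0], payload[1])     # the large-magnitude pair analyze_gwt_response looks for
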
-def get_fresh_config(map_url):
+def fetch_live_data(map_url):
     """
-    Launches headless browser to scrape headers, body, AND cookies.
+    Uses a real browser to fetch data AND capture the raw request details.
     """
-    print(f"--- Auto-Repair: Launching Browser for {map_url} ---")
-    captured_request = None
-    captured_cookies = []
+    print(f"--- Browser Fetch: {map_url} ---")
+    data_result = None
+    captured_headers = None
+    captured_cookies = None
+    captured_body = None  # <--- New: Capture raw body
     with sync_playwright() as p:
-        browser = p.chromium.launch(headless=True)
-        # Create a persistent context to ensure cookies are tracked
-        context = browser.new_context()
+        browser = p.chromium.launch(headless=True, args=['--disable-blink-features=AutomationControlled'])
+        context = browser.new_context(
+            user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
+        )
         page = context.new_page()
         def handle_request(request):
-            nonlocal captured_request
+            nonlocal captured_headers, captured_body
             if ".rpc" in request.url and request.method == "POST":
+                # Capture the request details blindly before we even know if it works
+                if "getCombinedOutageDetails" in request.post_data or "getOutages" in request.post_data:
+                    captured_headers = request.headers
+                    captured_body = request.post_data
+        def handle_response(response):
+            nonlocal data_result
+            if ".rpc" in response.url and response.request.method == "POST":
                 try:
-                    if "getCombinedOutageDetails" in request.post_data or "getOutages" in request.post_data:
-                        captured_request = {
-                            'url': request.url,
-                            'headers': request.headers,
-                            'body': request.post_data
-                        }
-                except:
-                    pass
+                    if "getCombinedOutageDetails" in response.request.post_data or "getOutages" in response.request.post_data:
+                        text = response.text()
+                        if text.startswith("//OK"):
+                            data_result = json.loads(text[4:])
+                            print(" [+] Captured Data via Browser")
+                except: pass
         page.on("request", handle_request)
+        page.on("response", handle_response)
         try:
-            page.goto(map_url, wait_until="networkidle", timeout=45000)
-            time.sleep(5)
-            # Capture cookies from the browser context
+            page.goto(map_url, wait_until="networkidle", timeout=60000)
+            for _ in range(10):
+                if data_result: break
+                time.sleep(1)
             captured_cookies = context.cookies()
         except Exception as e:
-            print(f"Auto-Repair Browser Error: {e}")
+            print(f"Browser Fetch Error: {e}")
         finally:
             browser.close()
-    if captured_request:
-        req_headers = captured_request['headers']
-        # Clean headers (keep specific GWT ones, discard dynamic browser ones that requests handles)
-        clean_headers = {
-            'content-type': req_headers.get('content-type', 'text/x-gwt-rpc; charset=UTF-8'),
-            'x-gwt-module-base': req_headers.get('x-gwt-module-base'),
-            'x-gwt-permutation': req_headers.get('x-gwt-permutation'),
-            'Referer': map_url
-        }
+    return data_result, captured_headers, captured_cookies, captured_body
+
+def get_fresh_config(map_url):
+    data, headers, cookies, body = fetch_live_data(map_url)
+    if headers and body:
+        # Minimal cleaning: Only remove headers that 'requests' MUST generate itself
+        # This keeps all custom NISC/GWT headers safe.
+        forbidden = {'content-length', 'host', 'connection', 'cookie', 'accept-encoding'}
+        clean_headers = {k: v for k, v in headers.items() if k.lower() not in forbidden}
         return {
             'headers': clean_headers,
-            'body': captured_request['body'],
-            'url': captured_request['url'],
-            'cookies': captured_cookies  # <--- Return cookies
+            'body': body,  # Save exact body
+            'url': headers.get('url', map_url.replace('.html', '') + '/GWT.rpc'),  # Best guess URL if missing
+            'cookies': cookies,
+            'user_agent': headers.get('user-agent')
         }
     return None
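
One detail for anyone consuming this config dict: context.cookies() hands back a list of cookie dicts (name, value, domain, path, ...), not the flat name-to-value mapping that requests expects. A small conversion sketch (the helper name is ours, not part of this commit):

    def cookies_for_requests(playwright_cookies):
        """Collapse Playwright cookie dicts into a requests-style name->value map."""
        return {c['name']: c['value'] for c in (playwright_cookies or [])}
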
 if __name__ == "__main__":
     url = input("Enter Map URL: ")
-    print(get_fresh_config(url))
+    res = get_fresh_config(url)
+    if res:
+        print("Success! Captured Body length:", len(res['body']))
+        print("Captured Headers:", res['headers'].keys())