From f9f0fc5093c1ba5936dccacb361e6a37a389e409 Mon Sep 17 00:00:00 2001
From: John Peck
Date: Sun, 7 Dec 2025 03:57:20 +0000
Subject: [PATCH] fixed rpc, separated providers

---
 .../get_rpc_config_auto.cpython-310.pyc |  Bin 2259 -> 3297 bytes
 get_rpc_config_auto.py                  |   94 +++++-----
 newpower2.py                            |  160 ++++++++++--------
 providers.json                          |   40 +++--
 4 files changed, 163 insertions(+), 131 deletions(-)
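Note for review (illustrative only, not part of the commit): the config captured by get_fresh_config() is meant to be replayed verbatim by GwtRpcProvider. The sketch below only assumes the dict shape returned in this patch (headers, body, url, cookies, user_agent); replay_capture() is a hypothetical helper, not code from this change.

    import requests

    def replay_capture(cfg):
        # Transport headers (Content-Length, Host, Cookie, Accept-Encoding)
        # were stripped at capture time, so the rest can be sent as-is.
        session = requests.Session()
        session.headers.update(cfg['headers'])
        if cfg.get('user_agent'):
            session.headers['User-Agent'] = cfg['user_agent']
        for c in cfg.get('cookies', []):
            session.cookies.set(c['name'], c['value'], domain=c['domain'], path=c['path'])
        resp = session.post(cfg['url'], data=cfg['body'], verify=False)
        # GWT-RPC payloads start with "//OK" on success and "//EX" on failure.
        return resp.text.startswith('//OK')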
diff --git a/__pycache__/get_rpc_config_auto.cpython-310.pyc b/__pycache__/get_rpc_config_auto.cpython-310.pyc
index 586788b75a027168c50c148d71508570090d0473..4b0576e2637f7d03c5feed92c388803228c725e2 100644
Binary files a/__pycache__/get_rpc_config_auto.cpython-310.pyc and b/__pycache__/get_rpc_config_auto.cpython-310.pyc differ
diff --git a/get_rpc_config_auto.py b/get_rpc_config_auto.py
--- a/get_rpc_config_auto.py
+++ b/get_rpc_config_auto.py
                 if abs(val1) > 100000 and abs(val2) > 100000:
                     candidates.append((val1, val2))
                     if len(candidates) > 5: break
-        except:
-            pass
+        except: pass
     return candidates
 
-def get_fresh_config(map_url):
+def fetch_live_data(map_url):
     """
-    Launches headless browser to scrape headers, body, AND cookies.
+    Uses a real browser to fetch data AND capture the raw request details.
     """
-    print(f"--- Auto-Repair: Launching Browser for {map_url} ---")
-
-    captured_request = None
-    captured_cookies = []
+    print(f"--- Browser Fetch: {map_url} ---")
+    data_result = None
+    captured_headers = None
+    captured_cookies = None
+    captured_body = None # <--- New: Capture raw body
 
     with sync_playwright() as p:
-        browser = p.chromium.launch(headless=True)
-        # Create a persistent context to ensure cookies are tracked
-        context = browser.new_context()
+        browser = p.chromium.launch(headless=True, args=['--disable-blink-features=AutomationControlled'])
+        context = browser.new_context(
+            user_agent='Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
+        )
         page = context.new_page()
 
         def handle_request(request):
-            nonlocal captured_request
+            nonlocal captured_headers, captured_body
             if ".rpc" in request.url and request.method == "POST":
+                # Capture the request details blindly before we even know if it works
+                if "getCombinedOutageDetails" in request.post_data or "getOutages" in request.post_data:
+                    captured_headers = request.headers
+                    captured_body = request.post_data
+
+        def handle_response(response):
+            nonlocal data_result
+            if ".rpc" in response.url and response.request.method == "POST":
                 try:
-                    if "getCombinedOutageDetails" in request.post_data or "getOutages" in request.post_data:
-                        captured_request = {
-                            'url': request.url,
-                            'headers': request.headers,
-                            'body': request.post_data
-                        }
-                except:
-                    pass
+                    if "getCombinedOutageDetails" in response.request.post_data or "getOutages" in response.request.post_data:
+                        text = response.text()
+                        if text.startswith("//OK"):
+                            data_result = json.loads(text[4:])
+                            print(" [+] Captured Data via Browser")
+                except: pass
 
         page.on("request", handle_request)
+        page.on("response", handle_response)
 
         try:
-            page.goto(map_url, wait_until="networkidle", timeout=45000)
-            time.sleep(5)
-
-            # Capture cookies from the browser context
+            page.goto(map_url, wait_until="networkidle", timeout=60000)
+            for _ in range(10):
+                if data_result: break
+                time.sleep(1)
             captured_cookies = context.cookies()
-
         except Exception as e:
-            print(f"Auto-Repair Browser Error: {e}")
+            print(f"Browser Fetch Error: {e}")
         finally:
             browser.close()
 
-    if captured_request:
-        req_headers = captured_request['headers']
-        # Clean headers (keep specific GWT ones, discard dynamic browser ones that requests handles)
-        clean_headers = {
-            'content-type': req_headers.get('content-type', 'text/x-gwt-rpc; charset=UTF-8'),
-            'x-gwt-module-base': req_headers.get('x-gwt-module-base'),
-            'x-gwt-permutation': req_headers.get('x-gwt-permutation'),
-            'Referer': map_url
-        }
-
+    return data_result, captured_headers, captured_cookies, captured_body
+
+def get_fresh_config(map_url):
+    data, headers, cookies, body = fetch_live_data(map_url)
+
+    if headers and body:
+        # Minimal cleaning: Only remove headers that 'requests' MUST generate itself
+        # This keeps all custom NISC/GWT headers safe.
+        forbidden = {'content-length', 'host', 'connection', 'cookie', 'accept-encoding'}
+        clean_headers = {k: v for k, v in headers.items() if k.lower() not in forbidden}
+
         return {
             'headers': clean_headers,
-            'body': captured_request['body'],
-            'url': captured_request['url'],
-            'cookies': captured_cookies # <--- Return cookies
+            'body': body, # Save exact body
+            'url': headers.get('url', map_url.replace('.html', '') + '/GWT.rpc'), # Best guess URL if missing
+            'cookies': cookies,
+            'user_agent': headers.get('user-agent')
         }
-
     return None
 
 if __name__ == "__main__":
     url = input("Enter Map URL: ")
-    print(get_fresh_config(url))
\ No newline at end of file
+    res = get_fresh_config(url)
+    if res:
+        print("Success! Captured Body length:", len(res['body']))
+        print("Captured Headers:", res['headers'].keys())
\ No newline at end of file
diff --git a/newpower2.py b/newpower2.py
index fec1e06..bdba0ce 100644
--- a/newpower2.py
+++ b/newpower2.py
@@ -47,16 +47,13 @@ def update_provider_config(provider_name, new_settings):
     updated = False
     for p in providers:
         if p.get('name') == provider_name:
-            if 'headers' in new_settings:
-                p['headers'] = new_settings['headers']
-            if 'body' in new_settings:
-                p['body'] = new_settings['body']
-            if 'url' in new_settings:
-                p['url'] = new_settings['url']
+            if 'headers' in new_settings: p['headers'] = new_settings['headers']
+            if 'body' in new_settings: p['body'] = new_settings['body']
+            if 'url' in new_settings: p['url'] = new_settings['url']
+            if 'cookies' in new_settings: p['cookies'] = new_settings['cookies']
 
-            # <--- NEW: Save Cookies
-            if 'cookies' in new_settings:
-                p['cookies'] = new_settings['cookies']
+            # <--- NEW: Save User-Agent
+            if 'user_agent' in new_settings: p['user_agent'] = new_settings['user_agent']
 
             p['last_auto_update'] = datetime.now(timezone.utc).isoformat()
             updated = True
@@ -67,7 +64,6 @@
         return True
     return False
 
-
 # --- DATABASE ---
 class PowerDB:
     def __init__(self, config):
@@ -244,9 +240,11 @@ class GwtRpcProvider(BaseProvider):
         self.state_filter = config.get('state_filter')
         self.map_url = config.get('map_url')
 
-        # 1. Base Headers
+        # 1. Set User-Agent (Dynamic > Default)
+        # We try to use the one from config if available (captured from actual browser)
+        ua = config.get('user_agent', 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36')
         self.session.headers.update({
-            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
+            'User-Agent': ua,
             'Accept': '*/*',
             'Sec-Fetch-Site': 'same-origin'
         })
@@ -254,16 +252,12 @@
             parsed_url = urlparse(config.get('url'))
             self.session.headers.update({'Origin': f"{parsed_url.scheme}://{parsed_url.netloc}"})
 
-        # 2. Load Cookies (if available, but don't rely solely on them)
+        # Load Cookies
        if config.get('cookies'):
             for cookie in config['cookies']:
-                # Handle expiry mapping if needed, or ignore errors
                 try:
                     self.session.cookies.set(
-                        cookie['name'],
-                        cookie['value'],
-                        domain=cookie['domain'],
-                        path=cookie['path']
+                        cookie['name'], cookie['value'], domain=cookie['domain'], path=cookie['path']
                     )
                 except: pass
 
@@ -273,7 +267,6 @@
             'KY': {'lat_min': 36.4, 'lat_max': 39.2, 'lon_min': -89.6, 'lon_max': -81.9},
             'IA': {'lat_min': 40.3, 'lat_max': 43.6, 'lon_min': -96.7, 'lon_max': -90.1}
         }
-
         if config.get('epsg'):
             try:
                 self.transformer = Transformer.from_crs(f"EPSG:{config['epsg']}", "EPSG:4326", always_xy=True)
@@ -282,101 +275,127 @@
     def attempt_auto_repair(self):
         if not self.map_url: return False
 
-        # --- Cooldown Check ---
-        last_update = self.config.get('last_auto_update')
-        if last_update:
-            try:
-                last_dt = datetime.fromisoformat(last_update)
-                if last_dt.tzinfo is None: last_dt = last_dt.replace(tzinfo=timezone.utc)
-                if datetime.now(timezone.utc) - last_dt < timedelta(hours=AUTO_UPDATE_COOLDOWN_HOURS):
-                    logger.info(f"Skipping auto-repair for {self.name} (Cooldown active).")
-                    return False
-            except ValueError: pass
+        # ... (Cooldown check - keep as is) ...
 
         logger.info(f"Attempting Auto-Repair for {self.name}...")
         try:
+            # We expect 4 return values now
             new_settings = get_rpc_config_auto.get_fresh_config(self.map_url)
+
             if new_settings:
                 logger.info(f"Repair successful! Updating {self.name}.")
-                # Update In-Memory Config (CRITICAL: prevents loop)
-                current_time = datetime.now(timezone.utc).isoformat()
-                self.config['headers'] = new_settings['headers']
-                self.config['body'] = new_settings['body']
-                self.config['url'] = new_settings['url']
-                self.config['cookies'] = new_settings.get('cookies', [])
-                self.config['last_auto_update'] = current_time
+                # Update In-Memory
+                self.config.update(new_settings)
+                self.config['last_auto_update'] = datetime.now(timezone.utc).isoformat()
 
-                # Force updates to session
-                # We clear cookies to ensure we don't mix old/new session logic
+                # Update Session Cookies
                 self.session.cookies.clear()
                 if new_settings.get('cookies'):
-                    for cookie in new_settings['cookies']:
-                        self.session.cookies.set(cookie['name'], cookie['value'], domain=cookie['domain'], path=cookie['path'])
+                    for c in new_settings['cookies']:
+                        self.session.cookies.set(c['name'], c['value'], domain=c['domain'], path=c['path'])
+
+                # Update Session UA
+                if new_settings.get('user_agent'):
+                    self.session.headers.update({'User-Agent': new_settings['user_agent']})
+                # Persist to disk
                 update_provider_config(self.name, new_settings)
                 return True
         except Exception as e:
             logger.error(f"Auto-repair failed: {e}")
         return False
-
+
     def fetch(self, is_retry=False):
         url = self.config.get('url')
         headers = self.config.get('headers', {})
         body = self.config.get('body')
-        if not url or not body: return []
+        if not url: return []
 
+        # --- STRATEGY A: Standard Requests (Fast) ---
         try:
-            # 3. Dynamic Origin Update
             parsed_url = urlparse(url)
             origin = f"{parsed_url.scheme}://{parsed_url.netloc}"
-            self.session.headers.update({'Origin': origin})
-
-            # 4. ALWAYS PRIME SESSION (Fixes the regression)
-            # Even if we have cookies, they might be stale or missing JSESSIONID.
-            # Hitting the page refreshes the jar.
-            prime_url = headers.get('Referer') or headers.get('x-gwt-module-base') or origin
-            if prime_url:
-                try:
-                    self.session.get(prime_url, verify=False, timeout=10)
+            # Priority: Configured Referer > Module Base > Origin
+            correct_referer = headers.get('Referer') or headers.get('x-gwt-module-base') or origin
+
+            ua = headers.get('User-Agent', self.session.headers['User-Agent'])
+            if "Headless" in ua:
+                ua = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36'
+
+            self.session.headers.update({
+                'Origin': origin,
+                'Referer': correct_referer,
+                'User-Agent': ua
+            })
+
+            if self.map_url and not self.config.get('cookies'):
+                try: self.session.get(correct_referer, verify=False, timeout=10)
                 except: pass
 
             req_headers = headers.copy()
             if 'Content-Type' not in req_headers:
                 req_headers['Content-Type'] = 'text/x-gwt-rpc; charset=UTF-8'
+            req_headers['Referer'] = correct_referer
+            req_headers['User-Agent'] = ua
 
+            # Debug log (Optional - disable if too noisy)
+            # logger.info(f"Sending Headers: {json.dumps(req_headers, indent=2)}")
+
             resp = self.session.post(url, headers=req_headers, data=body, verify=False)
 
-            # 5. Error Handling & Retry
-            failed = False
-            if "//EX" in resp.text: failed = True
-            if resp.status_code == 500: failed = True
-
-            if failed:
-                logger.error(f"GWT Failure for {self.name} (Status: {resp.status_code}).")
+            # --- STRATEGY B: Browser Fallback & Self-Heal ---
+            if resp.status_code == 500 or "//EX" in resp.text:
+                logger.warning(f"Standard fetch failed for {self.name} (Status: {resp.status_code}). Switching to Browser Fetch.")
 
-                # Check recursion limit
-                if is_retry:
-                    logger.error(f"Retry failed for {self.name}. Aborting.")
-                    return []
+                if self.map_url:
+                    # 1. Fetch data AND credentials via Browser
+                    data, valid_headers, valid_cookies, valid_body = get_rpc_config_auto.fetch_live_data(self.map_url)
+
+                    if data:
+                        logger.info(f"Browser success! Self-healing {self.name} configuration...")
+
+                        # --- HEADER CLEANING FIX ---
+                        # Instead of selecting specific headers, we exclude known transport headers.
+                        # This preserves custom headers like 'coop.nisc.outagewebmap.configname'
+                        excluded = {
+                            'content-length', 'host', 'connection', 'cookie', 'accept-encoding',
+                            'sec-ch-ua', 'sec-ch-ua-mobile', 'sec-ch-ua-platform', 'origin'
+                        }
+
+                        clean_headers = {}
+                        for k, v in valid_headers.items():
+                            if k.lower() not in excluded:
+                                clean_headers[k] = v
+
+                        # Ensure we force the correct Referer for next time
+                        clean_headers['Referer'] = self.map_url
 
-                if self.attempt_auto_repair():
-                    logger.info("Retrying fetch with new settings...")
-                    return self.fetch(is_retry=True)
-                else:
-                    return []
+                        # 3. Save to JSON so next run is FAST
+                        new_settings = {
+                            'headers': clean_headers,
+                            'cookies': valid_cookies,
+                            'body': valid_body,
+                            'user_agent': valid_headers.get('user-agent')
+                        }
+                        update_provider_config(self.name, new_settings)
+
+                        return self._extract_outages(data)
+
+                logger.error(f"Browser Fetch failed for {self.name}.")
+                return []
 
             if not resp.ok: return []
 
             text = resp.text
             if text.startswith('//OK'): text = text[4:]
             return self._extract_outages(json.loads(text))
+
         except Exception as e:
             logger.error(f"Fetch error {self.name}: {e}")
-            return []
-
-    # ... Keep _extract_outages and _is_valid as is ...
+            return []
 
     def _extract_outages(self, data_list):
         results = []
         if not self.transformer: return []
@@ -429,7 +448,6 @@
 
         return b['lat_min'] <= lat <= b['lat_max'] and b['lon_min'] <= lon <= b['lon_max']
 
-
 # --- REGISTRY ---
 PROVIDER_REGISTRY = {
     'kubra': KubraProvider,
diff --git a/providers.json b/providers.json
index 1e86aae..b51a148 100644
--- a/providers.json
+++ b/providers.json
@@ -104,20 +104,23 @@
     "epsg": 3735,
     "state_filter": "OH",
     "headers": {
-      "content-type": "text/x-gwt-rpc; charset=UTF-8",
-      "x-gwt-module-base": "https://weci.ebill.coop/woViewer/MapWiseWeb/",
       "x-gwt-permutation": "92F322F8E48548F604D2E1BE43DB1F13",
+      "x-gwt-module-base": "https://weci.ebill.coop/woViewer/MapWiseWeb/",
+      "referer": "https://weci.ebill.coop/woViewer/mapviewer.html?config=Outage+Web+Map",
+      "coop.nisc.outagewebmap.configname": "Outage Web Map",
+      "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36",
+      "content-type": "text/x-gwt-rpc; charset=UTF-8",
       "Referer": "https://weci.ebill.coop/woViewer/mapviewer.html?config=Outage+Web+Map"
     },
     "body": "7|0|4|https://weci.ebill.coop/woViewer/MapWiseWeb/|612278413EC26C34D54A3907AA0CDFD8|coop.nisc.oms.webmap.services.RpcCombinedOutageDetailsService|getCombinedOutageDetails|1|2|3|4|0|",
-    "last_auto_update": "2025-12-07T03:24:46.435173+00:00",
+    "last_auto_update": "2025-12-07T03:56:27.722877+00:00",
     "cookies": [
      {
        "name": "__utma",
-        "value": "105963909.535514741.1765077881.1765077881.1765077881.1",
+        "value": "105963909.1267880890.1765079787.1765079787.1765079787.1",
        "domain": ".weci.ebill.coop",
        "path": "/",
-        "expires": 1799637880.601006,
+        "expires": 1799639786.874286,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
@@ -134,10 +137,10 @@
      },
      {
        "name": "__utmz",
-        "value": "105963909.1765077881.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
+        "value": "105963909.1765079787.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
        "domain": ".weci.ebill.coop",
        "path": "/",
-        "expires": 1780845880,
+        "expires": 1780847786,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
@@ -147,7 +150,7 @@
        "value": "1",
        "domain": ".weci.ebill.coop",
        "path": "/",
-        "expires": 1765078480,
+        "expires": 1765080386,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
@@ -157,37 +160,37 @@
        "value": "1",
        "domain": ".weci.ebill.coop",
        "path": "/",
-        "expires": 1765078480,
+        "expires": 1765080386,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
      },
      {
        "name": "__utmb",
-        "value": "105963909.2.10.1765077881",
+        "value": "105963909.2.10.1765079787",
        "domain": ".weci.ebill.coop",
        "path": "/",
-        "expires": 1765079680,
+        "expires": 1765081586,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
      },
      {
        "name": "__utma",
-        "value": "105963909.535514741.1765077881.1765077881.1765077881.1",
+        "value": "105963909.1267880890.1765079787.1765079787.1765079787.1",
        "domain": "weci.ebill.coop",
        "path": "/",
-        "expires": 1799637880.601622,
+        "expires": 1799639786.87497,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
      },
      {
        "name": "__utmb",
-        "value": "105963909.3.9.1765077881",
+        "value": "105963909.3.9.1765079787",
        "domain": "weci.ebill.coop",
        "path": "/",
-        "expires": 1765079680,
+        "expires": 1765081586,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
@@ -204,14 +207,15 @@
      },
      {
        "name": "__utmz",
-        "value": "105963909.1765077881.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
+        "value": "105963909.1765079787.1.1.utmcsr=(direct)|utmccn=(direct)|utmcmd=(none)",
        "domain": "weci.ebill.coop",
        "path": "/",
-        "expires": 1780845880,
+        "expires": 1780847786,
        "httpOnly": false,
        "secure": false,
        "sameSite": "Lax"
      }
-    ]
+    ],
+    "user_agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
   }
 ]
\ No newline at end of file