#!/usr/bin/env python3
"""
Mission Control unified server.

Replaces `python3 -m http.server 4000`. Serves static files from this directory
AND proxies external APIs that browsers can't reach directly:

  /klaviyo/api/*  -> a.klaviyo.com  (CORS-blocked from browser)
  /aws/ses/*      -> SES via boto3  (requires SigV4 signing — server-side only)
  /aws/sns/*      -> SNS via boto3  (same reason)

For AWS routes the browser passes credentials per request in the headers:
  X-AWS-Access-Key-Id, X-AWS-Secret-Access-Key, X-AWS-Region.

Run:    python3 mc-server.py     (or via the mc-server.service systemd unit)
Stop:   Ctrl-C
Logs:   stderr (and ~/Desktop/MARKETING/mc-server.log via systemd)
"""

from http.server import SimpleHTTPRequestHandler, ThreadingHTTPServer
import urllib.request, urllib.error, urllib.parse, sys, os, signal, json
from datetime import datetime

PORT = 4000
HOST = "127.0.0.1"   # loopback only — NOTE(review): /wa/webhook below is described as a public endpoint; presumably a tunnel/reverse proxy fronts this server — confirm
ROOT = os.path.dirname(os.path.abspath(__file__))   # static files are served from the script's own directory

KLAVIYO_PREFIX = "/klaviyo"
KLAVIYO_HOST   = "a.klaviyo.com"

AWS_PREFIX = "/aws"

# Live-site proxy for the Comp Viewer iPhone preview.
# Fetches pages from nitrouscompetitions.com and strips X-Frame-Options + CSP so the iframe loads.
SITE_PREFIX = "/site"
SITE_HOST   = "www.nitrouscompetitions.com"

# GA4 Realtime via service-account auth.
# Browser POSTs the service-account JSON in a header, server uses google.auth to mint
# an access token, then calls Analytics Data API.
GA_PREFIX = "/ga"

# WhatsApp inbound + outbound + auto-reply
# /wa/webhook    - public POST endpoint for the WA provider (Twilio / 360dialog / Meta WBA) to deliver incoming msgs
# /wa/inbox      - browser GET to list recent incoming messages
# /wa/send       - browser POST to send a WA message via configured provider
# /wa/auto-reply - browser POST to toggle / configure auto-reply (Claude-driven)
WA_PREFIX = "/wa"
WA_INBOX_FILE = os.path.join(ROOT, "wa-inbox.json")   # normalised inbound messages, capped at 500
WA_CFG_FILE   = os.path.join(ROOT, "wa-config.json")  # provider creds + auto-reply settings

# Independent server-side watchdog — runs on a background thread, even with no browser open.
WD_PREFIX   = "/wd"
WD_CFG_FILE = os.path.join(ROOT, "watchdog-config.json")
WD_HIST_FILE= os.path.join(ROOT, "watchdog-history.json")   # run history, capped at 30 entries

# WhatsApp QR-bot sidecar — Node.js process on port 4002 (Baileys)
WA_BOT_PREFIX = "/wa-bot"
WA_BOT_PORT   = 4002

# 🗄️ WOO CACHE — server-side prefetch of all WooCommerce data.
# Background thread refreshes:
#   • all products            every  5 min
#   • orders for "today"      every 60s
#   • orders for "week"       every  5 min
#   • per-product lifetime    every 60s for active comps, hourly for old comps
#   • customer database       updated as new orders arrive
#   • winner lists            auto-generated when a comp's draw date passes
# Browser fetches /cache/snapshot which returns everything pre-computed — instant load,
# no rate-limit risk, no CORS, no waiting.
#
# All persistent data lives on /media/sol1/1TB-Drive1/data — kept off the system disk so it
# survives OS reinstall + has plenty of room to grow.
CACHE_PREFIX  = "/cache"
DATA_ROOT          = "/media/sol1/1TB-Drive1/data"
CACHE_FILE         = os.path.join(DATA_ROOT, "woo_cache.json")
CACHE_LIFETIME_FILE= os.path.join(DATA_ROOT, "woo_lifetime.json")
CACHE_CUSTOMERS    = os.path.join(DATA_ROOT, "customers_db.json")     # { email: { name, phone, city, postcode, total_spent, orders, last_seen } }
WINNERS_DIR        = os.path.join(DATA_ROOT, "winners")               # one JSON per finished comp
PROXY_CONFIG_FILE  = os.path.join(DATA_ROOT, "proxies.json")          # outbound proxy rotation pool
RATE_LIMIT_PER_MIN = 35                                               # cap outbound Woo calls — Wordfence default panics around 60/min

# 📚 FULL WooCommerce customer dump (~442K records, 4,426 page files × 100 each).
# Lazy-loaded into memory on first /cache/db/* request, then served from there.
CUSTOMERS_PAGES_DIR = os.path.join(DATA_ROOT, "customers")
ORDERS_PAGES_DIR    = os.path.join(DATA_ROOT, "orders")                     # 365 daily JSON files (full 12-month order history)
WINNINGS_12MO_CACHE = os.path.join(DATA_ROOT, "winnings_12mo_index.json")   # pre-aggregated index, rebuilds on demand

# boto3 import is deferred to first AWS call so static-only setups don't need it
_boto3 = None
def _get_boto3():
    """Lazily import and memoise the boto3 module (first AWS call pays the cost)."""
    global _boto3
    if _boto3 is not None:
        return _boto3
    import boto3   # type: ignore
    _boto3 = boto3
    return _boto3


def _normalise(o):
    """Walk a boto3 result and turn datetimes / bytes into JSON-safe values."""
    if isinstance(o, dict):
        return {k: _normalise(v) for k, v in o.items()}
    if isinstance(o, list):
        return [_normalise(x) for x in o]
    if isinstance(o, datetime):
        return o.isoformat()
    if isinstance(o, bytes):
        return o.decode("utf-8", errors="replace")
    return o


class Handler(SimpleHTTPRequestHandler):
    """Single request handler: serves static files from ROOT and multiplexes the
    /klaviyo, /aws, /site, /ga, /wa, /wd, /wa-bot and /cache proxy routes."""

    def __init__(self, *args, **kwargs):
        # Pin the static-file root to this script's directory, regardless of CWD.
        super().__init__(*args, directory=ROOT, **kwargs)

    # ---------- routing ----------
    def _is_klaviyo(self):
        # True for /klaviyo itself or anything nested under /klaviyo/.
        p = self.path
        return p == KLAVIYO_PREFIX or p.startswith(KLAVIYO_PREFIX + "/")

    def _is_aws(self):
        # True for /aws itself or anything nested under /aws/.
        p = self.path
        return p == AWS_PREFIX or p.startswith(AWS_PREFIX + "/")

    def _is_site(self):
        # True for /site itself or anything nested under /site/.
        p = self.path
        return p == SITE_PREFIX or p.startswith(SITE_PREFIX + "/")

    def _is_ga(self):
        # True for /ga itself or anything nested under /ga/.
        p = self.path
        return p == GA_PREFIX or p.startswith(GA_PREFIX + "/")

    def _is_wa(self):
        # True for /wa itself or anything nested under /wa/.
        p = self.path
        return p == WA_PREFIX or p.startswith(WA_PREFIX + "/")

    def _is_wd(self):
        # True for /wd itself or anything nested under /wd/.
        p = self.path
        return p == WD_PREFIX or p.startswith(WD_PREFIX + "/")

    def _is_wa_bot(self):
        # True for /wa-bot itself or anything nested under /wa-bot/.
        p = self.path
        return p == WA_BOT_PREFIX or p.startswith(WA_BOT_PREFIX + "/")
    def _is_cache(self):
        # True for /cache itself or anything nested under /cache/.
        p = self.path
        return p == CACHE_PREFIX or p.startswith(CACHE_PREFIX + "/")

    # ---------- CORS ----------
    def _cors(self):
        """Attach the permissive CORS headers shared by every proxied route.

        Must be called between send_response() and end_headers().
        """
        allow_headers = ("authorization,revision,content-type,accept,"
                         "x-klaviyo-user-agent,"
                         "x-aws-access-key-id,x-aws-secret-access-key,x-aws-region,"
                         "x-ga-service-account")
        for name, value in (
            ("Access-Control-Allow-Origin", "*"),
            ("Access-Control-Allow-Methods", "GET,POST,PUT,PATCH,DELETE,OPTIONS"),
            ("Access-Control-Allow-Headers", allow_headers),
            ("Access-Control-Max-Age", "86400"),
        ):
            self.send_header(name, value)

    def do_OPTIONS(self):
        """CORS preflight: every path gets 204 + CORS headers; non-proxied
        (static) paths additionally advertise their methods via Allow.

        NOTE(review): _is_cache() is absent from the proxy check below, so
        /cache preflights take the generic branch — harmless (still CORS'd),
        but confirm it's intentional.
        """
        proxied = (self._is_klaviyo() or self._is_aws() or self._is_site()
                   or self._is_ga() or self._is_wa() or self._is_wd()
                   or self._is_wa_bot())
        self.send_response(204)
        if not proxied:
            self.send_header("Allow", "GET,HEAD,OPTIONS")
        self._cors()
        self.end_headers()

    # ---------- WA Bot passthrough (Mission Control → Node bot on :4002) ----------
    def _wa_bot(self):
        """Transparent passthrough to the Node QR-bot on :4002.

        Maps /wa-bot/<rest> -> http://127.0.0.1:4002/<rest>, forwarding the
        request body and Content-Type and echoing back the bot's status,
        Content-Type and body (plus CORS headers). Upstream HTTP errors are
        relayed as-is; connection failures become a 502 with a hint.
        """
        # /wa-bot/<rest>  ->  http://127.0.0.1:4002/<rest>
        path = self.path[len(WA_BOT_PREFIX):] or "/"
        target = f"http://127.0.0.1:{WA_BOT_PORT}{path}"
        # Forward the request body verbatim (POST/PUT payloads).
        length = int(self.headers.get("Content-Length", "0") or 0)
        body = self.rfile.read(length) if length > 0 else None
        try:
            # Reuse the incoming HTTP verb so GET/POST/etc. all pass through.
            req = urllib.request.Request(target, data=body, method=self.command)
            ct = self.headers.get("Content-Type")
            if ct: req.add_header("Content-Type", ct)
            with urllib.request.urlopen(req, timeout=15) as r:
                self.send_response(r.status)
                ctype = r.getheader("Content-Type", "application/json")
                self.send_header("Content-Type", ctype)
                self._cors()
                self.end_headers()
                self.wfile.write(r.read())
        except urllib.error.HTTPError as e:
            # Bot answered with an HTTP error — relay status, headers and body.
            self.send_response(e.code)
            self.send_header("Content-Type", e.headers.get("Content-Type", "application/json"))
            self._cors()
            self.end_headers()
            try: self.wfile.write(e.read())
            except Exception: pass
        except Exception as e:
            # Bot unreachable (not running / wrong port) — synthesize a 502.
            self.send_response(502)
            self.send_header("Content-Type", "application/json")
            self._cors()
            self.end_headers()
            self.wfile.write(json.dumps({"error": "wa-bot unreachable: " + str(e), "hint": "is wa-bot.service running?"}).encode())

    # ---------- Independent server-side Watchdog ----------
    def _wd(self):
        """Dispatch /wd/<op> to the matching watchdog handler (404 otherwise)."""
        op = self.path[len(WD_PREFIX) + 1:].split("/", 1)[0].split('?')[0]
        dispatch = {
            'configure': self._wd_configure,
            'status':    self._wd_status_get,
            'start':     lambda: self._wd_set_state(True),
            'stop':      lambda: self._wd_set_state(False),
            'run-now':   self._wd_run_now,
            'snapshot':  self._wd_snapshot,
            'history':   self._wd_history_get,
        }
        handler = dispatch.get(op)
        if handler is None:
            return self._json(404, {"error": "unknown wd endpoint", "got": op})
        return handler()

    def _wd_load_cfg(self):
        """Read the watchdog config JSON; empty dict when missing or corrupt."""
        try:
            with open(WD_CFG_FILE, 'r') as f:
                return json.load(f)
        except Exception:
            return {}
    def _wd_save_cfg(self, cfg):
        """Best-effort write of the watchdog config; failures logged, never raised."""
        try:
            with open(WD_CFG_FILE, 'w') as f:
                json.dump(cfg, f)
        except Exception as e:
            sys.stderr.write(f"wd-cfg save error: {e}\n")
    def _wd_load_hist(self):
        """Read the watchdog run history; empty list when missing or corrupt."""
        try:
            with open(WD_HIST_FILE, 'r') as f:
                return json.load(f)
        except Exception:
            return []
    def _wd_save_hist(self, h):
        """Best-effort write of the run history, keeping at most 30 entries."""
        try:
            with open(WD_HIST_FILE, 'w') as f:
                json.dump(h[:30], f)
        except Exception as e:
            sys.stderr.write(f"wd-hist save error: {e}\n")

    def _wd_configure(self):
        """POST /wd/configure — merge the JSON body into the stored config,
        then signal the daemon to pick up the new settings."""
        n = int(self.headers.get("Content-Length", "0") or 0)
        try:
            body = json.loads(self.rfile.read(n) or b'{}')
        except Exception as e:
            return self._json(400, {"error": f"bad json: {e}"})
        cfg = self._wd_load_cfg()
        cfg.update(body)
        self._wd_save_cfg(cfg)
        # If enabled, ensure daemon is running
        WD_DAEMON.reload()
        return self._json(200, self._wd_safe_cfg(cfg))

    def _wd_safe_cfg(self, cfg):
        """Return cfg with credential values stripped, plus live daemon status.

        Secrets are never echoed back; the browser only sees has_* booleans
        saying whether each credential set is configured.
        """
        # Generic substrings marking a key as secret-bearing. 'woo_ck'/'woo_cs'
        # are WooCommerce API credentials but match none of these substrings —
        # the original filter leaked them verbatim — so block them by name too.
        secret_markers = ("token", "key", "sak", "sid", "secret")
        out = {k: v for k, v in cfg.items()
               if k not in ("woo_ck", "woo_cs")
               and not any(s in k.lower() for s in secret_markers)}
        out['has_woo']    = bool(cfg.get('woo_ck') and cfg.get('woo_cs'))
        out['has_claude'] = bool(cfg.get('claude_key'))
        out['has_twilio'] = bool(cfg.get('twilio_sid') and cfg.get('twilio_token') and cfg.get('twilio_wa_from'))
        out['running']    = WD_DAEMON.running
        out['last_run']   = WD_DAEMON.last_run
        out['next_run']   = WD_DAEMON.next_run
        return out

    def _wd_status_get(self):
        """GET /wd/status — current config (secrets redacted) + daemon state."""
        return self._json(200, self._wd_safe_cfg(self._wd_load_cfg()))

    def _wd_set_state(self, on):
        """Flip the watchdog's enabled flag, persist it, and reload the daemon."""
        state = self._wd_load_cfg()
        state['enabled'] = on
        self._wd_save_cfg(state)
        WD_DAEMON.reload()
        return self._json(200, self._wd_safe_cfg(state))

    def _wd_run_now(self):
        """POST /wd/run-now — execute one watchdog pass immediately (send=True)."""
        try:
            return self._json(200, WD_DAEMON.run_once(send=True))
        except Exception as e:
            return self._json(500, {"error": str(e)})

    def _cache_handle(self):
        """Routes for the WooCache. All return JSON.
           GET  /cache/snapshot   - everything in one payload (default)
           GET  /cache/products   - just products
           GET  /cache/orders?period=today|week|prev_today
           GET  /cache/lifetime/<id>
           GET  /cache/status     - meta only (when each cache last refreshed)
           POST /cache/refresh    - force a refresh tick (background)
           (plus /customers, /db/*, /birthdays, /winners*, /proxies, /limiter)

        FIX: the ambiguous two-number birthday parse ("15/05") previously used
        `(x, y) if x > 12 else (y, x)` — when the first number exceeded 12 it
        landed in the *month* slot and always failed the 1..12 check, so the
        very format the error message recommends ("DD/MM") was rejected. The
        condition now tests the second number instead: day-first by default,
        month-first only when the second number cannot be a month.
        """
        path = self.path.split('?', 1)[0][len(CACHE_PREFIX):]
        try:
            if path == '' or path == '/' or path == '/snapshot':
                snap = WOO_CACHE.snapshot()
                self._cjson(snap)
            elif path == '/products':
                self._cjson({ 'products': WOO_CACHE.products, 'age_sec': self._age('products') })
            elif path == '/orders':
                period = (urllib.parse.urlparse(self.path).query and dict(urllib.parse.parse_qsl(urllib.parse.urlparse(self.path).query)).get('period', 'today')) or 'today'
                self._cjson({ 'period': period, 'orders': WOO_CACHE.orders.get(period, []), 'age_sec': self._age(period) })
            elif path.startswith('/lifetime/'):
                pid = path[len('/lifetime/'):]
                self._cjson({ 'product_id': pid, 'data': WOO_CACHE.lifetime.get(str(pid), None) })
            elif path == '/status':
                snap = WOO_CACHE.snapshot()
                # Trim products/orders out of status
                snap.pop('products', None); snap.pop('orders', None); snap.pop('lifetime', None)
                self._cjson(snap)
            elif path == '/refresh':
                # Force everything to be considered stale on next tick (which fires within 30s)
                WOO_CACHE.last_refresh = {k: 0 for k in WOO_CACHE.last_refresh}
                self._cjson({ 'ok': True, 'msg': 'all caches marked stale — next tick will refresh' })
            elif path == '/customers':
                # Search the auto-aggregated customer DB
                qs = dict(urllib.parse.parse_qsl(urllib.parse.urlparse(self.path).query))
                q = (qs.get('q') or '').strip().lower()
                limit = int(qs.get('limit') or 200)
                db = WOO_CACHE._load_customer_db()
                results = []
                for email, rec in db.items():
                    if email.startswith('_'): continue   # skip internal bookkeeping keys
                    if not q or (q in email or q in (rec.get('name','') or '').lower() or q in (rec.get('postcode','') or '').lower() or q in (rec.get('city','') or '').lower() or q in (rec.get('phone','') or '').lower()):
                        results.append({ 'email': email, **rec })
                        if len(results) >= limit: break
                self._cjson({ 'count': len(results), 'total_customers': len([k for k in db if not k.startswith('_')]), 'results': results })
            elif path == '/db' or path == '/db/' or path == '/db/search':
                # Full ~442K customer search backed by the page dump on the external drive
                qs = dict(urllib.parse.parse_qsl(urllib.parse.urlparse(self.path).query))
                self._cjson(BULK_DB.search(
                    qs.get('q',''),
                    int(qs.get('limit') or 200),
                    int(qs.get('offset') or 0),
                    qs.get('sort') or 'name',
                    qs.get('filter') or '',
                ))
            elif path == '/db/birthday':
                # POST { email, birthday: 'MM-DD' or '' to clear }  →  manual birthday set
                ln = int(self.headers.get('Content-Length') or 0)
                body = json.loads(self.rfile.read(ln)) if ln else {}
                email = (body.get('email') or '').lower().strip()
                bd    = (body.get('birthday') or '').strip()
                if not email: self._cjson({'error': 'email required'}, 400); return
                # Normalise: accept "1990-05-15", "15/05/1990", "5/15", "May 15", "MM-DD"
                import re as _re
                normalised = ''
                if bd:
                    m = _re.search(r'(\d{1,4})[-/.\s](\d{1,2})(?:[-/.\s](\d{1,4}))?', bd)
                    if m:
                        a, b_, c = m.group(1), m.group(2), m.group(3)
                        if c is None:
                            # No year — ambiguous "a/b". Default to day-first (UK);
                            # if the second number can't be a month, it's month-first.
                            x, y = int(a), int(b_)
                            month, day = (x, y) if y > 12 else (y, x)
                        else:
                            # With a year: the 4-digit part fixes the layout.
                            if len(a) == 4:    month, day = int(b_), int(c)
                            elif len(c) == 4:  month, day = int(b_), int(a)
                            else:              month, day = int(b_), int(a)
                        if 1 <= month <= 12 and 1 <= day <= 31:
                            normalised = f"{month:02d}-{day:02d}"
                    if not normalised: self._cjson({'error': 'could not parse birthday — try "DD/MM" or "DD/MM/YYYY"'}, 400); return
                # Persist into the live customer DB (alongside auto-extracted ones)
                db = WOO_CACHE._load_customer_db()
                rec = db.get(email)
                if not rec:
                    # Customer is in the bulk dump but not the live DB yet — seed an entry
                    bulk_rec = BULK_DB.get_by_email(email)
                    if not bulk_rec: self._cjson({'error': 'no customer with that email'}, 404); return
                    rec = { 'name': bulk_rec.get('name',''), 'phone': bulk_rec.get('phone',''),
                            'city': bulk_rec.get('city',''), 'postcode': bulk_rec.get('postcode',''),
                            'orders': bulk_rec.get('orders', 0), 'total_spent': bulk_rec.get('revenue', 0),
                            'first_seen': bulk_rec.get('created',''), 'last_seen': bulk_rec.get('modified','') }
                rec['birthday'] = normalised
                if not normalised: rec.pop('birthday', None)   # empty string means "clear"
                db[email] = rec
                WOO_CACHE._save_customer_db(db)
                # Also update the in-memory bulk DB so the UI shows it instantly
                if BULK_DB._loaded:
                    bulk_rec = BULK_DB.by_email.get(email)
                    if bulk_rec: bulk_rec['birthday'] = normalised
                self._cjson({'ok': True, 'email': email, 'birthday': normalised or None})
            elif path == '/db/birthdays/import':
                # POST { csv: "email,birthday\nfoo@x.com,15/05/1990\n..." }  →  bulk import
                ln = int(self.headers.get('Content-Length') or 0)
                body = json.loads(self.rfile.read(ln)) if ln else {}
                csv = body.get('csv') or ''
                if not csv: self._cjson({'error': 'csv required'}, 400); return
                import re as _re
                db = WOO_CACHE._load_customer_db()
                added = skipped = errors = 0
                for line in csv.splitlines():
                    line = line.strip()
                    if not line or line.lower().startswith('email'): continue   # skip header row
                    parts = [p.strip().strip('"') for p in line.split(',')]
                    if len(parts) < 2: errors += 1; continue
                    email = parts[0].lower(); bd_raw = parts[1]
                    m = _re.search(r'(\d{1,4})[-/.\s](\d{1,2})(?:[-/.\s](\d{1,4}))?', bd_raw)
                    if not m: errors += 1; continue
                    a, b_, c = m.group(1), m.group(2), m.group(3)
                    if c is None:
                        # Same day-first disambiguation as /db/birthday above.
                        x, y = int(a), int(b_); month, day = (x, y) if y > 12 else (y, x)
                    else:
                        if len(a) == 4:    month, day = int(b_), int(c)
                        elif len(c) == 4:  month, day = int(b_), int(a)
                        else:              month, day = int(b_), int(a)
                    if not (1 <= month <= 12 and 1 <= day <= 31): errors += 1; continue
                    norm = f"{month:02d}-{day:02d}"
                    rec = db.get(email)
                    if not rec:
                        bulk_rec = BULK_DB.get_by_email(email)
                        if not bulk_rec: skipped += 1; continue
                        rec = { 'name': bulk_rec.get('name',''), 'phone': bulk_rec.get('phone',''),
                                'city': bulk_rec.get('city',''), 'postcode': bulk_rec.get('postcode',''),
                                'orders': bulk_rec.get('orders', 0), 'total_spent': bulk_rec.get('revenue', 0),
                                'first_seen': bulk_rec.get('created',''), 'last_seen': bulk_rec.get('modified','') }
                    rec['birthday'] = norm
                    db[email] = rec
                    if BULK_DB._loaded:
                        b = BULK_DB.by_email.get(email)
                        if b: b['birthday'] = norm
                    added += 1
                WOO_CACHE._save_customer_db(db)
                self._cjson({'ok': True, 'added': added, 'skipped_no_match': skipped, 'errors': errors})
            elif path == '/db/stats':
                self._cjson(BULK_DB.stats())
            elif path.startswith('/db/email/'):
                email = urllib.parse.unquote(path[len('/db/email/'):])
                rec = BULK_DB.get_by_email(email)
                if rec: self._cjson(rec)
                else:   self._cjson({ 'error': 'no record for that email' }, 404)
            elif path == '/db/winnings/reload':
                BULK_DB.reload_winnings()
                self._cjson({ 'ok': True, 'with_winnings': len(BULK_DB.winnings) })
            elif path.startswith('/db/winnings12mo/'):
                # /cache/db/winnings12mo/<email>?from=YYYY-MM-DD&to=YYYY-MM-DD&range=7d|30d|90d|6m|12m|all
                email_part = path[len('/db/winnings12mo/'):]
                qs = dict(urllib.parse.parse_qsl(urllib.parse.urlparse(self.path).query))
                email = urllib.parse.unquote(email_part.split('?')[0])
                rng  = qs.get('range','')
                from_d = qs.get('from',''); to_d = qs.get('to','')
                # Preset → from-date
                if rng and not from_d:
                    from datetime import datetime, timedelta
                    today = datetime.now()
                    delta = { 'today':0, '7d':7, '30d':30, '60d':60, '90d':90, '6m':183, '12m':365, 'all':36500 }.get(rng)
                    if delta is not None:
                        from_d = (today - timedelta(days=delta)).strftime('%Y-%m-%d')
                if from_d or to_d:
                    agg = BULK_DB.get_winnings_range(email, from_d, to_d)
                    self._cjson({ 'email': email, 'range': rng or 'custom', 'from': from_d, 'to': to_d, **agg })
                else:
                    agg = BULK_DB.get_winnings_12mo(email)
                    self._cjson({
                        'email': email,
                        'comp_count': agg.get('comp_count', 0),
                        'tickets': agg.get('tickets', 0),
                        'spent': agg.get('spent', 0),
                        'comps': agg.get('comps', []),
                        'entries': agg.get('entries', [])[:50],
                    })
            elif path.startswith('/db/winnings/'):
                email = urllib.parse.unquote(path[len('/db/winnings/'):])
                entries = BULK_DB.get_winnings(email)
                self._cjson({
                    'email': email,
                    'count': len(entries),
                    'tickets_total': sum(int(e.get('tickets') or 0) for e in entries),
                    'spent_total': round(sum(float(e.get('total') or 0) for e in entries), 2),
                    'entries': entries,
                })
            elif path == '/birthdays':
                # /cache/birthdays                       → today
                # /cache/birthdays?range=today|week|month|all
                # /cache/birthdays?month=5&day=9         → exact date
                # /cache/birthdays?month=5               → whole month
                # /cache/birthdays?day=9                 → 9th of every month
                # /cache/birthdays?range=all             → every customer with ANY birthday
                from datetime import datetime, timedelta
                qs = dict(urllib.parse.parse_qsl(urllib.parse.urlparse(self.path).query))
                rng = (qs.get('range') or '').lower()
                month_q = qs.get('month'); day_q = qs.get('day')
                today = datetime.now()
                wanted = None             # None means "ALL" — match anything
                if rng == 'all':
                    wanted = None
                elif month_q and day_q:
                    wanted = { f"{int(month_q):02d}-{int(day_q):02d}" }
                elif month_q:
                    m = int(month_q); wanted = set()
                    for d in range(1, 32):
                        # datetime() raises for impossible dates (e.g. Feb 30) — skip them
                        try: datetime(2000, m, d); wanted.add(f"{m:02d}-{d:02d}")
                        except ValueError: pass
                elif day_q:
                    # Day-of-month across ALL months
                    wd = int(day_q); wanted = set()
                    for m in range(1, 13):
                        try: datetime(2000, m, wd); wanted.add(f"{m:02d}-{wd:02d}")
                        except ValueError: pass
                elif rng == 'week':
                    wanted = set()
                    for offset in range(0, 8):
                        d = today + timedelta(days=offset)
                        wanted.add(d.strftime('%m-%d'))
                elif rng == 'month':
                    m = today.month; wanted = set()
                    for d in range(1, 32):
                        try: datetime(2000, m, d); wanted.add(f"{m:02d}-{d:02d}")
                        except ValueError: pass
                else:
                    wanted = { today.strftime('%m-%d') }
                db = WOO_CACHE._load_customer_db()
                results = []
                for email, rec in db.items():
                    if email.startswith('_'): continue
                    bd = rec.get('birthday')
                    if not bd: continue
                    if wanted is None or bd in wanted:
                        results.append({ 'email': email, **rec })
                results.sort(key=lambda r: r.get('birthday',''))
                self._cjson({
                    'count': len(results),
                    'today': today.strftime('%m-%d'),
                    'wanted_days': ['(all)'] if wanted is None else sorted(wanted),
                    'results': results,
                    'note': 'Birthday is extracted from order meta_data (key contains birthday/dob/etc). Customers without a birthday on file are not returned.',
                })
            elif path == '/winners':
                self._cjson({ 'comps': WOO_CACHE.list_winner_files() })
            elif path == '/winners/generate':
                # POST { product_id: N } — manually trigger generation
                ln = int(self.headers.get('Content-Length') or 0)
                body = json.loads(self.rfile.read(ln)) if ln else {}
                pid = int(body.get('product_id') or 0)
                p = next((x for x in WOO_CACHE.products if x['id'] == pid), None)
                if not p:
                    self._cjson({ 'error': 'product not in cache (refresh products first)' }, 404); return
                # Force-delete any existing file (the 24h skip would prevent regen otherwise)
                try: os.remove(WOO_CACHE._winner_file(pid))
                except FileNotFoundError: pass
                WOO_CACHE._generate_winner_list(pid, p.get('name',''))
                # Reload winnings index so the new file is searchable immediately
                if BULK_DB._loaded: BULK_DB.reload_winnings()
                self._cjson({ 'ok': True, 'file': WOO_CACHE._winner_file(pid),
                              'exists': os.path.exists(WOO_CACHE._winner_file(pid)) })
            elif path.startswith('/winners/'):
                # /winners/<id>           - full winner file for one comp
                # /winners/search?q=...   - search across ALL comps
                rest = path[len('/winners/'):]
                qs = dict(urllib.parse.parse_qsl(urllib.parse.urlparse(self.path).query))
                if rest == 'search':
                    self._cjson({ 'q': qs.get('q',''), 'results': WOO_CACHE.search_winner_by(qs.get('q','')) })
                else:
                    try:
                        pid = int(rest)
                        d = WOO_CACHE.get_winner_file(pid)
                        if d: self._cjson(d)
                        else: self._cjson({ 'error': 'no winner file (comp may not have ended yet, or list not generated)', 'product_id': pid }, 404)
                    except ValueError:
                        self._cjson({ 'error': 'invalid product id' }, 400)
            elif path == '/proxies':
                # GET → list proxies + health · POST → { add: 'http://...' } / { remove: 'http://...' } / { reload: true }
                if self.command == 'GET':
                    self._cjson({ 'proxies': PROXY_POOL.status(), 'rate_limiter': RATE_LIMITER.stats() })
                else:
                    ln = int(self.headers.get('Content-Length') or 0)
                    body = json.loads(self.rfile.read(ln)) if ln else {}
                    if body.get('add'):
                        PROXY_POOL.add(body['add'].strip())
                        self._cjson({ 'ok': True, 'proxies': PROXY_POOL.status() })
                    elif body.get('remove'):
                        PROXY_POOL.remove(body['remove'].strip())
                        self._cjson({ 'ok': True, 'proxies': PROXY_POOL.status() })
                    elif body.get('reload'):
                        PROXY_POOL.reload()
                        self._cjson({ 'ok': True, 'proxies': PROXY_POOL.status() })
                    else:
                        self._cjson({ 'error': 'expected { add | remove | reload }' }, 400)
            elif path == '/limiter':
                self._cjson(RATE_LIMITER.stats())
            else:
                self.send_error(404, f"unknown cache path: {path}")
        except Exception as e:
            self._cjson({ 'error': str(e) }, 500)

    def _age(self, key):
        """Seconds since cache *key* was last refreshed, or None if never.

        FIX: `time` is not among the module's top-level imports (L21), so the
        original `time.time()` call would raise NameError unless `time` was
        imported somewhere later in the file — import it locally so this
        method is self-sufficient either way.
        """
        import time
        v = WOO_CACHE.last_refresh.get(key, 0)
        return (int(time.time()) - v) if v else None

    def _cjson(self, obj, code=200):
        """Serialise obj as JSON and send it with permissive CORS headers."""
        payload = json.dumps(obj).encode()
        self.send_response(code)
        for name, value in (
            ('Content-Type', 'application/json'),
            ('Access-Control-Allow-Origin', '*'),
            ('Access-Control-Allow-Headers', '*'),
            ('Content-Length', str(len(payload))),
        ):
            self.send_header(name, value)
        self.end_headers()
        self.wfile.write(payload)

    def _wd_snapshot(self):
        """GET /wd/snapshot — one watchdog pass with send=False: same checks as
        run-now but never fires a WhatsApp message. Used by the bot to answer
        questions with live data (revenue, comps, last hour orders)."""
        try:
            return self._json(200, WD_DAEMON.run_once(send=False))
        except Exception as e:
            return self._json(500, {"error": str(e)})

    def _wd_history_get(self):
        """GET /wd/history — the stored watchdog run history (capped at 30)."""
        history = self._wd_load_hist()
        return self._json(200, {"history": history})

    # ---------- WhatsApp inbound + outbound + auto-reply ----------
    def _wa(self):
        """Dispatch /wa/<op> to the matching WhatsApp handler (404 otherwise)."""
        # /wa/webhook | /wa/inbox | /wa/send | /wa/auto-reply | /wa/clear
        op = self.path[len(WA_PREFIX) + 1:].split("/", 2)[0].split('?')[0]
        dispatch = {
            'webhook':    self._wa_webhook,
            'inbox':      self._wa_inbox_get,
            'send':       self._wa_send,
            'auto-reply': self._wa_auto_reply_config,
            'clear':      self._wa_clear,
        }
        handler = dispatch.get(op)
        if handler is None:
            return self._json(404, {"error": "unknown wa endpoint", "got": op})
        return handler()

    def _wa_load_inbox(self):
        """Read the stored inbound WA messages; empty list when missing/corrupt."""
        try:
            with open(WA_INBOX_FILE, 'r') as f:
                return json.load(f)
        except Exception:
            return []
    def _wa_save_inbox(self, msgs):
        """Best-effort write of the inbox, keeping at most the first 500 messages."""
        try:
            with open(WA_INBOX_FILE, 'w') as f:
                json.dump(msgs[:500], f)   # cap to 500
        except Exception as e:
            sys.stderr.write(f"wa-inbox save error: {e}\n")
    def _wa_load_cfg(self):
        """Read the WA config; falls back to a blank default structure."""
        try:
            with open(WA_CFG_FILE, 'r') as f:
                return json.load(f)
        except Exception:
            return {"auto_reply_enabled": False, "system_prompt": "", "claude_key": "",
                    "twilio_sid": "", "twilio_token": "", "twilio_wa_from": ""}
    def _wa_save_cfg(self, cfg):
        """Best-effort write of the WA config; failures logged, never raised."""
        try:
            with open(WA_CFG_FILE, 'w') as f:
                json.dump(cfg, f)
        except Exception as e:
            sys.stderr.write(f"wa-cfg save error: {e}\n")

    def _wa_webhook(self):
        """Public POST endpoint for the WA provider to deliver incoming messages.
        Accepts Twilio's form-encoded webhook OR 360dialog/Meta WBA's JSON. Stores
        a normalised record and (if auto-reply is on) replies via Claude+Twilio."""
        # The Content-Type picks the parser: Twilio posts form-encoded, the
        # Meta/360dialog shape arrives as JSON (handled by the else branch).
        ctype = (self.headers.get("Content-Type") or "").split(";")[0].lower()
        length = int(self.headers.get("Content-Length", "0") or 0)
        raw = self.rfile.read(length) if length > 0 else b''
        msg = None   # normalised record: provider/from/to/body/name/msg_id/raw
        try:
            if "application/x-www-form-urlencoded" in ctype:
                # Twilio
                from urllib.parse import parse_qs
                # parse_qs returns lists; keep only the first value per field.
                d = {k: v[0] for k, v in parse_qs(raw.decode('utf-8', errors='replace')).items()}
                msg = {
                    "provider":"twilio",
                    "from":   (d.get("From")    or "").replace("whatsapp:", ""),
                    "to":     (d.get("To")      or "").replace("whatsapp:", ""),
                    "body":   d.get("Body", ""),
                    "name":   d.get("ProfileName", ""),
                    "msg_id": d.get("SmsSid") or d.get("MessageSid", ""),
                    "raw":    d,
                }
            else:
                d = json.loads(raw or b'{}')
                # 360dialog / Meta WBA shape: entry[0].changes[0].value holds
                # messages / contacts / metadata; each `or [{}]` guards a
                # missing or empty list.
                ent = (d.get("entry") or [{}])[0]
                ch  = (ent.get("changes") or [{}])[0].get("value", {})
                m   = (ch.get("messages") or [{}])[0] if ch.get("messages") else {}
                contacts = ch.get("contacts") or [{}]
                msg = {
                    "provider": "meta",
                    "from":   m.get("from") or d.get("from", ""),
                    "to":     ((ch.get("metadata") or {}).get("display_phone_number")) or d.get("to", ""),
                    "body":   ((m.get("text") or {}).get("body")) or d.get("body", ""),
                    "name":   (contacts[0].get("profile") or {}).get("name", ""),
                    "msg_id": m.get("id") or d.get("id", ""),
                    "raw":    d,
                }
        except Exception as e:
            return self._json(400, {"error": f"could not parse webhook body: {e}"})

        # Payloads with no sender or no body (delivery/read receipts and other
        # status callbacks) are acknowledged with 200 but not stored.
        if not msg or not (msg.get("from") and msg.get("body")):
            return self._json(200, {"ok": True, "ignored": True})    # status updates etc.

        msg["t"] = int(__import__("time").time() * 1000)   # epoch millis
        msg["dir"] = "in"
        inbox = self._wa_load_inbox()
        inbox.insert(0, msg)   # newest-first ordering
        self._wa_save_inbox(inbox)
        sys.stderr.write(f"[wa] in: {msg.get('from')} → {msg.get('body','')[:80]}\n")

        # ============ Watchdog status command ============
        # If the message body is a status keyword AND the sender phone is whitelisted,
        # run the watchdog and reply with the current summary instead of the generic auto-reply.
        wd_cfg = self._wd_load_cfg()
        body_lc = (msg.get("body","") or "").strip().lower()
        if body_lc in ("status","watchdog","report","check","health"):
            allowed_phones = [p.strip().replace(" ","") for p in (wd_cfg.get("allowed_phones") or "").split(",") if p.strip()]
            from_clean = (msg.get("from","") or "").strip().replace(" ","")
            # Empty whitelist means anyone may ask. Otherwise compare on the
            # last 9 digits so "+447..." and "07..." spellings still match.
            if not allowed_phones or any(from_clean.endswith(p[-9:]) for p in allowed_phones):
                try:
                    result = WD_DAEMON.run_once(send=False)   # read-only tick, no WA ping
                    summary = result.get('summary', '🐶 Watchdog summary unavailable')
                    self._wa_twilio_send(msg["from"], "🐶 " + summary[:1400], {
                        "twilio_sid":   wd_cfg.get("twilio_sid"),
                        "twilio_token": wd_cfg.get("twilio_token"),
                        "twilio_wa_from": wd_cfg.get("twilio_wa_from"),
                    })
                    # Record the outbound reply in the inbox as well (dir=out).
                    inbox = self._wa_load_inbox()
                    inbox.insert(0, {"provider":"twilio","from":wd_cfg.get("twilio_wa_from",""),"to":msg["from"],"body":"🐶 " + summary[:300],"t":int(__import__("time").time()*1000),"dir":"out","auto":True,"watchdog":True})
                    self._wa_save_inbox(inbox)
                    return self._json(200, {"ok": True, "watchdog_replied": True})
                except Exception as e:
                    # On failure we fall through to the generic auto-reply below.
                    sys.stderr.write(f"[wa] watchdog reply failed: {e}\n")

        # Auto-reply (best effort, fire-and-forget on this thread)
        cfg = self._wa_load_cfg()
        if cfg.get("auto_reply_enabled") and cfg.get("claude_key") and cfg.get("twilio_sid") and cfg.get("twilio_token") and cfg.get("twilio_wa_from"):
            try:
                reply = self._wa_claude_reply(msg["body"], msg.get("name"), cfg)
                if reply:
                    self._wa_twilio_send(msg["from"], reply, cfg)
                    inbox = self._wa_load_inbox()
                    inbox.insert(0, {"provider":"twilio","from":cfg["twilio_wa_from"],"to":msg["from"],"body":reply,"t":int(__import__("time").time()*1000),"dir":"out","auto":True})
                    self._wa_save_inbox(inbox)
            except Exception as e:
                sys.stderr.write(f"[wa] auto-reply failed: {e}\n")
        return self._json(200, {"ok": True})

    def _wa_inbox_get(self):
        """Return the newest 200 stored inbox messages."""
        recent = self._wa_load_inbox()[:200]
        return self._json(200, {"messages": recent})

    def _wa_send(self):
        # Browser POSTs { provider, to, body, twilio_sid, twilio_token, twilio_from }
        length = int(self.headers.get("Content-Length", "0") or 0)
        try:
            payload = json.loads(self.rfile.read(length))
        except Exception as e:
            return self._json(400, {"error": f"bad json: {e}"})
        if (payload.get("provider") or "twilio") != "twilio":
            return self._json(400, {"error": "only twilio is wired server-side; for Meta WBA call the Graph API directly from the browser"})
        try:
            sid = self._wa_twilio_send(payload.get("to"), payload.get("body"), {
                "twilio_sid":     payload.get("twilio_sid"),
                "twilio_token":   payload.get("twilio_token"),
                "twilio_wa_from": payload.get("twilio_from"),
            })
            import time
            outbound = {
                "provider": "twilio",
                "from":     payload.get("twilio_from", ""),
                "to":       payload.get("to", ""),
                "body":     payload.get("body", ""),
                "t":        int(time.time() * 1000),
                "dir":      "out",
                "msg_id":   sid,
            }
            inbox = self._wa_load_inbox()
            inbox.insert(0, outbound)
            self._wa_save_inbox(inbox)
            return self._json(200, {"ok": True, "sid": sid})
        except Exception as e:
            return self._json(500, {"error": str(e)})

    def _wa_twilio_send(self, to, body, cfg):
        from urllib.parse import urlencode
        from base64 import b64encode
        sid = cfg.get("twilio_sid"); tok = cfg.get("twilio_token"); from_ = cfg.get("twilio_wa_from")
        if not (sid and tok and from_): raise Exception("twilio creds missing")
        if not (to and body): raise Exception("to + body required")
        to_ = to if to.startswith("whatsapp:") else "whatsapp:" + to
        from_n = from_ if from_.startswith("whatsapp:") else "whatsapp:" + from_
        data = urlencode({"From": from_n, "To": to_, "Body": body[:1500]}).encode()
        auth = b64encode(f"{sid}:{tok}".encode()).decode()
        req = urllib.request.Request(
            f"https://api.twilio.com/2010-04-01/Accounts/{sid}/Messages.json",
            data=data, method="POST",
            headers={"Authorization": "Basic " + auth, "Content-Type": "application/x-www-form-urlencoded"},
        )
        with urllib.request.urlopen(req, timeout=15) as r:
            d = json.loads(r.read())
            return d.get("sid")

    def _wa_claude_reply(self, body, name, cfg):
        """Ask Claude for a short WhatsApp reply; returns text, or None if the
        API response can't be parsed."""
        default_prompt = "You are a friendly Nitrous Competitions WhatsApp assistant. Keep replies under 300 chars. Always include a free postal entry mention. Never make legal claims."
        sys_prompt = cfg.get("system_prompt") or default_prompt
        who = ' ' + name if name else ''
        user_msg = f"Customer{who} just sent: {body!r}\n\nWrite a single brief, helpful WhatsApp reply (≤ 300 chars). If you can't help, point them to support@nitrouscompetitions.com."
        payload = json.dumps({
            "model": "claude-haiku-4-5",
            "max_tokens": 256,
            "system": sys_prompt,
            "messages": [{"role": "user", "content": user_msg}],
        }).encode()
        req = urllib.request.Request(
            "https://api.anthropic.com/v1/messages",
            data=payload,
            method="POST",
            headers={
                "x-api-key": cfg["claude_key"],
                "anthropic-version": "2023-06-01",
                "content-type": "application/json",
            },
        )
        with urllib.request.urlopen(req, timeout=20) as resp:
            parsed = json.loads(resp.read())
        try:
            return parsed["content"][0]["text"][:1500]
        except Exception:
            return None

    def _wa_auto_reply_config(self):
        # POST: { auto_reply_enabled, system_prompt, claude_key, twilio_sid, twilio_token, twilio_wa_from }
        length = int(self.headers.get("Content-Length", "0") or 0)
        try:
            incoming = json.loads(self.rfile.read(length) or b'{}')
        except Exception as e:
            return self._json(400, {"error": f"bad json: {e}"})
        cfg = self._wa_load_cfg()
        cfg.update(incoming)
        self._wa_save_cfg(cfg)
        # Echo the merged config back minus anything secret-shaped.
        secret_markers = ("token", "key", "sid")
        safe = {k: v for k, v in cfg.items() if not any(m in k for m in secret_markers)}
        safe["has_claude_key"]   = bool(cfg.get("claude_key"))
        safe["has_twilio_creds"] = bool(cfg.get("twilio_sid") and cfg.get("twilio_token") and cfg.get("twilio_wa_from"))
        return self._json(200, safe)

    def _wa_clear(self):
        """Wipe the stored WA inbox."""
        self._wa_save_inbox([])
        return self._json(200, {"ok": True})

    # ---------- GA4 Realtime via Service Account ----------
    # Token cache so we don't mint a new JWT every 30s (lifetime ~1h)
    # Class-level dict, so it is shared across handler instances (a new handler
    # is created per request).
    _ga_token_cache = {}   # key: sha256(SA email) -> { token, exp }

    def _ga_realtime(self):
        """Proxy a GA4 Realtime activeUsers query: /ga/realtime/{propertyId}.

        The browser supplies the full service-account JSON in the
        X-GA-Service-Account header; we mint an OAuth access token via
        google.auth and call the Analytics Data API server-side. Responds
        {"active_users": N, "raw": <full GA response>} or a JSON error.

        Fix: _ga_token_cache was declared (and documented) but never consulted,
        so every poll minted a fresh JWT. Tokens are now cached per service
        account for ~50 min and dropped on a 401 from GA.
        """
        import hashlib, time

        parts = self.path[len(GA_PREFIX) + 1:].split("/", 2)
        if len(parts) < 2 or parts[0] != "realtime" or not parts[1]:
            return self._json(400, {"error": "expected /ga/realtime/{propertyId}"})
        prop_id = parts[1].split("?")[0]   # tolerate a stray query string

        sa_json = self.headers.get("X-GA-Service-Account")
        if not sa_json:
            return self._json(401, {"error": "missing X-GA-Service-Account header (paste the full service-account JSON)"})
        try:
            sa = json.loads(sa_json)
        except Exception as e:
            return self._json(400, {"error": f"service-account JSON invalid: {e}"})

        # Reuse a cached token for this service account when still fresh.
        cache_key = hashlib.sha256((sa.get("client_email") or "").encode("utf-8")).hexdigest()
        now = time.time()
        cached = self._ga_token_cache.get(cache_key)
        token = cached["token"] if cached and cached.get("exp", 0) > now else None

        if token is None:
            try:
                from google.oauth2 import service_account
                from google.auth.transport.requests import Request as GAuthReq
            except Exception as e:
                return self._json(500, {"error": f"google.auth not importable: {e}"})
            try:
                creds = service_account.Credentials.from_service_account_info(
                    sa, scopes=["https://www.googleapis.com/auth/analytics.readonly"]
                )
                creds.refresh(GAuthReq())
                token = creds.token
            except Exception as e:
                return self._json(500, {"error": f"failed to mint access token: {e}"})
            # Access tokens live ~1h; keep for 50 min to stay clear of expiry.
            self._ga_token_cache[cache_key] = {"token": token, "exp": now + 3000}

        # Call GA4 Realtime: activeUsers over the last 29 minutes.
        try:
            target = f"https://analyticsdata.googleapis.com/v1beta/properties/{prop_id}:runRealtimeReport"
            body = json.dumps({
                "metrics": [{"name": "activeUsers"}],
                "minuteRanges": [{"startMinutesAgo": 29, "endMinutesAgo": 0}],
            }).encode()
            req = urllib.request.Request(
                target, data=body, method="POST",
                headers={"Authorization": f"Bearer {token}", "Content-Type": "application/json"},
            )
            with urllib.request.urlopen(req, timeout=15) as r:
                resp = json.loads(r.read())
                # Extract activeUsers — first row, first metric
                active = 0
                rows = resp.get("rows") or []
                if rows:
                    try:
                        active = int(rows[0]["metricValues"][0]["value"])
                    except Exception:
                        active = 0
                return self._json(200, {"active_users": active, "raw": resp})
        except urllib.error.HTTPError as e:
            # A 401 can mean the cached token was revoked early — drop it so
            # the next poll mints a fresh one instead of failing repeatedly.
            if e.code == 401:
                self._ga_token_cache.pop(cache_key, None)
            return self._json(e.code, {"error": "GA4 Realtime error",
                                       "detail": e.read().decode("utf-8", errors="replace")[:300]})
        except Exception as e:
            return self._json(502, {"error": f"GA4 call failed: {type(e).__name__}: {e}"})

    # ---------- Live-site proxy for the iPhone preview ----------
    def _proxy_site(self):
        """Fetch a page from the live site and relay it for the iPhone-preview iframe.

        Strips X-Frame-Options / CSP so the iframe may embed it, injects a
        <base> tag so relative asset URLs resolve against the live host, and
        routes through the shared rate limiter + rotating proxy pool to dodge
        Wordfence 403s.

        Fix: the upstream Content-Length header used to be forwarded unchanged
        while the body was rewritten by the <base> injection, so the advertised
        length no longer matched the bytes written (client truncation/stall).
        We now read + rewrite the body first, drop the upstream length, and
        emit our own.
        """
        path = self.path[len(SITE_PREFIX):] or "/"
        target = f"https://{SITE_HOST}{path}"
        req = urllib.request.Request(
            target, method="GET",
            headers={
                # Mobile-Safari UA so the site serves its mobile layout for the preview.
                "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 17_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.0 Mobile/15E148 Safari/604.1",
                "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8",
                "Accept-Language": "en-GB,en;q=0.9",
            },
        )
        # Headers we must NOT forward: framing/CSP blockers, hop-by-hop headers,
        # upstream CORS (we emit our own), and content-length (body may be rewritten).
        STRIP = {
            "x-frame-options", "content-security-policy", "content-security-policy-report-only",
            "transfer-encoding", "content-encoding", "connection", "content-length",
            "access-control-allow-origin", "access-control-allow-headers", "access-control-allow-methods",
        }
        # 🚦 Rate-limit + 🌐 proxy-pool rotation — same machinery the Woo cache uses.
        # This is what stops the live-site preview from getting 403'd by Wordfence.
        # NameError guards keep this handler working even if those globals are absent.
        try: RATE_LIMITER.acquire()
        except NameError: pass
        proxy_url = None
        try: proxy_url = PROXY_POOL.next()
        except NameError: pass

        def _open():
            # Route via the rotating proxy when one is available, else go direct.
            if proxy_url:
                ph = urllib.request.ProxyHandler({ 'http': proxy_url, 'https': proxy_url })
                return urllib.request.build_opener(ph).open(req, timeout=30)
            return urllib.request.urlopen(req, timeout=30)

        try:
            with _open() as r:
                data = r.read()
                ct = r.getheader("content-type", "text/html")
                # Inject <base> so relative URLs resolve to the live site (CSS, images, fonts, JS)
                if "text/html" in ct.lower() and b"<head" in data[:5000]:
                    base_tag = f'<base href="https://{SITE_HOST}/">'.encode("utf-8")
                    # Insert right after the first <head ...>
                    idx_open = data.lower().find(b"<head")
                    if idx_open != -1:
                        idx_close = data.find(b">", idx_open)
                        if idx_close != -1:
                            data = data[:idx_close+1] + base_tag + data[idx_close+1:]
                self.send_response(r.status)
                for k, v in r.getheaders():
                    if k.lower() in STRIP:
                        continue
                    self.send_header(k, v)
                # Correct length for the (possibly rewritten) body.
                self.send_header("Content-Length", str(len(data)))
                self._cors()
                self.end_headers()
                self.wfile.write(data)
        except urllib.error.HTTPError as e:
            # If the chosen proxy got rate-limited, mark it bad so the next request rotates
            if e.code in (403, 429) and proxy_url:
                try: PROXY_POOL.mark_bad(proxy_url, cooldown_sec=300)
                except NameError: pass
            self.send_response(e.code)
            self._cors()
            self.send_header("content-type", e.headers.get("content-type", "text/html"))
            self.end_headers()
            try:
                self.wfile.write(e.read())
            except Exception:
                pass
        except Exception as e:
            # Any other failure (DNS, timeout, dead proxy): brief cooldown + 502.
            if proxy_url:
                try: PROXY_POOL.mark_bad(proxy_url, cooldown_sec=120)
                except NameError: pass
            self.send_response(502)
            self._cors()
            self.send_header("content-type", "text/plain")
            self.end_headers()
            self.wfile.write(f"site proxy error: {e}".encode())

    # ---------- Klaviyo passthrough proxy ----------
    def _proxy_klaviyo(self):
        """Replay the current request against a.klaviyo.com and relay the response.

        Klaviyo blocks browser CORS, so the dashboard calls /klaviyo/* and this
        server forwards the call with the auth headers intact.
        """
        forward = ("Authorization", "revision", "Content-Type", "Accept")
        drop = ("transfer-encoding", "content-encoding", "connection",
                "access-control-allow-origin", "access-control-allow-headers",
                "access-control-allow-methods")
        path = self.path[len(KLAVIYO_PREFIX):] or "/"
        length = int(self.headers.get("Content-Length", "0") or 0)
        payload = self.rfile.read(length) if length > 0 else None
        req = urllib.request.Request(f"https://{KLAVIYO_HOST}{path}", data=payload, method=self.command)
        for name in forward:
            value = self.headers.get(name)
            if value:
                req.add_header(name, value)
        try:
            with urllib.request.urlopen(req, timeout=30) as upstream:
                self.send_response(upstream.status)
                for k, v in upstream.getheaders():
                    if k.lower() not in drop:
                        self.send_header(k, v)
                self._cors()
                self.end_headers()
                self.wfile.write(upstream.read())
        except urllib.error.HTTPError as e:
            self.send_response(e.code)
            self._cors()
            self.send_header("content-type", e.headers.get("content-type", "application/json"))
            self.end_headers()
            self.wfile.write(e.read())
        except Exception as e:
            self.send_response(502)
            self._cors()
            self.send_header("content-type", "text/plain")
            self.end_headers()
            self.wfile.write(f"proxy error: {e}".encode())

    # ---------- AWS service-call gateway ----------
    def _aws(self):
        """Gateway for browser-initiated AWS calls: /aws/{service}/{operation}.

        Credentials arrive per request via X-AWS-* headers (never stored here);
        boto3 does the SigV4 signing the browser cannot. Supported services:
        ses, sns, and sts (identity probe). Returns the boto3 result passed
        through _normalise(), or a JSON error body.
        """
        # /aws/{service}/{operation}  e.g. /aws/ses/quota, /aws/sns/publish
        parts = self.path[len(AWS_PREFIX) + 1:].split("/", 2)
        if len(parts) < 2 or not parts[0] or not parts[1]:
            return self._json(400, {"error": "expected /aws/{service}/{operation}"})
        service, operation = parts[0].lower(), parts[1].lower()

        # Per-request credentials from the browser; region defaults to eu-west-2.
        akid   = self.headers.get("X-AWS-Access-Key-Id")
        sak    = self.headers.get("X-AWS-Secret-Access-Key")
        region = self.headers.get("X-AWS-Region") or "eu-west-2"
        if not akid or not sak:
            return self._json(401, {
                "error": "missing X-AWS-Access-Key-Id / X-AWS-Secret-Access-Key headers"
            })

        # Optional JSON request body carrying the operation's parameters.
        body = {}
        length = int(self.headers.get("Content-Length", "0") or 0)
        if length > 0:
            try:
                raw = self.rfile.read(length)
                body = json.loads(raw) if raw else {}
            except Exception as e:
                return self._json(400, {"error": f"invalid JSON body: {e}"})

        try:
            # _get_boto3 is defined elsewhere in this file (presumably a lazy
            # import wrapper — TODO confirm).
            boto3 = _get_boto3()
            client = boto3.client(
                service,
                region_name=region,
                aws_access_key_id=akid,
                aws_secret_access_key=sak,
            )
        except Exception as e:
            return self._json(500, {"error": f"boto3 client init: {type(e).__name__}: {e}"})

        try:
            if service == "ses":
                result = self._aws_ses(client, operation, body)
            elif service == "sns":
                result = self._aws_sns(client, operation, body)
            elif service == "sts":
                # Cheap probe: returns the caller's account/ARN/UserId
                result = client.get_caller_identity()
            else:
                return self._json(404, {"error": f"unknown service: {service}"})
            return self._json(200, _normalise(result))
        except Exception as e:
            # botocore.exceptions.ClientError carries response['Error']
            # — surface its Code/Message with the upstream HTTP status code.
            resp = getattr(e, "response", None)
            if isinstance(resp, dict) and "Error" in resp:
                err = resp["Error"]
                http_status = resp.get("ResponseMetadata", {}).get("HTTPStatusCode", 400)
                return self._json(http_status, {
                    "error": err.get("Code", "AWSError"),
                    "message": err.get("Message", str(e)),
                })
            return self._json(500, {"error": f"{type(e).__name__}: {e}"})

    def _aws_ses(self, client, operation, body):
        if operation == "quota":
            quota = client.get_send_quota()
            try:
                ids = client.list_identities(MaxItems=50).get("Identities", [])
            except Exception:
                ids = []
            verifs = (client.get_identity_verification_attributes(Identities=ids)
                      .get("VerificationAttributes", {})) if ids else {}
            return {
                "quota": quota,
                "identities": [
                    {"email": i,
                     "verified": verifs.get(i, {}).get("VerificationStatus") == "Success",
                     "status": verifs.get(i, {}).get("VerificationStatus", "Unknown")}
                    for i in ids
                ],
            }
        if operation == "send":
            src = body.get("from")
            to  = body.get("to")
            subj = body.get("subject", "")
            html = body.get("html"); text = body.get("text")
            if not src or not to or (not html and not text):
                raise ValueError("send requires from + to + (html or text)")
            if isinstance(to, str): to = [to]
            msg = {"Subject": {"Data": subj}, "Body": {}}
            if html: msg["Body"]["Html"] = {"Data": html}
            if text: msg["Body"]["Text"] = {"Data": text}
            args = {"Source": src, "Destination": {"ToAddresses": to}, "Message": msg}
            if body.get("reply_to"):
                args["ReplyToAddresses"] = [body["reply_to"]]
            return client.send_email(**args)
        if operation == "verify-email":
            email = body.get("email")
            if not email: raise ValueError("verify-email requires { email }")
            client.verify_email_identity(EmailAddress=email)
            return {"verification_email_sent": email}
        if operation == "send-statistics":
            return client.get_send_statistics()
        raise ValueError(f"unknown SES operation: {operation}")

    def _aws_sns(self, client, operation, body):
        if operation == "sms-attributes":
            return client.get_sms_attributes()
        if operation == "publish":
            phone   = body.get("phone_number")
            message = body.get("message")
            if not phone or not message:
                raise ValueError("publish requires { phone_number, message }")
            args = {"PhoneNumber": phone, "Message": message}
            attrs = {}
            if body.get("sender_id"):
                attrs["AWS.SNS.SMS.SenderID"] = {"DataType": "String",
                                                 "StringValue": str(body["sender_id"])}
            attrs["AWS.SNS.SMS.SMSType"] = {"DataType": "String",
                                            "StringValue": body.get("sms_type", "Transactional")}
            if attrs:
                args["MessageAttributes"] = attrs
            return client.publish(**args)
        if operation == "list-topics":
            return client.list_topics()
        raise ValueError(f"unknown SNS operation: {operation}")

    def _json(self, status, data):
        """Serialise *data* and send it as a non-cached JSON response."""
        payload = json.dumps(data).encode()
        self.send_response(status)
        self._cors()
        for name, value in (("Content-Type", "application/json"),
                            ("Content-Length", str(len(payload))),
                            ("Cache-Control", "no-store")):
            self.send_header(name, value)
        self.end_headers()
        self.wfile.write(payload)

    # ---------- HTTP method dispatch ----------
    def do_GET(self):
        """Route GETs: /health first, then the API prefixes, else static files."""
        # /health — supervisor heartbeat (uptime, daemons alive, last heal, recent incidents)
        if self.path == '/health' or self.path.startswith('/health?'):
            try:
                body = json.dumps(SUPERVISOR.status()).encode()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-store')
                self.send_header('Content-Length', str(len(body)))
                self.end_headers()
                self.wfile.write(body)
            except Exception as e:
                self.send_error(500, f"health err: {e}")
            return
        # Prefix routes, checked in declaration order.
        routes = (
            (self._is_klaviyo, self._proxy_klaviyo),
            (self._is_aws,     self._aws),
            (self._is_site,    self._proxy_site),
            (self._is_ga,      self._ga_realtime),
            (self._is_wa,      self._wa),
            (self._is_wd,      self._wd),
            (self._is_wa_bot,  self._wa_bot),
            (self._is_cache,   self._cache_handle),
        )
        for matches, handle in routes:
            if matches():
                return handle()
        return super().do_GET()

    def do_POST(self):
        """Route POSTs to the API prefixes; anything else is rejected with 405."""
        routes = (
            (self._is_klaviyo, self._proxy_klaviyo),
            (self._is_aws,     self._aws),
            (self._is_ga,      self._ga_realtime),
            (self._is_wa,      self._wa),
            (self._is_wd,      self._wd),
            (self._is_wa_bot,  self._wa_bot),
            (self._is_cache,   self._cache_handle),
        )
        for matches, handle in routes:
            if matches():
                return handle()
        self.send_error(405, "POST only on /klaviyo/*, /aws/*, /ga/*, /wa/*, /wd/*, /wa-bot/* or /cache/*")

    def do_PUT(self):
        """PUT is only supported on the Klaviyo and AWS passthroughs."""
        for matches, handle in ((self._is_klaviyo, self._proxy_klaviyo),
                                (self._is_aws, self._aws)):
            if matches():
                return handle()
        self.send_error(405)

    def do_PATCH(self):
        """PATCH is only supported on the Klaviyo and AWS passthroughs."""
        for matches, handle in ((self._is_klaviyo, self._proxy_klaviyo),
                                (self._is_aws, self._aws)):
            if matches():
                return handle()
        self.send_error(405)

    def do_DELETE(self):
        """DELETE is only supported on the Klaviyo and AWS passthroughs."""
        for matches, handle in ((self._is_klaviyo, self._proxy_klaviyo),
                                (self._is_aws, self._aws)):
            if matches():
                return handle()
        self.send_error(405)

    def end_headers(self):
        """Append a no-store header to API responses before closing headers."""
        no_cache = (self._is_klaviyo, self._is_aws, self._is_ga, self._is_wd)
        if any(matches() for matches in no_cache):
            self.send_header("Cache-Control", "no-store")
        super().end_headers()

    # site proxy doesn't override end_headers because we set headers in _proxy_site directly

    def log_message(self, fmt, *a):
        """One-line request log on stderr: [time] METHOD /path -> status."""
        status = a[1] if len(a) > 1 else '?'
        sys.stderr.write(f"[{self.log_date_time_string()}] {self.command} {self.path} -> {status}\n")


class WatchdogDaemon:
    """Background thread that runs the watchdog checks every N seconds.
    Loads config from watchdog-config.json on every tick (so live updates from the UI
    are picked up without restart). Sends WhatsApp pings to staff via Twilio."""
    def __init__(self):
        # Thread machinery only — no config is read until the loop first ticks.
        import threading
        self._thread = None               # worker thread, created lazily by start()
        self._stop = threading.Event()    # set by stop() so the loop exits promptly
        self.running = False              # public flag the HTTP handlers can inspect
        self.last_run = 0                 # unix ts of the last completed run_once()
        self.next_run = 0                 # unix ts when the next tick is scheduled

    def reload(self):
        """Re-read config and start/stop the daemon to match its 'enabled' flag."""
        cfg = self._cfg()
        if cfg.get('enabled') and not self.running: self.start()
        elif not cfg.get('enabled') and self.running: self.stop()

    def start(self):
        """Spawn the daemon thread (idempotent — a second call is a no-op)."""
        import threading
        if self.running: return
        self._stop.clear()
        self._thread = threading.Thread(target=self._loop, daemon=True)
        self._thread.start()
        self.running = True
        sys.stderr.write("[watchdog] daemon started\n")

    def stop(self):
        """Signal the loop to exit; the chunked sleep notices within ~5 seconds."""
        self.running = False
        self._stop.set()
        sys.stderr.write("[watchdog] daemon stopped\n")

    def _cfg(self):
        """Load watchdog-config.json; an unreadable or missing file reads as {} (disabled)."""
        try:
            with open(WD_CFG_FILE, 'r') as f: return json.load(f)
        except Exception: return {}

    def _hist_save(self, entry):
        """Prepend a tick result to the history file, keeping only the newest 30."""
        try:
            with open(WD_HIST_FILE, 'r') as f: h = json.load(f)
        except Exception: h = []
        h.insert(0, entry)
        try:
            with open(WD_HIST_FILE, 'w') as f: json.dump(h[:30], f)
        except Exception: pass

    def _loop(self):
        """Tick forever: run checks, then sleep `interval_sec` (clamped to >= 60s)."""
        import time
        while not self._stop.is_set():
            cfg = self._cfg()
            if not cfg.get('enabled'):
                # Config flipped off between ticks — wind the daemon down.
                self.running = False; return
            try:
                self.run_once(send=True)
            except Exception as e:
                sys.stderr.write(f"[watchdog] tick error: {e}\n")
            interval = max(60, int(cfg.get('interval_sec') or 300))
            self.next_run = int(time.time()) + interval
            # Sleep in 5s chunks so stop() takes effect quickly
            slept = 0
            while slept < interval and not self._stop.is_set():
                time.sleep(min(5, interval - slept))
                slept += 5

    def run_once(self, send=True):
        """One full watchdog pass: site uptime probe, Woo revenue/pace pull,
        Claude-written summaries (staff-redacted and directors-full), then an
        optional WhatsApp send via the local WA bot. Returns the result dict
        that is also written to the history file."""
        import time
        cfg = self._cfg()
        checks = {}
        # 1. Site uptime
        try:
            req = urllib.request.Request(f"https://{SITE_HOST}/", headers={"User-Agent":"NitrousWatchdog/1.0"})
            t0 = time.time()
            with urllib.request.urlopen(req, timeout=15) as r:
                checks['site_ok'] = (200 <= r.status < 400)
                checks['site_status'] = r.status
                checks['site_ms'] = int((time.time()-t0)*1000)
        except Exception as e:
            checks['site_ok'] = False; checks['site_status'] = 0; checks['site_ms'] = -1
            checks['site_err'] = str(e)[:120]

        # 2. Today's revenue (Woo) — only if creds present
        rev = orders_today = 0; last_hr = 0
        if cfg.get('woo_ck') and cfg.get('woo_cs'):
            try:
                from base64 import b64encode
                from datetime import datetime, timezone, timedelta
                # WooCommerce REST is strict — 'after' must be ISO with Z suffix, NOT +00:00
                today_start = datetime.now(timezone.utc).replace(hour=0,minute=0,second=0,microsecond=0).strftime('%Y-%m-%dT%H:%M:%SZ')
                woo_url = (cfg.get('woo_url') or f"https://{SITE_HOST}/wp-json/wc/v3").rstrip('/')
                auth = b64encode(f"{cfg['woo_ck']}:{cfg['woo_cs']}".encode()).decode()
                hr_ago = (datetime.now(timezone.utc) - timedelta(hours=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
                # Fetch today's orders across the 3 live statuses
                # cap 15 pages × 100 = 1500 per status (4500 total) — big enough for any day
                for st in ('completed','processing','on-hold'):
                    page = 1
                    while page <= 15:
                        qs = urllib.parse.urlencode({"per_page":100, "after":today_start, "status":st, "page":page})
                        req = urllib.request.Request(f"{woo_url}/orders?{qs}", headers={"Authorization":"Basic "+auth, "User-Agent":"NitrousWatchdog/1.0"})
                        try:
                            with urllib.request.urlopen(req, timeout=20) as r:
                                d = json.loads(r.read())
                        except urllib.error.HTTPError as he:
                            checks['woo_err'] = f"{st} p{page}: HTTP {he.code}"
                            d = []
                        if not d: break
                        for o in d:
                            try: rev += float(o.get('total') or 0)
                            except: pass
                            # Lexicographic compare of ISO-8601 strings; assumes Woo's
                            # date_created(_gmt) format matches — TODO confirm for wc/v3.
                            if (o.get('date_created_gmt') or o.get('date_created') or '') >= hr_ago.replace('Z',''): last_hr += 1
                        orders_today += len(d)
                        if len(d) < 100: break
                        page += 1
                checks['rev_today'] = rev; checks['orders_today'] = orders_today; checks['last_hr_orders'] = last_hr
                # Clear woo_err only if we actually pulled some data
                if rev > 0 or orders_today > 0:
                    checks.pop('woo_err', None)
            except Exception as e:
                checks['woo_err'] = str(e)[:120]

        # Pace: revenue as % of daily target vs % of the day elapsed (local time)
        target = float(cfg.get('daily_target') or 25000)
        from datetime import datetime as _dt
        now = _dt.now(); hr = now.hour + now.minute/60
        pace_pct = (rev/target*100) if target else 0
        expected_pct = (hr/24)*100

        # Severity
        # NOTE(review): when Woo creds are missing, rev/last_hr stay 0, so during
        # 08:00-23:00 this always escalates to 'critical' — confirm that's intended.
        if not checks.get('site_ok'):                         severity = 'critical'
        elif last_hr == 0 and 8 <= now.hour <= 23:            severity = 'critical'
        elif pace_pct < expected_pct - 25:                    severity = 'warn'
        else:                                                  severity = 'ok'

        # AI summaries via Claude — TWO versions:
        #   staff_summary    (% + arrows only · NO £ figures · NO order counts)
        #   directors_summary (full data with £)
        summary = ''                # the redacted one — what 'summary' field returns and what gets saved/displayed
        directors_summary = ''      # only sent if a directors group is configured on the WA bot
        if cfg.get('claude_key'):
            arrow_pace = '▲' if pace_pct >= expected_pct else '▼'
            gap_pp = pace_pct - expected_pct
            data_err = bool(checks.get('woo_err'))
            try:
                # --- STAFF prompt: STRICT no-money, percentages + arrows only ---
                staff_prompt = (
                    f"You are the Nitrous Competitions watchdog. Write a SHORT WhatsApp STAFF brief (≤ 320 chars) starting with one of: "
                    f"🟢 HEALTHY / 🟡 WATCH / 🔴 ACT NOW.\n\n"
                    f"🚫 ABSOLUTE RULES — break ANY of these and the message is rejected:\n"
                    f"1. NEVER mention pounds (£), GBP, dollars, money, revenue figures, sales figures, target amounts, or order counts.\n"
                    f"2. ONLY use percentages, gap (in pp), and trend arrows ▲ ▼ ►. NO numbers other than the percentages and pp gap.\n"
                    f"3. If asked about money, redirect to the directors group.\n\n"
                    f"USE THESE EXACT FIGURES (do not invent):\n"
                    f"- Site: {'UP ' + str(checks.get('site_status')) if checks.get('site_ok') else 'DOWN'} ({checks.get('site_ms',0)}ms)\n"
                    f"- Pace: {int(pace_pct)}% of target {arrow_pace}\n"
                    f"- Expected by now: {int(expected_pct)}%\n"
                    f"- Gap: {int(gap_pp):+d}pp {arrow_pace}\n"
                    f"- Last-hour activity: {'no orders' if last_hr == 0 else 'steady'}\n"
                    f"- Severity: {severity}\n"
                    + (f"- ⚠ Data feed error — say 'data feed broken — Joshua please check the Woo API key'.\n" if data_err else "")
                    + f"\nEnd with one specific action assigned to a named teammate from: Karis (presenter), Joshua (web ops), "
                    f"Kyrell (promoter), Aliyah (social), Conrad (director), Alex (camera). Be terse. UK English. Sparing emoji."
                )
                creq = urllib.request.Request(
                    "https://api.anthropic.com/v1/messages",
                    data=json.dumps({"model":"claude-haiku-4-5","max_tokens":256,"messages":[{"role":"user","content":staff_prompt}]}).encode(),
                    method="POST",
                    headers={"x-api-key":cfg['claude_key'],"anthropic-version":"2023-06-01","content-type":"application/json"},
                )
                with urllib.request.urlopen(creq, timeout=20) as r:
                    d = json.loads(r.read())
                    summary = d['content'][0]['text']

                # --- DIRECTORS prompt: full data, only sent if directors group exists ---
                directors_prompt = (
                    f"You are the Nitrous Competitions watchdog briefing the DIRECTORS group. Write a tight brief (≤ 380 chars) "
                    f"starting with: 🟢 HEALTHY / 🟡 WATCH / 🔴 ACT NOW.\n\n"
                    f"USE THESE EXACT NUMBERS (do not round / invent):\n"
                    f"- Site: {'UP ' + str(checks.get('site_status')) if checks.get('site_ok') else 'DOWN'} ({checks.get('site_ms',0)}ms)\n"
                    f"- Revenue today: £{int(rev)}\n"
                    f"- Target: £{int(target)} → pace {int(pace_pct)}%, expected {int(expected_pct)}%\n"
                    f"- Orders: {orders_today} today, {last_hr} last hour\n"
                    + (f"- ⚠ Woo data error: {checks.get('woo_err')}\n" if data_err else "")
                    + f"\nEnd with the recommended director-level action — strategic, not a per-staff task. Direct, no fluff."
                )
                creq2 = urllib.request.Request(
                    "https://api.anthropic.com/v1/messages",
                    data=json.dumps({"model":"claude-haiku-4-5","max_tokens":256,"messages":[{"role":"user","content":directors_prompt}]}).encode(),
                    method="POST",
                    headers={"x-api-key":cfg['claude_key'],"anthropic-version":"2023-06-01","content-type":"application/json"},
                )
                with urllib.request.urlopen(creq2, timeout=20) as r:
                    d2 = json.loads(r.read())
                    directors_summary = d2['content'][0]['text']
            except Exception as e:
                # Claude failures are non-fatal — the hand-built fallbacks below kick in.
                sys.stderr.write(f"[watchdog] claude err: {e}\n")

        if not summary:
            # Fallback STAFF — % only
            tag = '🔴 ACT NOW' if severity=='critical' else '🟡 WATCH' if severity=='warn' else '🟢 HEALTHY'
            arrow = '▲' if pace_pct >= expected_pct else '▼'
            summary = f"{tag}\nSite {'up' if checks.get('site_ok') else 'DOWN'} · pace {int(pace_pct)}% {arrow} (expected {int(expected_pct)}%) · last hour {'quiet' if last_hr==0 else 'steady'}"
        if not directors_summary:
            tag = '🔴 ACT NOW' if severity=='critical' else '🟡 WATCH' if severity=='warn' else '🟢 HEALTHY'
            directors_summary = f"{tag}\nSite {'up' if checks.get('site_ok') else 'DOWN'} · £{int(rev)}/£{int(target)} ({int(pace_pct)}%) · last hr {last_hr} orders"

        # Belt-and-braces: scrub any stray £ from the staff summary in case Claude slipped up.
        # Replace £NNN with "%-only" placeholder, but only when used as a money figure.
        import re as _re
        summary = _re.sub(r'£\s?[\d,]+(?:\.\d+)?', '[redacted]', summary)
        # Also strip any "£0" leftovers
        summary = summary.replace('£', '')

        # Send via the WA bot. STAFF gets the redacted version; DIRECTORS (if configured) gets the full one.
        sent = False
        channel = None
        send_err = None
        if send:
            try:
                # Ask the local WA bot sidecar whether it is connected and which groups are set.
                bot_status = json.loads(urllib.request.urlopen(
                    f"http://127.0.0.1:{WA_BOT_PORT}/status", timeout=4
                ).read())
                if not bot_status.get('connected'):
                    send_err = "WA bot not connected — Settings → 📱 WhatsApp Bot → Show QR + scan"
                else:
                    staff_group = bot_status.get('default_group') or ''
                    directors_group = bot_status.get('directors_group') or ''
                    if not staff_group:
                        send_err = "No staff group picked yet — Settings → 📱 WhatsApp Bot → pick a group + Save"
                    else:
                        # Send REDACTED summary to STAFF
                        bot_req = urllib.request.Request(
                            f"http://127.0.0.1:{WA_BOT_PORT}/send-group",
                            data=json.dumps({"groupId": staff_group, "body": "🐶 " + summary[:1400]}).encode(),
                            headers={"Content-Type": "application/json"},
                            method="POST",
                        )
                        with urllib.request.urlopen(bot_req, timeout=15) as r:
                            sent = (r.status == 200)
                            channel = 'wa-bot'
                    # Also send FULL summary to DIRECTORS group, if configured
                    if directors_group:
                        try:
                            dir_req = urllib.request.Request(
                                f"http://127.0.0.1:{WA_BOT_PORT}/send-group",
                                data=json.dumps({"groupId": directors_group, "body": "🐶 " + directors_summary[:1400]}).encode(),
                                headers={"Content-Type": "application/json"},
                                method="POST",
                            )
                            urllib.request.urlopen(dir_req, timeout=15).read()
                            channel = (channel or '') + '+directors'
                        except Exception as e:
                            # Directors send is best-effort; the staff send already counted.
                            sys.stderr.write(f"[watchdog] directors send failed: {e}\n")
            except Exception as e:
                send_err = f"WA bot unreachable: {e}"
                sys.stderr.write(f"[watchdog] wa-bot path failed: {e}\n")

        self.last_run = int(time.time())
        result = {
            "t": self.last_run, "severity": severity, "summary": summary,
            "checks": checks, "rev_today": rev, "pace_pct": pace_pct,
            "expected_pct": expected_pct, "last_hr_orders": last_hr,
            "sent": sent, "channel": channel, "send_err": send_err,
        }
        self._hist_save(result)
        sys.stderr.write(f"[watchdog] tick · {severity} · sent={sent} · channel={channel} · £{int(rev)} · {last_hr} last-hr\n")
        return result


WD_DAEMON = WatchdogDaemon()  # module-level singleton; reload() starts/stops it per the config's 'enabled' flag


# ====================================================================
# 📚 BULK CUSTOMER DATABASE — lazy-loads the 442K-record Woo customer dump
# from /media/sol1/1TB-Drive1/data/customers/page_*.json into memory once,
# then merges with the live customer DB (which has freshly-joined customers
# from today's orders). Provides fast in-memory search.
# ====================================================================
class BulkCustomerDB:
    """Lazy-loading in-memory customer database.

    Loads the bulk Woo customer dump once (ensure_loaded), merges in the live
    customer DB, attaches 7-day and 12-month competition-entry indexes, and
    serves fast in-memory search. Fix in this revision: search() previously
    aliased self.records and sorted it IN PLACE when no query/filter narrowed
    the list — mutating shared state without the lock under the threaded
    server; it now always works on a copy."""
    def __init__(self):
        # Nothing is read from disk here — loading is deferred to ensure_loaded().
        import threading
        self.lock = threading.Lock()
        self._loaded = False
        self.records = []         # full list, each with normalised fields
        self.by_email = {}        # email → record (lowercased key)
        self.loaded_at = 0
    def _flatten(self, c):
        """Convert a Woo customer record into the flat shape used by the UI."""
        b = c.get('billing') or {}
        s = c.get('shipping') or {}
        email = (c.get('email') or b.get('email') or '').lower().strip()
        return {
            'id':         c.get('id'),
            'email':      email,
            # Prefer the account name; fall back to the billing name.
            'name':       (c.get('first_name','') + ' ' + c.get('last_name','')).strip() or (b.get('first_name','') + ' ' + b.get('last_name','')).strip(),
            'first_name': c.get('first_name') or b.get('first_name',''),
            'last_name':  c.get('last_name')  or b.get('last_name',''),
            'phone':      b.get('phone') or s.get('phone') or '',
            'city':       b.get('city')  or s.get('city')  or '',
            'postcode':   b.get('postcode') or s.get('postcode') or '',
            'country':    b.get('country') or s.get('country') or '',
            'address':    b.get('address_1') or '',
            'username':   c.get('username') or '',
            'role':       c.get('role') or '',
            'created':    c.get('date_created') or '',
            'modified':   c.get('date_modified') or '',
            'is_paying':  bool(c.get('is_paying_customer')),
            'avatar':     c.get('avatar_url') or '',
            'birthday':   '',          # filled from live DB merge below if available
        }
    def _load_12mo_winnings_index(self):
        """Build email → 12-month index from the daily order dump files.
           Stores BOTH:
             - aggregated comp summary (for fast "all time" view)
             - individual entries with dates (so we can filter by any time range on demand)
           Returns: { email: { comp_count, tickets, spent, comps:[...], entries:[...] } }
        """
        import glob, time
        files = sorted(glob.glob(os.path.join(ORDERS_PAGES_DIR, '*.json')))
        if not files:
            return {}
        # Cache freshness — re-use disk cache if newer than the newest order file
        try:
            cache_mtime = os.path.getmtime(WINNINGS_12MO_CACHE)
            newest_order = max(os.path.getmtime(f) for f in files)
            if cache_mtime >= newest_order:
                with open(WINNINGS_12MO_CACHE, 'r') as f:
                    cached = json.load(f)
                # Schema check — if old cache lacks `entries`, force rebuild
                sample = next(iter(cached.values()), None) if cached else None
                if sample is None or 'entries' in sample:
                    sys.stderr.write(f"[bulk-db] 12mo winnings · loaded from cache · {len(cached):,} customers\n")
                    return cached
                sys.stderr.write(f"[bulk-db] 12mo winnings · cache is old schema, rebuilding\n")
        except FileNotFoundError: pass
        except Exception as e: sys.stderr.write(f"[bulk-db] 12mo cache read err: {e}\n")
        sys.stderr.write(f"[bulk-db] 12mo winnings · indexing {len(files)} day files...\n")
        t0 = time.time()
        # email → { comps: { pid → agg }, entries: [...] }
        agg = {}
        seen_orders = set()       # order ids can repeat across day files — count each once
        for fp in files:
            try:
                with open(fp, 'r') as f: arr = json.load(f)
            except Exception:
                continue
            if not isinstance(arr, list): arr = arr.get('orders') or arr.get('data') or []
            for o in arr:
                oid = o.get('id')
                if not oid or oid in seen_orders: continue
                seen_orders.add(oid)
                st = (o.get('status') or '').lower()
                if st not in ('completed', 'processing', 'on-hold'): continue
                b = o.get('billing') or {}
                email = (b.get('email') or '').lower().strip()
                if not email: continue
                date = (o.get('date_created') or '')[:10]
                customer_agg = agg.setdefault(email, {'comps': {}, 'entries': []})
                for li in (o.get('line_items') or []):
                    pid = li.get('product_id') or li.get('variation_id')
                    if not pid: continue
                    name = li.get('name') or ''
                    qty = int(li.get('quantity') or 0)
                    total = float(li.get('total') or 0)
                    # Aggregate per-comp summary
                    comp_agg = customer_agg['comps'].setdefault(pid, {'comp_id': pid, 'comp_name': name, 'tickets': 0, 'total': 0.0, 'last_date': date, 'count': 0})
                    comp_agg['tickets'] += qty; comp_agg['total'] += total; comp_agg['count'] += 1
                    if not comp_agg.get('comp_name') and name: comp_agg['comp_name'] = name
                    if date > (comp_agg.get('last_date') or ''): comp_agg['last_date'] = date
                    # Individual entry (so we can filter by date on demand)
                    customer_agg['entries'].append({
                        'date': date, 'comp_id': pid, 'comp_name': name,
                        'tickets': qty, 'total': total, 'order_id': oid,
                    })
        # Flatten the per-customer aggregates into the cached/returned shape
        out = {}
        for email, ca in agg.items():
            comps_list = sorted(ca['comps'].values(), key=lambda c: c['last_date'], reverse=True)
            for c in comps_list: c['total'] = round(c['total'], 2)
            entries = sorted(ca['entries'], key=lambda e: e['date'], reverse=True)[:500]   # keep top-500 most recent entries
            out[email] = {
                'comp_count': len(comps_list),
                'tickets': sum(c['tickets'] for c in comps_list),
                'spent': round(sum(c['total'] for c in comps_list), 2),
                'comps': comps_list[:50],
                'entries': entries,
            }
        try:
            with open(WINNINGS_12MO_CACHE, 'w') as f: json.dump(out, f)
        except Exception as e: sys.stderr.write(f"[bulk-db] 12mo cache write err: {e}\n")
        sys.stderr.write(f"[bulk-db] 12mo winnings · indexed {len(out):,} customers from {len(seen_orders):,} orders ({time.time()-t0:.1f}s)\n")
        return out

    def _load_winnings_index(self):
        """Build email → list of comp entries from BOTH:
             1. /cache orders (today + week + prev_today) — INSTANT, covers last 7 days
             2. winners/*.json files — historical, generated when comps finish
           Both sources are merged and de-duplicated by order_id.
        """
        out = {}
        # Build a product_id → name map for the orders-cache derivation
        product_name = {}
        try:
            for p in (WOO_CACHE.products or []):
                product_name[p['id']] = p.get('name','')
        except Exception: pass

        # === SOURCE 1: live orders cache (last 7 days, INSTANT) ===
        orders_seen = set()
        for period in ('today', 'week', 'prev_today'):
            for o in (WOO_CACHE.orders.get(period) or []):
                oid = o.get('id')
                if not oid or oid in orders_seen: continue
                orders_seen.add(oid)
                b = o.get('billing') or {}
                email = (b.get('email') or '').lower().strip()
                if not email: continue
                name = (b.get('first_name','') + ' ' + b.get('last_name','')).strip()
                # Each line item that's a competition counts as one entry for that comp
                for li in (o.get('line_items') or []):
                    pid = li.get('product_id') or li.get('variation_id')
                    if not pid: continue
                    entries = out.setdefault(email, [])
                    entries.append({
                        'comp_id':   pid,
                        'comp_name': li.get('name') or product_name.get(pid, ''),
                        'order_id':  oid,
                        'date':      o.get('date_created'),
                        'tickets':   int(li.get('quantity') or 0),
                        'total':     float(li.get('total') or 0),
                        'status':    o.get('status'),
                        'name':      name,
                        'source':    'orders-cache',
                    })

        # === SOURCE 2: winner files (historical, when generated) ===
        import glob
        files = sorted(glob.glob(os.path.join(WINNERS_DIR, '*.json')))
        for f in files:
            try:
                with open(f, 'r') as fh: d = json.load(fh)
            except Exception:
                continue
            comp_id = d.get('product_id')
            comp_name = d.get('product_name') or ''
            for o in (d.get('orders') or []):
                email = (o.get('email') or '').lower().strip()
                if not email: continue
                # De-dupe vs the orders-cache source by order_id
                oid = o.get('order_id')
                entries = out.setdefault(email, [])
                if oid and any(e.get('order_id') == oid for e in entries): continue
                entries.append({
                    'comp_id':   comp_id,
                    'comp_name': comp_name,
                    'order_id':  oid,
                    'date':      o.get('date'),
                    'tickets':   o.get('tickets', 0),
                    'total':     o.get('total', 0),
                    'status':    o.get('status'),
                    'name':      o.get('name',''),
                    'source':    'winners-file',
                })
        # Sort each customer's entries newest-first
        for email, entries in out.items():
            entries.sort(key=lambda e: e.get('date',''), reverse=True)
        sys.stderr.write(f"[bulk-db] winnings index · {len(out):,} customers ({len(orders_seen)} orders-cache + {len(files)} winner files)\n")
        return out

    def ensure_loaded(self):
        """Load everything on first call (double-checked under the lock); no-op after."""
        if self._loaded: return
        with self.lock:
            if self._loaded: return
            import time, glob
            t0 = time.time()
            files = sorted(glob.glob(os.path.join(CUSTOMERS_PAGES_DIR, 'page_*.json')))
            sys.stderr.write(f"[bulk-db] loading {len(files)} customer pages...\n")
            recs = []
            for f in files:
                try:
                    with open(f, 'r') as fh: arr = json.load(fh)
                    if not isinstance(arr, list): arr = arr.get('customers') or arr.get('data') or []
                    for c in arr:
                        if not isinstance(c, dict): continue
                        recs.append(self._flatten(c))
                except Exception as e:
                    sys.stderr.write(f"[bulk-db] skip {os.path.basename(f)}: {e}\n")
            # Merge in live customer DB (freshly-joined buyers from today's orders, plus any birthdays)
            try:
                live = WOO_CACHE._load_customer_db() if 'WOO_CACHE' in globals() else {}
            except Exception:
                live = {}
            by_email = {}
            for r in recs:
                e = r.get('email')
                if e: by_email[e] = r
            for email, lr in (live or {}).items():
                if email.startswith('_'): continue   # skip metadata keys in the live DB
                if email in by_email:
                    # enrich existing — birthday + recent revenue/orders
                    if lr.get('birthday'): by_email[email]['birthday'] = lr['birthday']
                    if lr.get('total_spent') is not None: by_email[email]['revenue'] = lr['total_spent']
                    if lr.get('orders'): by_email[email]['orders'] = lr['orders']
                    if lr.get('last_seen'): by_email[email]['last_seen'] = lr['last_seen']
                else:
                    # NEW customer (joined since the bulk dump was taken)
                    nr = {
                        'id': None, 'email': email, 'name': lr.get('name',''),
                        'first_name': '', 'last_name': '',
                        'phone': lr.get('phone',''), 'city': lr.get('city',''),
                        'postcode': lr.get('postcode',''), 'country': lr.get('country',''),
                        'address': '', 'username': '', 'role': '',
                        'created': lr.get('first_seen') or '',
                        'modified': lr.get('last_seen') or '',
                        'is_paying': True, 'avatar': '',
                        'birthday': lr.get('birthday',''),
                        'revenue': lr.get('total_spent', 0) or 0,
                        'orders':  lr.get('orders', 0) or 0,
                        'last_seen': lr.get('last_seen',''),
                        'new_since_dump': True,
                    }
                    by_email[email] = nr
                    recs.append(nr)
            # Attach winnings (per-customer competition entry history) — last 7 days
            self.winnings = self._load_winnings_index()
            for email, entries in self.winnings.items():
                rec = by_email.get(email)
                if not rec: continue
                rec['winnings_count']   = len(entries)
                rec['winnings_tickets'] = sum(int(e.get('tickets') or 0) for e in entries)
                rec['winnings_spent']   = round(sum(float(e.get('total') or 0) for e in entries), 2)
                rec['winnings_comps']   = [e['comp_name'] for e in entries[:5]]
            # Attach 12-month aggregates from the daily order dump
            self.winnings_12mo = self._load_12mo_winnings_index()
            for email, agg in self.winnings_12mo.items():
                rec = by_email.get(email)
                if not rec: continue
                rec['winnings_12mo_count']   = agg.get('comp_count', 0)
                rec['winnings_12mo_tickets'] = agg.get('tickets', 0)
                rec['winnings_12mo_spent']   = agg.get('spent', 0)
            self.records = recs
            self.by_email = by_email
            self._loaded = True
            self.loaded_at = int(time.time())
            sys.stderr.write(f"[bulk-db] loaded · {len(recs):,} records · {len(self.winnings):,} with winnings ({time.time()-t0:.1f}s)\n")
    def reload_winnings(self):
        """Re-scan winner files and update in-memory state without re-reading the bulk customer pages."""
        self.ensure_loaded()        # initialise winnings dict if first call
        with self.lock:
            self.winnings = self._load_winnings_index()
            # Reset and re-attach
            for rec in self.records:
                rec.pop('winnings_count', None); rec.pop('winnings_tickets', None)
                rec.pop('winnings_spent', None); rec.pop('winnings_comps', None)
            for email, entries in self.winnings.items():
                rec = self.by_email.get(email)
                if not rec: continue
                rec['winnings_count']   = len(entries)
                rec['winnings_tickets'] = sum(int(e.get('tickets') or 0) for e in entries)
                rec['winnings_spent']   = round(sum(float(e.get('total') or 0) for e in entries), 2)
                rec['winnings_comps']   = [e['comp_name'] for e in entries[:5]]
    def get_winnings(self, email):
        """Return the 7-day entry list for an email ([] if none)."""
        if not self._loaded: self.ensure_loaded()
        return self.winnings.get((email or '').lower(), [])
    def get_winnings_12mo(self, email):
        """Return the 12-month aggregate for an email (zeroed shape if none)."""
        if not self._loaded: self.ensure_loaded()
        return self.winnings_12mo.get((email or '').lower(), {'comp_count': 0, 'tickets': 0, 'spent': 0, 'comps': [], 'entries': []})
    def get_winnings_range(self, email, from_date, to_date):
        """Filter the 12-month entries by an arbitrary date range (YYYY-MM-DD).
           Re-aggregates per-comp on the fly for the slice."""
        if not self._loaded: self.ensure_loaded()
        rec = self.winnings_12mo.get((email or '').lower())
        if not rec: return {'comp_count': 0, 'tickets': 0, 'spent': 0, 'comps': [], 'entries': []}
        entries = rec.get('entries') or []
        # ISO dates compare correctly as strings, so plain >= / <= work here
        if from_date: entries = [e for e in entries if e.get('date','') >= from_date]
        if to_date:   entries = [e for e in entries if e.get('date','') <= to_date]
        # Re-aggregate by comp
        comps = {}
        for e in entries:
            pid = e['comp_id']
            c = comps.setdefault(pid, {'comp_id': pid, 'comp_name': e['comp_name'], 'tickets': 0, 'total': 0.0, 'last_date': e['date'], 'count': 0})
            c['tickets'] += e['tickets']; c['total'] += e['total']; c['count'] += 1
            if e['date'] > c['last_date']: c['last_date'] = e['date']
        comps_list = sorted(comps.values(), key=lambda c: c['last_date'], reverse=True)
        for c in comps_list: c['total'] = round(c['total'], 2)
        return {
            'comp_count': len(comps_list),
            'tickets':    sum(c['tickets'] for c in comps_list),
            'spent':      round(sum(c['total'] for c in comps_list), 2),
            'comps':      comps_list,
            'entries':    entries,
            'from':       from_date or None,
            'to':         to_date or None,
        }
    def search(self, q, limit=200, offset=0, sort='name', filter=''):
        """In-memory search over the loaded records.

        q       — case-insensitive substring matched against email/name/phone/postcode/city/username
        filter  — '' | 'winners' | 'birthdays'
        sort    — 'name' (default) | 'recent' | 'spend' | 'orders' | 'winnings'
        Returns {'total', 'count', 'results'} with results paged by offset/limit."""
        self.ensure_loaded()
        q = (q or '').strip().lower()
        # FIX: always work on a copy. Previously `results = self.records` meant the
        # in-place sorts below reordered the shared master list without holding the
        # lock — a data race under ThreadingHTTPServer.
        results = list(self.records)
        if q:
            def matches(r):
                return (q in (r.get('email','') or '').lower()
                     or q in (r.get('name','') or '').lower()
                     or q in (r.get('phone','') or '').lower()
                     or q in (r.get('postcode','') or '').lower()
                     or q in (r.get('city','') or '').lower()
                     or q in (r.get('username','') or '').lower())
            results = [r for r in results if matches(r)]
        # Server-side filters
        if filter == 'winners':
            results = [r for r in results if (r.get('winnings_count') or 0) > 0 or (r.get('winnings_12mo_count') or 0) > 0]
        elif filter == 'birthdays':
            results = [r for r in results if r.get('birthday')]
        if sort == 'recent':
            results.sort(key=lambda r: r.get('modified') or r.get('created') or '', reverse=True)
        elif sort == 'spend':
            results.sort(key=lambda r: r.get('revenue') or 0, reverse=True)
        elif sort == 'orders':
            results.sort(key=lambda r: r.get('orders') or 0, reverse=True)
        elif sort == 'winnings':
            results.sort(key=lambda r: r.get('winnings_count') or 0, reverse=True)
        else:
            results.sort(key=lambda r: r.get('name') or '')
        total = len(results)
        return {'total': total, 'count': min(limit, max(0, total - offset)), 'results': results[offset:offset+limit]}
    def get_by_email(self, email):
        """Exact lookup by email (case-insensitive); returns None if unknown."""
        self.ensure_loaded()
        return self.by_email.get((email or '').lower())
    def stats(self):
        """Summary counts for the UI: total records, new-since-dump, with-birthday."""
        self.ensure_loaded()
        new = sum(1 for r in self.records if r.get('new_since_dump'))
        with_bd = sum(1 for r in self.records if r.get('birthday'))
        return { 'total': len(self.records), 'new_since_dump': new, 'with_birthday': with_bd, 'loaded_at': self.loaded_at }


BULK_DB = BulkCustomerDB()  # module-level singleton; the heavy disk load is deferred until ensure_loaded()


# ====================================================================
# 🚦 RATE LIMITER — token bucket, capped at RATE_LIMIT_PER_MIN globally.
# Every call to Woo waits until the bucket has a token. Guarantees we never
# burst > N requests/minute and so never trip Wordfence even unwhitelisted.
# ====================================================================
class RateLimiter:
    """Global spacing-based rate limiter for outbound Woo calls.

    Enforces a minimum interval (60 / max_per_minute seconds) between calls.
    acquire() sleeps while holding the lock, so concurrent callers queue up
    and are released one-by-one at min_interval spacing — the process as a
    whole can never burst past the per-minute cap.
    """
    def __init__(self, max_per_minute=None):
        """max_per_minute defaults to RATE_LIMIT_PER_MIN, resolved lazily at
        call time (not at class-definition time, which would freeze the
        constant's value when the class body executes)."""
        import threading
        if max_per_minute is None:
            max_per_minute = RATE_LIMIT_PER_MIN
        self.max_per_minute = max_per_minute
        self.min_interval = 60.0 / max(1, max_per_minute)
        self.last_at = 0.0
        self.lock = threading.Lock()
        self.calls_in_minute = []         # rolling list of recent timestamps for visibility
    def acquire(self):
        """Block until the caller may make one request, then record the timestamp."""
        import time
        with self.lock:
            now = time.time()
            wait = self.min_interval - (now - self.last_at)
            if wait > 0:
                time.sleep(wait)
            self.last_at = time.time()
            self.calls_in_minute = [t for t in self.calls_in_minute if t > self.last_at - 60] + [self.last_at]
    def stats(self):
        """Visibility counters for the snapshot endpoint.

        Reads calls_in_minute under the lock: acquire() rebuilds that list while
        holding the lock, so an unlocked read here would race with the daemon
        thread. Worst case this blocks for one min_interval while a waiter sleeps.
        """
        import time
        now = time.time()
        with self.lock:
            recent = [t for t in self.calls_in_minute if t > now - 60]
        return { 'cap_per_min': self.max_per_minute, 'last_60s': len(recent), 'min_interval_sec': round(self.min_interval, 2) }


# ====================================================================
# 🌐 PROXY POOL — rotates through user-configured outbound proxies.
# Rationale: when calling Woo, we don't want our server's IP to be the
# only outbound identity. The user can drop a list of HTTP/HTTPS/SOCKS
# proxies into proxies.json and the pool will round-robin through them
# transparently, marking bad ones as down for 5 min after a failure.
# ====================================================================
class ProxyPool:
    """Round-robin proxy rotation with per-proxy backoff on failure.
       File format (proxies.json on disk):
         { "proxies": ["http://user:pw@host:port", "http://..."] }
       Empty list / missing file = direct connection (current behaviour).

       Thread-safety: the pool is shared between request-handler threads and
       the WooCache daemon, so every access to self.proxies / self.bad_until /
       self.idx happens under self.lock (including reload() and status(),
       which previously read/wrote shared state unlocked)."""
    def __init__(self):
        import threading
        self.lock = threading.Lock()
        self.proxies = []
        self.bad_until = {}      # proxy -> unix_ts when it can be retried
        self.idx = 0
        self.reload()
    def reload(self):
        """(Re)read proxies.json from disk.

        Parses outside the lock, then swaps the list in atomically so a
        concurrent next()/status() never observes a half-updated pool.
        Backoff entries for proxies that no longer exist are dropped."""
        loaded = []
        try:
            with open(PROXY_CONFIG_FILE, 'r') as f:
                d = json.load(f)
            loaded = list(d.get('proxies') or [])
            sys.stderr.write(f"[proxy-pool] loaded {len(loaded)} proxies from disk\n")
        except FileNotFoundError:
            loaded = []
        except Exception as e:
            sys.stderr.write(f"[proxy-pool] load err: {e}\n")
            loaded = []
        with self.lock:
            self.proxies = loaded
            self.bad_until = { p: t for p, t in self.bad_until.items() if p in loaded }
    def save(self):
        """Persist the current proxy list. Callers (add/remove) hold the lock."""
        try:
            os.makedirs(os.path.dirname(PROXY_CONFIG_FILE), exist_ok=True)
            with open(PROXY_CONFIG_FILE, 'w') as f:
                json.dump({ 'proxies': self.proxies }, f, indent=2)
        except Exception as e:
            sys.stderr.write(f"[proxy-pool] save err: {e}\n")
    def add(self, proxy_url):
        """Add a proxy (no-op if already present) and persist to disk."""
        with self.lock:
            if proxy_url not in self.proxies:
                self.proxies.append(proxy_url)
                self.save()
    def remove(self, proxy_url):
        """Remove a proxy and any backoff record for it, then persist."""
        with self.lock:
            if proxy_url in self.proxies:
                self.proxies.remove(proxy_url)
                self.bad_until.pop(proxy_url, None)
                self.save()
    def next(self):
        """Returns next healthy proxy URL, or None for direct."""
        import time
        with self.lock:
            if not self.proxies: return None
            now = time.time()
            for _ in range(len(self.proxies)):
                p = self.proxies[self.idx % len(self.proxies)]
                self.idx += 1
                if self.bad_until.get(p, 0) <= now:
                    return p
            # All proxies are flagged bad — reset and use the oldest-bad one
            oldest = min(self.proxies, key=lambda p: self.bad_until.get(p, 0))
            self.bad_until[oldest] = 0
            return oldest
    def mark_bad(self, proxy_url, cooldown_sec=300):
        """Flag a proxy as unusable for cooldown_sec seconds."""
        import time
        if not proxy_url: return
        with self.lock:
            self.bad_until[proxy_url] = time.time() + cooldown_sec
            sys.stderr.write(f"[proxy-pool] marked bad: {proxy_url} for {cooldown_sec}s\n")
    def status(self):
        """Per-proxy health view. Built under the lock so the proxy list and
        the backoff map are consistent with each other."""
        import time
        now = time.time()
        with self.lock:
            return [
                { 'url': p, 'bad_for_sec': max(0, int((self.bad_until.get(p, 0) - now))), 'healthy': self.bad_until.get(p, 0) <= now }
                for p in self.proxies
            ]


# Process-wide singletons: one shared rate limiter and one shared proxy pool.
RATE_LIMITER = RateLimiter()
PROXY_POOL   = ProxyPool()


# ====================================================================
# 🗄️ WOO CACHE — server-side prefetch + throttle + persist
# ====================================================================
class WooCache:
    """Background-thread WooCommerce data cache.
    The browser fetches /cache/snapshot which is instant — never blocks on Woo.
    Uses a semaphore + retry to never exceed 2 concurrent requests, with backoff on 403.
    """
    def __init__(self):
        import threading
        self._stop = threading.Event()
        self._thread = None
        self.running = False
        # Concurrency limit so we never trip Wordfence rate-limiting again
        self._sem = threading.Semaphore(2)
        # In-memory state
        self.products = []                  # full product list
        self.orders = { 'today': [], 'week': [], 'prev_today': [] }  # period → orders[]
        self.lifetime = {}                  # product_id → { revenue, items, source, byStatus, at }
        self.last_refresh = { 'products': 0, 'today': 0, 'week': 0, 'prev_today': 0, 'lifetime_active': 0, 'lifetime_old': 0 }
        self.errors = {}                    # last error per category, cleared on success
        self._load_disk()

    # -------- disk persistence --------
    def _load_disk(self):
        """Best-effort restore of the previous snapshot + lifetime cache from disk.
        Missing or corrupt files are silently ignored — the cache just starts cold."""
        try:
            with open(CACHE_FILE, 'r') as f:
                d = json.load(f)
            self.products = d.get('products', [])
            self.orders = d.get('orders', self.orders)
            self.last_refresh = d.get('last_refresh', self.last_refresh)
            self.errors = d.get('errors', {})
            sys.stderr.write(f"[woo-cache] loaded from disk · {len(self.products)} comps · {len(self.orders.get('today',[]))} today orders\n")
        except Exception:
            pass
        try:
            with open(CACHE_LIFETIME_FILE, 'r') as f:
                self.lifetime = json.load(f)
            sys.stderr.write(f"[woo-cache] lifetime cache · {len(self.lifetime)} comps\n")
        except Exception:
            pass

    def _save_disk(self):
        """Persist the main snapshot (products / orders / timestamps / errors).
        Errors are logged to stderr, never raised — persistence is best-effort."""
        try:
            os.makedirs(os.path.dirname(CACHE_FILE), exist_ok=True)
            with open(CACHE_FILE, 'w') as f:
                json.dump({
                    'products': self.products,
                    'orders': self.orders,
                    'last_refresh': self.last_refresh,
                    'errors': self.errors,
                }, f)
        except Exception as e:
            sys.stderr.write(f"[woo-cache] disk save err: {e}\n")
    def _save_lifetime(self):
        """Persist the per-product lifetime totals to their own file. Never raises."""
        try:
            os.makedirs(os.path.dirname(CACHE_LIFETIME_FILE), exist_ok=True)
            with open(CACHE_LIFETIME_FILE, 'w') as f:
                json.dump(self.lifetime, f)
        except Exception as e:
            sys.stderr.write(f"[woo-cache] lifetime disk save err: {e}\n")

    # -------- throttled, retried Woo fetch --------
    def _woo_creds(self):
        """Read (store_url, consumer_key, consumer_secret) from the watchdog config.
        Returns three empty strings when the file is missing or unreadable."""
        try:
            with open(WD_CFG_FILE, 'r') as f:
                c = json.load(f)
            return c.get('woo_url',''), c.get('woo_ck',''), c.get('woo_cs','')
        except Exception:
            return '', '', ''

    def _fetch(self, path, params=None, max_retries=3):
        """Throttled GET with proxy rotation + exp-backoff on 403/429/5xx.
           Pipeline:
             1. RATE_LIMITER.acquire()      — global cap (35/min default)
             2. PROXY_POOL.next()           — pick a rotating proxy, or direct
             3. urllib via opener           — make the request
             4. on 403/429/5xx → mark proxy bad, retry through next proxy

           Raises RuntimeError when creds are missing or all retries fail;
           re-raises HTTPError for non-retryable status codes (4xx other than 403/429).
        """
        from base64 import b64encode
        import time
        url_base, ck, cs = self._woo_creds()
        if not (ck and cs):
            raise RuntimeError("woo creds missing in watchdog-config.json")
        url = url_base.rstrip('/') + path
        if params:
            qs = urllib.parse.urlencode(params)
            url += '?' + qs
        auth = b64encode(f"{ck}:{cs}".encode()).decode()
        delay = 1.0
        last_err = None
        with self._sem:                     # respect concurrency limit
            for attempt in range(max_retries):
                # 1. Wait for a rate-limit token
                RATE_LIMITER.acquire()
                # 2. Pick a proxy (or None for direct)
                proxy_url = PROXY_POOL.next()
                try:
                    req = urllib.request.Request(url, headers={
                        "Authorization": "Basic " + auth,
                        "User-Agent": "NitrousCache/1.0",
                    })
                    if proxy_url:
                        proxy_handler = urllib.request.ProxyHandler({ 'http': proxy_url, 'https': proxy_url })
                        opener = urllib.request.build_opener(proxy_handler)
                        with opener.open(req, timeout=20) as r:
                            return json.loads(r.read())
                    else:
                        with urllib.request.urlopen(req, timeout=20) as r:
                            return json.loads(r.read())
                except urllib.error.HTTPError as e:
                    last_err = f"HTTP {e.code}"
                    if e.code in (403, 429):
                        # The CURRENT proxy (or our direct IP) is being rate-limited — flag it & try another
                        if proxy_url:
                            PROXY_POOL.mark_bad(proxy_url, cooldown_sec=300)
                        time.sleep(delay)
                        delay *= 2
                        continue
                    if e.code >= 500:
                        time.sleep(delay)
                        delay *= 2
                        continue
                    raise
                except Exception as e:
                    last_err = str(e)
                    # Don't punish a proxy for a slow upstream (timeout) — only network/conn errors
                    if proxy_url and 'time' not in str(e).lower() and 'timeout' not in str(e).lower():
                        PROXY_POOL.mark_bad(proxy_url, cooldown_sec=120)
                    time.sleep(delay)
                    delay *= 2
            raise RuntimeError(last_err or "max retries")

    # -------- refreshers --------
    def _refresh_products(self):
        """Re-pull the full published product list (up to 30 pages × 100).
        Only replaces self.products when at least one product came back, so a
        transient failure never wipes the cached list."""
        all_products = []
        page = 1
        while page <= 30:
            try:
                batch = self._fetch('/products', { 'per_page': 100, 'status': 'publish', 'orderby': 'date', 'order': 'desc', 'page': page })
            except Exception as e:
                self.errors['products'] = str(e)[:200]
                break
            if not batch: break
            all_products.extend(batch)
            if len(batch) < 100: break
            page += 1
        if all_products:
            self.products = all_products
            self.last_refresh['products'] = int(time.time())
            self.errors.pop('products', None)
            sys.stderr.write(f"[woo-cache] products refreshed · {len(all_products)}\n")

    def _refresh_period_orders(self, period_key, after_iso, before_iso):
        """Re-pull orders for one period ('today' / 'week' / 'prev_today') across
        the three countable statuses, de-duplicating by order id. Replaces
        self.orders[period_key] wholesale, even when the result is empty."""
        all_orders = []
        seen_ids = set()
        max_pages = 25 if period_key == 'today' else 80
        for status in ('processing', 'completed', 'on-hold'):
            page = 1
            while page <= max_pages:
                # Build params — only include `before` if non-empty (WP rejects empty value as 400)
                params = {
                    'per_page': 100, 'status': status, 'after': after_iso,
                    'orderby': 'date', 'order': 'desc', 'page': page,
                }
                if before_iso: params['before'] = before_iso
                try:
                    batch = self._fetch('/orders', params)
                except Exception as e:
                    self.errors[f'orders_{period_key}_{status}'] = str(e)[:200]
                    break
                if not batch: break
                for o in batch:
                    if o['id'] in seen_ids: continue
                    seen_ids.add(o['id'])
                    all_orders.append(o)
                if len(batch) < 100: break
                page += 1
        self.orders[period_key] = all_orders
        self.last_refresh[period_key] = int(time.time())
        # Clear errors for any status that succeeded this run (we got at least some orders)
        if all_orders:
            for st in ('processing', 'completed', 'on-hold'):
                self.errors.pop(f'orders_{period_key}_{st}', None)
        sys.stderr.write(f"[woo-cache] orders[{period_key}] refreshed · {len(all_orders)} orders\n")

    def _is_active(self, p):
        """Drawing today / drawing in next 7 days / created in last 14 days = active."""
        # NOTE(review): `_re` is imported but never used in this method; the
        # "next 7 days" part of the docstring is not implemented here either —
        # only the name keywords and the 14-day creation window are checked.
        import re as _re
        name = (p.get('name') or '').lower()
        if 'today' in name or 'tonight' in name: return True
        try:
            created = datetime.fromisoformat((p.get('date_created') or '').replace('Z','+00:00'))
            if (datetime.now(created.tzinfo) - created).days < 14: return True
        except Exception: pass
        return False

    def _refresh_lifetime_one(self, product_id):
        """Walk every order containing this product (all three statuses) and total
        revenue / item counts into self.lifetime[str(product_id)].
        On a fetch error the method returns early, keeping any previous entry."""
        # NOTE(review): all_orders is initialized but never used — looks like a
        # leftover from an earlier version of this method.
        all_orders = []
        page = 1
        rev = 0.0
        items = 0
        by_status = {}
        for status in ('processing', 'completed', 'on-hold'):
            page = 1
            while page <= 200:
                try:
                    batch = self._fetch('/orders', {
                        'per_page': 100, 'status': status, 'product': product_id,
                        'orderby': 'date', 'order': 'desc', 'page': page,
                    })
                except Exception as e:
                    self.errors[f'lifetime_{product_id}'] = str(e)[:200]
                    return
                if not batch: break
                by_status[status] = by_status.get(status, 0) + len(batch)
                for o in batch:
                    for li in (o.get('line_items') or []):
                        if li.get('product_id') == product_id or li.get('variation_id') == product_id:
                            try: rev += float(li.get('total') or 0)
                            except: pass
                            try: items += int(li.get('quantity') or 0)
                            except: pass
                if len(batch) < 100: break
                page += 1
        self.lifetime[str(product_id)] = {
            'revenue': rev, 'items': items, 'byStatus': by_status,
            'source': 'orders', 'at': int(time.time() * 1000),
        }

    def _refresh_lifetimes(self, only_active=False):
        """Refresh lifetime totals for every product (or just 'active' ones).
        Honors the stop event between products so shutdown isn't blocked by
        this rate-limited (potentially many-minute) walk."""
        if not self.products: return
        candidates = [p for p in self.products if (not only_active or self._is_active(p))]
        for p in candidates:
            if self._stop.is_set(): break
            self._refresh_lifetime_one(p['id'])
        self.last_refresh['lifetime_active' if only_active else 'lifetime_old'] = int(time.time())
        self._save_lifetime()
        sys.stderr.write(f"[woo-cache] lifetimes refreshed ({'active' if only_active else 'all'}) · {len(candidates)} comps\n")

    # -------- customer database (auto-updates as new orders land) --------
    def _load_customer_db(self):
        """Load the on-disk customer DB (email → record); empty dict on any failure."""
        try:
            with open(CACHE_CUSTOMERS, 'r') as f: return json.load(f)
        except Exception: return {}
    def _save_customer_db(self, db):
        """Persist the customer DB. Never raises."""
        try:
            os.makedirs(os.path.dirname(CACHE_CUSTOMERS), exist_ok=True)
            with open(CACHE_CUSTOMERS, 'w') as f: json.dump(db, f)
        except Exception as e: sys.stderr.write(f"[woo-cache] customer db save err: {e}\n")
    def _extract_birthday(self, order):
        """Find a birthday-like field anywhere in the order meta. Returns 'MM-DD' or None.
           Checks billing.meta_data + order.meta_data for common keys (birthday/dob/date_of_birth/etc).
           Year is intentionally stripped — only month-day matters for birthday wishes + privacy."""
        import re
        candidates = []
        for src in (order.get('meta_data') or []):
            k = (src.get('key') or '').lower()
            if any(t in k for t in ('birthday','birth_day','dob','date_of_birth','birthdate','date-of-birth')):
                candidates.append(src.get('value'))
        b = order.get('billing') or {}
        for src in (b.get('meta_data') or []):
            k = (src.get('key') or '').lower()
            if any(t in k for t in ('birthday','birth_day','dob','date_of_birth','birthdate')):
                candidates.append(src.get('value'))
        for v in candidates:
            if not v: continue
            s = str(v).strip()
            # Try common formats: 1990-05-15, 15/05/1990, 15-05, 05/15, May 15
            m = re.search(r'(\d{1,4})[-/.\s](\d{1,2})[-/.\s](\d{1,4})', s)
            if m:
                a, b_, c = m.groups()
                # If first piece is 4-digit year → YYYY-MM-DD
                if len(a) == 4: month, day = int(b_), int(c)
                # Last piece is 4-digit year → DD/MM/YYYY (UK default)
                elif len(c) == 4: month, day = int(b_), int(a)
                # Else assume DD/MM
                else: month, day = int(b_), int(a)
                if 1 <= month <= 12 and 1 <= day <= 31:
                    return f"{month:02d}-{day:02d}"
            m2 = re.search(r'^(\d{1,2})[-/](\d{1,2})$', s)   # MM-DD or DD-MM
            if m2:
                a, b_ = int(m2.group(1)), int(m2.group(2))
                # heuristic: assume DD-MM (UK) unless first is >12
                month, day = (a, b_) if a > 12 else (b_, a)
                if 1 <= month <= 12 and 1 <= day <= 31:
                    return f"{month:02d}-{day:02d}"
        return None

    def _upsert_customer(self, db, order):
        """Merge one order's billing details into the customer DB (keyed by
        lowercased email). Returns True when a record was written, False when
        the order has no billing email."""
        b = order.get('billing') or {}
        email = (b.get('email') or '').strip().lower()
        if not email: return False
        rec = db.get(email) or { 'first_seen': order.get('date_created') }
        rec['name']      = (b.get('first_name','') + ' ' + b.get('last_name','')).strip()
        rec['phone']     = b.get('phone') or rec.get('phone','')
        rec['city']      = b.get('city')  or rec.get('city','')
        rec['postcode']  = b.get('postcode') or rec.get('postcode','')
        rec['country']   = b.get('country') or rec.get('country','')
        rec['orders']    = (rec.get('orders') or 0) + 1
        try:    rec['total_spent'] = round((rec.get('total_spent') or 0.0) + float(order.get('total') or 0), 2)
        except: pass
        rec['last_seen'] = order.get('date_created') or rec.get('last_seen')
        bd = self._extract_birthday(order)
        if bd: rec['birthday'] = bd               # MM-DD
        db[email] = rec
        return True
    def _process_orders_for_customers(self, period_orders):
        """Fold every order in the given list into the customer DB. Idempotent — uses order.id as a seen-set."""
        db = self._load_customer_db()
        # Track seen order IDs so re-processing the same period doesn't double-count
        seen_ids = set(db.get('_seen_order_ids', []))
        added = 0
        for o in period_orders:
            oid = str(o.get('id'))
            if not oid or oid in seen_ids: continue
            if self._upsert_customer(db, o): added += 1
            seen_ids.add(oid)
        # Cap the seen-set so it doesn't grow unbounded — only need to remember the last 50k orders
        # NOTE(review): set order is arbitrary, so "[-50000:]" drops 50k *arbitrary*
        # ids, not the oldest — acceptable here since it only risks re-counting.
        if len(seen_ids) > 50000:
            seen_ids = set(list(seen_ids)[-50000:])
        db['_seen_order_ids'] = list(seen_ids)
        if added:
            self._save_customer_db(db)
            sys.stderr.write(f"[woo-cache] customer db: +{added} new customers (total {len([k for k in db if not k.startswith('_')])})\n")

    # -------- winner-list generation (when a comp's draw date passes) --------
    def _winner_file(self, product_id):
        """Path of the generated winner-list JSON for one product."""
        return os.path.join(WINNERS_DIR, f"comp_{product_id}.json")
    def _generate_winner_list(self, product_id, product_name):
        """Walk every order for this product across all statuses + dump ticket numbers + buyer details.
           Result: searchable JSON file the user can grep for a winner."""
        os.makedirs(WINNERS_DIR, exist_ok=True)
        out_file = self._winner_file(product_id)
        # Skip if already generated AND file is recent (<24h) — re-run only if user deletes it
        if os.path.exists(out_file):
            age_h = (time.time() - os.path.getmtime(out_file)) / 3600
            if age_h < 24: return
        all_entries = []
        for status in ('processing', 'completed', 'on-hold'):
            page = 1
            while page <= 200:
                try:
                    batch = self._fetch('/orders', {
                        'per_page': 100, 'status': status, 'product': product_id,
                        'orderby': 'date', 'order': 'asc', 'page': page,
                    })
                except Exception as e:
                    sys.stderr.write(f"[winners] fetch err for {product_id}: {e}\n")
                    break
                if not batch: break
                for o in batch:
                    b = o.get('billing') or {}
                    name = (b.get('first_name','') + ' ' + b.get('last_name','')).strip()
                    tickets = 0
                    for li in (o.get('line_items') or []):
                        if li.get('product_id') == product_id or li.get('variation_id') == product_id:
                            try: tickets += int(li.get('quantity') or 0)
                            except: pass
                    all_entries.append({
                        'order_id': o.get('id'),
                        'order_number': o.get('number'),
                        'date': o.get('date_created'),
                        'name': name,
                        'email': (b.get('email') or '').lower(),
                        'phone': b.get('phone',''),
                        'postcode': b.get('postcode',''),
                        'city': b.get('city',''),
                        'tickets': tickets,
                        'total': float(o.get('total') or 0),
                        'status': o.get('status'),
                    })
                if len(batch) < 100: break
                page += 1
        # Number the tickets sequentially in order of purchase (oldest = ticket 1, etc.)
        # WooCommerce doesn't store per-ticket numbers natively, so we reconstruct: order entry → expand to N rows
        ticket_rows = []
        ticket_no = 1
        for e in all_entries:
            for _ in range(max(1, e['tickets'])):
                ticket_rows.append({ 'ticket_no': ticket_no, **e })
                ticket_no += 1
        # If the walk returned nothing (rate limit / network issues / actually empty), don't
        # persist a 0-ticket placeholder — it would block re-generation for 24h.
        if ticket_no <= 1 and not all_entries:
            sys.stderr.write(f"[winners] {product_id} returned 0 — not saving, will retry next scan\n")
            return
        result = {
            'product_id': product_id,
            'product_name': product_name,
            'generated_at': datetime.now().isoformat(),
            'total_tickets': ticket_no - 1,
            'total_orders': len(all_entries),
            'orders': all_entries,
            'tickets': ticket_rows,
        }
        try:
            with open(out_file, 'w') as f: json.dump(result, f, indent=2)
            sys.stderr.write(f"[winners] generated {os.path.basename(out_file)} · {ticket_no-1} tickets · {len(all_entries)} orders\n")
        except Exception as e:
            sys.stderr.write(f"[winners] save err for {product_id}: {e}\n")
    def _check_finished_comps_for_winners(self):
        """Scan products → if draw date is in the past (1-14 days ago) AND no winner file exists, generate one."""
        if not self.products: return
        now = datetime.now()
        for p in self.products:
            name = (p.get('name') or '')
            # Look for [Draw <Month> <day>] pattern
            import re
            m = re.search(r'(?:Draw|draw)\s+(Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\s+(\d{1,2})', name)
            if not m: continue
            month_map = {'jan':1,'feb':2,'mar':3,'apr':4,'may':5,'jun':6,'jul':7,'aug':8,'sep':9,'oct':10,'nov':11,'dec':12}
            month = month_map.get(m.group(1)[:3].lower())
            day = int(m.group(2))
            if not month: continue
            # Pick the year — past 14 days takes priority
            for year in (now.year, now.year - 1):
                try:
                    draw = datetime(year, month, day)
                    days_ago = (now - draw).days
                    if 0 <= days_ago <= 14:
                        if not os.path.exists(self._winner_file(p['id'])):
                            sys.stderr.write(f"[winners] {p['id']} '{name[:50]}' drew {days_ago}d ago — generating winner list\n")
                            self._generate_winner_list(p['id'], name)
                        break
                except ValueError: pass
    def list_winner_files(self):
        """Returns metadata about every generated winner file. Used by the browser to show a list."""
        if not os.path.isdir(WINNERS_DIR): return []
        out = []
        for fn in sorted(os.listdir(WINNERS_DIR)):
            if not fn.endswith('.json'): continue
            try:
                with open(os.path.join(WINNERS_DIR, fn), 'r') as f:
                    d = json.load(f)
                out.append({
                    'product_id': d.get('product_id'),
                    'product_name': d.get('product_name'),
                    'generated_at': d.get('generated_at'),
                    'total_tickets': d.get('total_tickets'),
                    'total_orders': d.get('total_orders'),
                    'file': fn,
                })
            except Exception: pass
        return out
    def get_winner_file(self, product_id):
        """Full contents of one winner file, or None if missing/unreadable."""
        try:
            with open(self._winner_file(product_id), 'r') as f: return json.load(f)
        except Exception: return None
    def search_winner_by(self, query):
        """Search across ALL winner files for a name/email/postcode/ticket# match. Returns matching rows (capped at 200)."""
        if not os.path.isdir(WINNERS_DIR): return []
        q = (query or '').strip().lower()
        if not q: return []
        hits = []
        for fn in os.listdir(WINNERS_DIR):
            if not fn.endswith('.json'): continue
            try:
                with open(os.path.join(WINNERS_DIR, fn), 'r') as f:
                    d = json.load(f)
                for row in (d.get('tickets') or []):
                    if (q in str(row.get('ticket_no','')) or
                        q in (row.get('name','') or '').lower() or
                        q in (row.get('email','') or '').lower() or
                        q in (row.get('postcode','') or '').lower() or
                        q in str(row.get('order_id',''))):
                        hits.append({ 'comp': d.get('product_name'), 'product_id': d.get('product_id'), **row })
                        if len(hits) >= 200: return hits
            except Exception: pass
        return hits

    # -------- thread loop --------
    def _loop(self):
        """Daemon loop: every ~30s, run whichever refresh steps are due.
        Each step is isolated (own try/except + save) so one failure can never
        kill the loop or starve later steps."""
        import time
        # Period boundaries (today + this week, + previous-day for comparison)
        while not self._stop.is_set():
            now = datetime.now(tz=None)
            # NOTE(review): these boundaries are LOCAL midnight formatted with a
            # 'Z' (UTC) suffix — if the Woo API interprets them as UTC the
            # periods are shifted by the local UTC offset. Confirm intended.
            today_start = now.replace(hour=0,minute=0,second=0,microsecond=0).strftime('%Y-%m-%dT%H:%M:%SZ')
            week_start = now.replace(hour=0,minute=0,second=0,microsecond=0)
            from datetime import timedelta
            week_start = (week_start - timedelta(days=7)).strftime('%Y-%m-%dT%H:%M:%SZ')
            prev_day_start = (now.replace(hour=0,minute=0,second=0,microsecond=0) - timedelta(days=1)).strftime('%Y-%m-%dT%H:%M:%SZ')
            prev_day_end = now.replace(hour=0,minute=0,second=0,microsecond=0).strftime('%Y-%m-%dT%H:%M:%SZ')

            t = int(time.time())
            # Each step is its own try/except + each followed by _save_disk so a slow
            # lifetime refresh can never starve the customer DB or disk persistence.
            def step(name, fn):
                try: fn()
                except Exception as e: sys.stderr.write(f"[woo-cache] {name} err: {e}\n")
                try: self._save_disk()
                except Exception as e: sys.stderr.write(f"[woo-cache] save after {name} err: {e}\n")
            # 1. products (5 min)
            if t - self.last_refresh.get('products', 0) > 300:
                step('refresh_products', self._refresh_products)
            # 2. today orders (60s) — most time-sensitive
            if t - self.last_refresh.get('today', 0) > 60:
                step('refresh_today', lambda: self._refresh_period_orders('today', today_start, ''))
            # 3. roll new orders into customer DB BEFORE the slow lifetime refresh
            if self.orders.get('today'):
                step('customers_today', lambda: self._process_orders_for_customers(self.orders['today']))
            # 3b. refresh the in-memory winnings index so newly-arrived orders show in the customer panel
            try:
                if 'BULK_DB' in globals() and BULK_DB._loaded: BULK_DB.reload_winnings()
            except Exception as e: sys.stderr.write(f"[woo-cache] reload_winnings err: {e}\n")
            # 4. week + prev_today (5 min)
            if t - self.last_refresh.get('week', 0) > 300:
                step('refresh_week', lambda: self._refresh_period_orders('week', week_start, ''))
            if t - self.last_refresh.get('prev_today', 0) > 300:
                step('refresh_prev_today', lambda: self._refresh_period_orders('prev_today', prev_day_start, prev_day_end))
            if self.orders.get('week'):
                step('customers_week', lambda: self._process_orders_for_customers(self.orders['week']))
            # 5. winners scan (1h) — quick, do before slow lifetime walk
            if t - self.last_refresh.get('winners_scan', 0) > 3600:
                step('winners_scan', self._check_finished_comps_for_winners)
                self.last_refresh['winners_scan'] = t
            # 6. lifetimes — SLOWEST, last (rate-limited so this can take many minutes)
            if t - self.last_refresh.get('lifetime_active', 0) > 60:
                step('lifetime_active', lambda: self._refresh_lifetimes(only_active=True))
            if t - self.last_refresh.get('lifetime_old', 0) > 3600:
                step('lifetime_old', lambda: self._refresh_lifetimes(only_active=False))
            # Sleep ~30s between checks (lightweight — most ticks just check timestamps)
            self._stop.wait(30)

    def start(self):
        """Start the background refresh thread. Idempotent — a second call is a no-op."""
        import threading, time as _time
        if self.running: return
        self._stop.clear()
        self._thread = threading.Thread(target=self._loop, daemon=True)
        self._thread.start()
        self.running = True
        sys.stderr.write("[woo-cache] daemon started\n")

    def stop(self):
        """Signal the loop to exit; the daemon thread finishes its current step and stops."""
        self._stop.set()
        self.running = False
        sys.stderr.write("[woo-cache] daemon stopping\n")

    # -------- view (used by /cache/* endpoints) --------
    def snapshot(self):
        """Cheap in-memory view served to the browser — makes no Woo calls."""
        import time
        now = int(time.time())
        return {
            'products':       self.products,
            'orders':         self.orders,
            'lifetime':       self.lifetime,
            'last_refresh':   self.last_refresh,
            'errors':         self.errors,
            'age_sec': {
                k: (now - v) if v else None for k, v in self.last_refresh.items()
            },
            't': now,
            'count': {
                'products':   len(self.products),
                'today':      len(self.orders.get('today', [])),
                'week':       len(self.orders.get('week', [])),
                'prev_today': len(self.orders.get('prev_today', [])),
                'lifetime':   len(self.lifetime),
            },
            'rate_limiter':   RATE_LIMITER.stats(),
            'proxies':        PROXY_POOL.status(),
        }


# Shared WooCache instance — constructing it restores any cached snapshot from disk.
WOO_CACHE = WooCache()
import time   # module-level `time` used by WooCache methods at runtime (import placed after the class on purpose)


# ====================================================================
# 🩺 SELF-HEALING SUPERVISOR
# Monitors the WooCache thread, the watchdog daemon, and the proxy pool.
# If the WooCache stops ticking (no orders refresh in > 5min) the
# supervisor force-restarts the daemon. If every proxy is unhealthy,
# the supervisor logs a warning to the supervisor history. If the
# WhatsApp watchdog falls over, it's restarted (only if it was meant
# to be running per its config). Heartbeat is exposed at /health.
# ====================================================================
class SelfHealingSupervisor:
    """Background watchdog-of-watchdogs.

    Every INTERVAL seconds it inspects the WooCache refresh thread, the
    WhatsApp watchdog daemon and the proxy pool, and restarts/resets
    whatever is down. Read-only state is exposed via status() (serialised
    by the /health endpoint) and mirrored to HEARTBEAT_FILE each tick.
    """

    HEARTBEAT_FILE = os.path.join(DATA_ROOT, "mc-server-heartbeat.json")
    STALE_THRESHOLD = 300   # 5 min — if today-orders haven't refreshed in this long, restart WooCache loop
    INTERVAL = 60           # check every 60s

    def __init__(self):
        import threading
        self._stop = threading.Event()
        self._thread = None  # set by start(); kept so liveness is observable (fix: original discarded it)
        self.history = []    # ring buffer of last 50 incidents
        self.started_at = int(time.time())
        self.last_heal_t = 0
        self.heal_count = 0

    def start(self):
        """Launch the heal loop on a daemon thread.

        Idempotent (fix): if the loop thread is already alive this is a
        no-op, so a repeated call can't stack a second heal loop.
        """
        import threading
        if self._thread is not None and self._thread.is_alive():
            return
        self._stop.clear()
        self._thread = threading.Thread(target=self._loop, daemon=True, name="self-healing")
        self._thread.start()
        sys.stderr.write("[supervisor] self-healing online · 60s tick\n")

    def stop(self):
        """Signal the heal loop to exit; it wakes from its wait immediately."""
        self._stop.set()

    def status(self):
        """Return supervisor uptime, heal counters, recent incidents and live checks."""
        now = int(time.time())
        return {
            "uptime_sec":    now - self.started_at,
            "heal_count":    self.heal_count,
            "last_heal_t":   self.last_heal_t,
            "last_heal_ago": (now - self.last_heal_t) if self.last_heal_t else None,
            "incidents":     self.history[-20:],
            "checks": self._check_all(),
        }

    def _record(self, kind, detail):
        """Append an incident to the ring buffer (capped at 50) and log it."""
        ev = { "t": int(time.time()), "kind": kind, "detail": detail }
        self.history.append(ev)
        self.history = self.history[-50:]
        sys.stderr.write(f"[supervisor] {kind}: {detail}\n")

    def _check_all(self):
        """Read-only checks — no mutation. Returns a dict the /health endpoint can serialise."""
        now = int(time.time())
        # Age of the today-orders cache; None if it has never refreshed.
        woo_age = (now - WOO_CACHE.last_refresh.get('today', 0)) if WOO_CACHE.last_refresh.get('today') else None
        woo_alive = bool(getattr(WOO_CACHE, '_thread', None) and WOO_CACHE._thread.is_alive())
        proxies = PROXY_POOL.status()
        good_proxies = sum(1 for p in proxies if p.get('healthy'))
        wd_alive = bool(getattr(WD_DAEMON, '_thread', None) and WD_DAEMON._thread.is_alive())
        # The watchdog is only "supposed" to run when its config file says enabled.
        wd_should_run = False
        try:
            with open(WD_CFG_FILE, 'r') as f:
                wd_should_run = bool(json.load(f).get('enabled'))
        except Exception: pass  # missing/corrupt config == "not enabled"
        return {
            "woo_cache":     {"alive": woo_alive, "today_age_sec": woo_age, "stale": woo_age is not None and woo_age > self.STALE_THRESHOLD},
            "proxy_pool":    {"total": len(proxies), "healthy": good_proxies, "all_dead": len(proxies) > 0 and good_proxies == 0},
            "watchdog":      {"alive": wd_alive, "should_run": wd_should_run, "needs_restart": wd_should_run and not wd_alive},
            "customer_db":   {"loaded": getattr(globals().get('BULK_DB', None), '_loaded', False), "count": len(getattr(globals().get('BULK_DB', None), 'records', []) or [])},
        }

    def _heal(self):
        """Single mutation pass — restart anything that's down."""
        ch = self._check_all()
        healed_anything = False

        # Heal: WooCache thread is dead
        if not ch['woo_cache']['alive']:
            try:
                WOO_CACHE.start()
                self._record('heal', 'WooCache thread restarted')
                healed_anything = True
            except Exception as e:
                self._record('heal-fail', f'WooCache restart err: {e}')

        # Heal: today-orders age too stale (likely thread alive but stuck)
        elif ch['woo_cache']['stale']:
            try:
                # Mark caches stale to force a refresh on next tick
                WOO_CACHE.last_refresh = {k: 0 for k in WOO_CACHE.last_refresh}
                self._record('heal', f"WooCache stale {ch['woo_cache']['today_age_sec']}s — forced refresh")
                healed_anything = True
            except Exception as e:
                self._record('heal-fail', f'WooCache refresh err: {e}')

        # Heal: proxy pool fully dead (just log — can't auto-source new proxies safely)
        if ch['proxy_pool']['all_dead']:
            # Reset bad-until timers so the pool retries — one of them might be transient
            try:
                with PROXY_POOL.lock:
                    PROXY_POOL.bad_until = {}
                self._record('heal', 'All proxies bad — reset cooldowns to retry')
                healed_anything = True
            except Exception as e:
                self._record('heal-fail', f'proxy reset err: {e}')

        # Heal: watchdog daemon should be running but isn't
        if ch['watchdog']['needs_restart']:
            try:
                WD_DAEMON.start()
                self._record('heal', 'Watchdog daemon restarted')
                healed_anything = True
            except Exception as e:
                self._record('heal-fail', f'watchdog restart err: {e}')

        if healed_anything:
            self.heal_count += 1
            self.last_heal_t = int(time.time())

    def _heartbeat(self):
        """Best-effort write of the current check results to HEARTBEAT_FILE."""
        try:
            with open(self.HEARTBEAT_FILE, 'w') as f:
                json.dump({
                    "t": int(time.time()),
                    "uptime_sec": int(time.time()) - self.started_at,
                    "checks": self._check_all(),
                    "heal_count": self.heal_count,
                }, f, indent=2)
        except Exception as e:
            sys.stderr.write(f"[supervisor] heartbeat write err: {e}\n")

    def _loop(self):
        """Tick forever: heal pass + heartbeat, then wait INTERVAL seconds.

        Fix: waits on the stop event instead of time.sleep(), so stop()
        interrupts both the startup delay and the per-tick wait immediately
        (same pattern as WooCache._loop). The original slept unconditionally,
        leaving stop() unanswered for up to 60 s.
        """
        # Wait a bit so other daemons have a chance to come up before first heal pass
        if self._stop.wait(15):
            return
        while not self._stop.is_set():
            try:
                self._heal()
                self._heartbeat()
            except Exception as e:
                sys.stderr.write(f"[supervisor] tick err: {e}\n")
            self._stop.wait(self.INTERVAL)


# Module-level singleton; started from main() alongside the other daemons.
SUPERVISOR = SelfHealingSupervisor()


def main():
    """Bind HOST:PORT, start the background daemons, and serve until signalled.

    Starts (in order): the WhatsApp watchdog (only if its config file says
    enabled), the WooCache prefetch daemon, and the self-healing supervisor.
    SIGINT/SIGTERM trigger a clean shutdown of the HTTP server.
    """
    server = ThreadingHTTPServer((HOST, PORT), Handler)
    # Auto-start the watchdog if config says enabled (best-effort: a missing
    # or malformed config file simply means "don't start")
    try:
        with open(WD_CFG_FILE, 'r') as f:
            if json.load(f).get('enabled'):
                WD_DAEMON.start()
    except Exception:
        pass
    # Start the Woo cache daemon — auto-fetches all WooCommerce data in the background
    WOO_CACHE.start()
    # Start the self-healing supervisor — monitors and auto-restarts failing subsystems
    SUPERVISOR.start()
    sys.stderr.write(f"Mission Control: http://{HOST}:{PORT}\n")
    sys.stderr.write(f"  static:        http://{HOST}:{PORT}/Mission-Control.html\n")
    sys.stderr.write(f"  klaviyo proxy: http://{HOST}:{PORT}{KLAVIYO_PREFIX}/api/...  -> {KLAVIYO_HOST}\n")
    sys.stderr.write(f"  aws gateway:   http://{HOST}:{PORT}{AWS_PREFIX}/{{ses,sns,sts}}/{{operation}}  (boto3, creds in headers)\n")
    sys.stderr.write(f"  site mirror:   http://{HOST}:{PORT}{SITE_PREFIX}/...                     -> {SITE_HOST} (X-Frame-Options stripped)\n")
    sys.stderr.write(f"  GA4 Realtime:  http://{HOST}:{PORT}{GA_PREFIX}/realtime/{{propertyId}}      (service-account JSON in X-GA-Service-Account header)\n")
    sys.stderr.write(f"  WhatsApp:      http://{HOST}:{PORT}{WA_PREFIX}/{{webhook,inbox,send,auto-reply,clear}}\n")
    sys.stderr.write(f"  Watchdog:      http://{HOST}:{PORT}{WD_PREFIX}/{{configure,status,start,stop,run-now,history}}  (background daemon · auto-pings WA)\n")
    sys.stderr.write(f"  WooCache:      http://{HOST}:{PORT}{CACHE_PREFIX}/{{snapshot,products,orders,lifetime,status,refresh}}  (server-side prefetch · zero browser-direct Woo)\n")
    sys.stderr.write("Ctrl-C to stop.\n")

    def stop(*_):
        sys.stderr.write("\nstopping mc-server\n")
        # BUGFIX: Python runs signal handlers on the main thread — the same
        # thread blocked in serve_forever(). Calling server.shutdown() here
        # directly deadlocks (shutdown() waits for the serve loop to
        # acknowledge, but the loop is paused inside this handler), so run
        # it from a helper thread instead.
        import threading
        threading.Thread(target=server.shutdown, daemon=True).start()
    signal.signal(signal.SIGINT, stop)
    signal.signal(signal.SIGTERM, stop)
    try:
        server.serve_forever()
    finally:
        server.server_close()   # release the listening socket on exit


if __name__ == "__main__":
    main()
