import logging
import re
import threading
import time
import os
import http.server
import socketserver
from datetime import datetime, timedelta
from telegram import Update
from telegram.ext import Application, MessageHandler, filters, ContextTypes
import gspread
from google.oauth2.service_account import Credentials

# ── CONFIGURACION ──────────────────────────────────────────
TELEGRAM_TOKEN     = "8706444113:AAF_j10RvQSc8XFr0o9TjGeDxabfjd8ZNWE"
SPREADSHEET_ID     = "134NWJuOOeZBb81nnOx8Muwc1jvRJz7RK-I2XzQEb0PQ"
PUBLICIDAD_SHEET_ID = "1nsn8DFcYyssKhfEhFrQoYpXsbvnLArWdU4iqcTT3P9g"  # Sheet historial publicidad
ALFREDO_TELEGRAM_ID = 8696668999
JACIEL_TELEGRAM_ID  = 1865088361  # Compras — supervisor de todas las áreas
CONSECUTIVO_MINIMO  = 1160        # Solo alertas para pedidos >= este consecutivo

EMPLEADOS = {
    "8696668999": ["Captura", "Compras", "Corte", "Costura", "Empaque", "Logistica"],
    "8601602361": ["Costura"],
    "7229360395": ["Costura"],
    "1628645940": ["Costura"],
    "6791755744": ["Empaque"],
    "1865088361": ["Compras"],
    "8307673268": ["Corte"],
    "8782057464": ["Captura"],       # Lupita — solo Captura
    "6960600670": ["Logistica"],     # Nataly — Logística
}

# IDs de Telegram por área (para enviar alertas)
TELEGRAM_IDS_POR_AREA = {
    "Captura":   [8782057464],            # Lupita
    "Compras":   [1865088361],            # Jaciel
    "Corte":     [8307673268],            # Sergio
    "Costura":   [1865088361],            # Jaciel recibe alertas de Costura
    "Empaque":   [6791755744],            # Luis
    "Logistica": [6960600670],            # Nataly
}

FLUJO_AREAS = ["Captura", "Compras", "Corte", "Costura", "Empaque", "Logistica"]

COLUMNA_CONSECUTIVO = 0
COLUMNA_CANCELADO   = 19   # Col T base 0
COLUMNA_FECHA_PED   = 5    # Col F base 0 — Fecha Pedido
COLUMNA_FECHA_ENT   = 6    # Col G base 0 — Fecha Entrega
COLUMNA_CANAL       = 7    # Col H base 0 — Canal
COLUMNA_ARTICULO    = 4    # Col E base 0 — Nombre artículo
COLUMNA_UTILIDAD    = 17   # Col R base 0

COLUMNAS_AREA = {
    "Captura":   {"status": 23, "fecha": 24},
    "Compras":   {"status": 25, "fecha": 26},
    "Corte":     {"status": 27, "fecha": 28},
    "Costura":   {"status": 29, "fecha": 30},
    "Empaque":   {"status": 31, "fecha": 32},
    "Logistica": {"status": 33, "fecha": 34},
}

VALORES_CANCELADO = ["SI", "SÍ", "S", "CANCELADO", "CANCEL"]

# Horarios de alerta (hora, minuto) en hora México
HORARIOS_ALERTA = [(9, 0), (13, 0), (18, 30)]

# Module-wide logging: timestamped INFO-level output to stderr.
logging.basicConfig(
    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
    level=logging.INFO
)
logger = logging.getLogger(__name__)

# Google service-account credentials used by get_client().
# SECURITY NOTE(review): this is a live private key committed in source —
# load it from a file/secret store instead, and rotate the key.
GOOGLE_CREDENTIALS = {
  "type": "service_account",
  "project_id": "deep-wares-248517",
  "private_key_id": "5b11eb0f2ce05232bb38d3a17471c565a255283b",
  "private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQD1ek+vY228B5Ft\nE4rH7g4ujwQu1nN2ZdWPU7vnKVxz7WqVE3UqNG46ROojDqaWLjexJPbzUk/RuL2h\nAI7+VlFen1M3rS/hxP/1m4fs7Nxh2bBHPovJn9iAUf+VOOHSj5b7/pyMfGI/x6i8\nt3jc8rp+v95jxLXUZ3QWG6v+8kzDl1C/7bqwkUiR8MdWQzzPLToS8MRbml8blB7M\n2kz2cVSriaZRPFg+KX/LphHXoYHDEOW0p3aJC5GC+JGDNlvo7degA6bPWrgWU+I0\nbNzq7VouYXQtpOcOZntnlY1rU/6jrzoAWViny5+xA4Ub+/YygKE7P2z/ZluOLJuZ\nZRNg0aqbAgMBAAECggEAAOacgT/DqNN8SKKwOTCZ87jJoqRd0xrnkDmV8HC56qD9\n6aMjJG8B0ovrMgq9OMLdqHRCxbzRFytu4rbeT3egn9DE3OjxSopPCCc0df2eh2tl\nlrVXjPtdr4bdgb4uMfgd7TnPeEKCl8pTR7ICqwjm/00T81jG9AsXJXefh3V7K6KS\nLbID+Lo5yIwQgiP0OvGr8NyaocMdtKK2s8oSTl0StWxWmCc7TQRizYSFYcDL3V75\nXBIfspGXNndrEgJNLtkM+jqHI0okBRvRsHsSL9jg2oOcDoudas7af20Tg4ZXb0+M\nncVh4HAT2Vb4hxuN94VRGabBHEU+CtryZEBJ9amN7QKBgQD/b4IUWdptFG4fSUWg\n46RSm5tvvC6txU7XKkSPS4RmQL99d9kouVBIbAELXeTKpWMgXG5Fpwd9T9ACvlao\naBelERQFo/JjJ+E5VMV3EeRJ3izLmn0smzzY3DxEAFSS8PffB0qLBTHMUdIJh651\n7GvTFbwLVJbmbKopAwqszevgjwKBgQD2BSua50TzJmMyNgMRpnM0mmmP15KUVDpG\nkUMk/QRFtLgLlfrrIR+pX0zIchiqKhdwJBU/DEmALjqdN3BDC3+hfvawzGAE1RKK\nv/m21T2KTqXyY5q6iCnXeBwlHoLsVZgIIxrjHZfseXbQHpfu5XJZF0xIoFKVfiDJ\nqVIxZz2DNQKBgQDifP78CIfyfFEAkLmaZP6I+MykEFnbXr3/bs0HudAV0od4OtJk\nl5wl+w4aBBEf1fHE+fAU9ntfk2s/Y4H3JQUCKdh0/9+mJ/A+I3pLj6O6E7PofH1N\nFgCn0O/m49wZxmwiZ5ajGeID5vCz3ojEqvMYYYcxDLoYanWek8HdY2Eo5QKBgDyR\nw02M2L3gWpW3FIRY7Y4q4ghm85CPOBW9l8FYhZ+/wXwgvbj727awXtAOvyHN4R1b\n1NtD5CipuQaR6pKnFxIklwmqF425A6qjXxBzaYZOnK6TdTmtYlfJG2zMGUGkRRzj\nv6/gzlGLCgRtM8y+Mqvaf4514j8p4Vqk1DaTi2JtAoGAU/v9qGeQt6ZO8k9MBFtJ\npvNw62Ks6OfaPiftF1UD1wTY0EYeZ+TQAwRM781lcJAnHP13a0l7m+0s/XBx0yMB\n26Otv5r2lAPOWjh53nV4VtP7N0W4d+3dJrzC8rW7NYcvaqxkzxWR8Ed4wcdasu2O\n+JWaXeUPVonjYuhh77mEgvY=\n-----END PRIVATE KEY-----\n",
  "client_email": "puffbot@deep-wares-248517.iam.gserviceaccount.com",
  "client_id": "110159887510256792779",
  "auth_uri": "https://accounts.google.com/o/oauth2/auth",
  "token_uri": "https://oauth2.googleapis.com/token",
  "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
  "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/puffbot%40deep-wares-248517.iam.gserviceaccount.com",
  "universe_domain": "googleapis.com"
}

# Spanish month names used to build monthly tab titles like "Enero 2025".
MESES = {
    1: "Enero", 2: "Febrero", 3: "Marzo", 4: "Abril",
    5: "Mayo", 6: "Junio", 7: "Julio", 8: "Agosto",
    9: "Septiembre", 10: "Octubre", 11: "Noviembre", 12: "Diciembre"
}

# ── GOOGLE SHEETS ──────────────────────────────────────────
def get_client():
    """Authorize against Google with the service account and open the
    main orders spreadsheet.

    Returns:
        gspread Spreadsheet handle for SPREADSHEET_ID.
    """
    oauth_scopes = [
        "https://www.googleapis.com/auth/spreadsheets",
        "https://www.googleapis.com/auth/drive"
    ]
    credentials = Credentials.from_service_account_info(
        GOOGLE_CREDENTIALS, scopes=oauth_scopes
    )
    return gspread.authorize(credentials).open_by_key(SPREADSHEET_ID)

def buscar_en_todas_las_pestanas(consecutivo):
    """Search the last 7 monthly tabs (current month going backwards)
    for an order.

    Args:
        consecutivo: order consecutive number (int or str).

    Returns:
        (worksheet, row_number, tab_name) when found,
        otherwise (None, None, None).
    """
    spreadsheet = get_client()
    ahora = datetime.now()
    for i in range(7):
        # Walk back month by month, wrapping across the year boundary.
        mes = ahora.month - i
        anio = ahora.year
        if mes <= 0:
            mes += 12
            anio -= 1
        nombre = f"{MESES[mes]} {anio}"
        try:
            sheet = spreadsheet.worksheet(nombre)
            fila = buscar_pedido(sheet, consecutivo)
            if fila:
                return sheet, fila, nombre
        except Exception as e:
            # A tab for that month may simply not exist — skip it, but log
            # at debug level so genuine API failures are not fully silent.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            logger.debug(f"No se pudo revisar la pestaña {nombre}: {e}")
            continue
    return None, None, None

def buscar_pedido(sheet, consecutivo):
    """Return the 1-based row where *consecutivo* appears in column A.

    Args:
        sheet: gspread worksheet (anything with col_values(col)).
        consecutivo: consecutive number to look for (int or str).

    Returns:
        1-based row index, or None when not found.
    """
    # Normalize the target once instead of on every iteration.
    objetivo = str(consecutivo).strip()
    for fila, valor in enumerate(sheet.col_values(1), start=1):
        if str(valor).strip() == objetivo:
            return fila
    return None

def esta_cancelado(sheet, fila):
    """True when row *fila*'s cancellation cell (col T) holds a cancel marker.

    Wrapped in bool() so callers always get True/False — the previous
    `valor and ...` form leaked None/'' as the return value when the cell
    was empty.
    """
    valor = sheet.cell(fila, COLUMNA_CANCELADO + 1).value
    return bool(valor) and str(valor).strip().upper() in VALORES_CANCELADO

def actualizar_estado(sheet, fila, area, nuevo_status):
    """Write *nuevo_status* and a timestamp into *area*'s columns on row *fila*."""
    columnas = COLUMNAS_AREA[area]
    marca_tiempo = datetime.now().strftime("%d/%m/%Y %H:%M")
    # Column map is 0-based while gspread is 1-based — hence the +1.
    sheet.update_cell(fila, columnas["status"] + 1, nuevo_status)
    sheet.update_cell(fila, columnas["fecha"] + 1, marca_tiempo)

# ── LÓGICA DE ALERTAS ──────────────────────────────────────

def parsear_fecha(fecha_str):
    """Parse dd/mm/yy or dd/mm/yyyy (slash- or dash-separated) dates.

    Args:
        fecha_str: raw cell value; anything str()-convertible.

    Returns:
        datetime on success, None for empty or unrecognized input.
    """
    if not fecha_str:
        return None
    fecha_str = str(fecha_str).strip()
    for fmt in ("%d/%m/%y", "%d/%m/%Y", "%d-%m-%Y", "%d-%m-%y"):
        try:
            return datetime.strptime(fecha_str, fmt)
        except ValueError:
            # Wrong format for this pattern — try the next one.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            continue
    return None

def determinar_area_actual(row):
    """
    Determine which area currently holds the order.

    Rule: the first area in FLUJO_AREAS whose status cell is neither
    'LISTO' nor 'EN PROCESO' is considered the current one.

    Returns:
        (area_name, False) for the first pending area, or (None, False)
        when every area is done or in progress. The second element is a
        "skipped" flag that this function always reports as False.
    """
    def _estado(area):
        col = COLUMNAS_AREA[area]["status"]
        return str(row[col]).strip().upper() if col < len(row) else ""

    for area in FLUJO_AREAS:
        if _estado(area) not in ("LISTO", "EN PROCESO"):
            return area, False
    return None, False

def detectar_areas_saltadas(row):
    """
    Find areas left blank while a later area is already marked 'LISTO'.

    Returns:
        list of (skipped_area, later_area_with_listo) pairs; for each
        skipped area only the first later 'LISTO' area is reported.
    """
    def _estado(area):
        col = COLUMNAS_AREA[area]["status"]
        return str(row[col]).strip().upper() if col < len(row) else ""

    estados = {area: _estado(area) for area in FLUJO_AREAS}

    resultado = []
    for idx, area in enumerate(FLUJO_AREAS):
        if estados[area] in ("LISTO", "EN PROCESO"):
            continue
        # This area is pending — look for any later area already finished.
        posterior = next(
            (a for a in FLUJO_AREAS[idx + 1:] if estados[a] == "LISTO"),
            None,
        )
        if posterior is not None:
            resultado.append((area, posterior))
    return resultado

def es_meli_full(row):
    """True when the channel cell (col H) mentions 'FULL' (MELI Full orders)."""
    if COLUMNA_CANAL >= len(row):
        return False
    return "FULL" in str(row[COLUMNA_CANAL]).strip().upper()

def revisar_pedidos_y_alertar(bot_app):
    """Scan the current month's sheet and send Telegram alerts.

    For each order row (consecutive >= CONSECUTIVO_MINIMO, not cancelled,
    not MELI FULL, with a parseable delivery date) two alert types are
    collected and then sent:
      1. time alerts — grouped per area by urgency relative to the
         delivery date: overdue ("critico"), <24h ("rojo"), <48h ("amarillo");
      2. skipped-area alerts — an earlier workflow area is blank while a
         later one is already 'LISTO'.
    During the 09:00 run it additionally reports Logistics orders missing
    a tracking number (col J) or a carrier (col I).
    Runs inside a worker thread; all failures are logged, never raised.
    """
    import asyncio

    async def enviar(chat_id, mensaje):
        # Send one HTML-formatted message; log delivery failures.
        try:
            await bot_app.bot.send_message(chat_id=chat_id, text=mensaje, parse_mode="HTML")
        except Exception as e:
            logger.error(f"Error enviando mensaje a {chat_id}: {e}")

    def send(chat_id, mensaje):
        # Bridge from this worker thread into the bot's asyncio loop.
        # NOTE(review): update_queue._loop is a private attribute of
        # python-telegram-bot — confirm it exists on the installed version;
        # the get_event_loop() fallback may not be the bot's running loop.
        asyncio.run_coroutine_threadsafe(
            enviar(chat_id, mensaje),
            bot_app.update_queue._loop if hasattr(bot_app.update_queue, '_loop') else asyncio.get_event_loop()
        )

    try:
        logger.info("Iniciando revisión de pedidos...")
        spreadsheet = get_client()
        ahora = datetime.now()
        nombre_mes = f"{MESES[ahora.month]} {ahora.year}"

        try:
            sheet = spreadsheet.worksheet(nombre_mes)
            data  = sheet.get_all_values()
        except Exception as e:
            logger.error(f"No se pudo leer el Sheet: {e}")
            return

        alertas_tiempo  = {}  # area -> [{consec, articulo, fecha_ent, horas, nivel}]
        alertas_saltada = {}  # skipped area -> [{consec, articulo, area_posterior}]

        # data[0] is the header row; sheet rows are 1-based, hence start=2.
        for i, row in enumerate(data[1:], start=2):
            if not row or not row[0]: continue

            # Minimum-consecutive filter (non-numeric col A rows are skipped).
            try:
                consec = int(float(str(row[0]).strip()))
            except:
                continue
            if consec < CONSECUTIVO_MINIMO:
                continue

            # Skip cancelled orders
            cancelado = str(row[COLUMNA_CANCELADO]).strip().upper() if COLUMNA_CANCELADO < len(row) else ""
            if cancelado in VALORES_CANCELADO:
                continue

            # Skip MELI FULL orders (fulfilled by the marketplace)
            if es_meli_full(row):
                continue

            articulo  = str(row[COLUMNA_ARTICULO]).strip() if COLUMNA_ARTICULO < len(row) else f"#{consec}"
            fecha_ent = parsear_fecha(row[COLUMNA_FECHA_ENT] if COLUMNA_FECHA_ENT < len(row) else "")

            # No parseable delivery date -> no urgency can be computed.
            if not fecha_ent:
                continue

            # ── TYPE 1: time alerts ──
            horas_restantes = (fecha_ent - ahora).total_seconds() / 3600
            area_actual, _ = determinar_area_actual(row)

            if area_actual:  # order not yet completed
                nivel = None
                if horas_restantes < 0:
                    nivel = "critico"
                elif horas_restantes <= 24:
                    nivel = "rojo"
                elif horas_restantes <= 48:
                    nivel = "amarillo"

                if nivel:
                    if area_actual not in alertas_tiempo:
                        alertas_tiempo[area_actual] = []
                    alertas_tiempo[area_actual].append({
                        "consec": consec,
                        "articulo": articulo,
                        "fecha_ent": fecha_ent,
                        "horas": horas_restantes,
                        "nivel": nivel
                    })

            # ── TYPE 2: skipped-area alerts ──
            saltadas = detectar_areas_saltadas(row)
            for area_saltada, area_posterior in saltadas:
                if area_saltada not in alertas_saltada:
                    alertas_saltada[area_saltada] = []
                alertas_saltada[area_saltada].append({
                    "consec": consec,
                    "articulo": articulo,
                    "area_posterior": area_posterior
                })

        # ── SEND TIME ALERTS ──
        if alertas_tiempo:
            # Summary message for the admin (Alfredo) and Jaciel
            resumen_admin = f"📊 <b>RESUMEN ALERTAS — {ahora.strftime('%d/%m/%Y %H:%M')}</b>\n\n"
            for area, pedidos in alertas_tiempo.items():
                criticos  = [p for p in pedidos if p["nivel"] == "critico"]
                rojos     = [p for p in pedidos if p["nivel"] == "rojo"]
                amarillos = [p for p in pedidos if p["nivel"] == "amarillo"]
                resumen_admin += f"<b>📍 {area}</b> — {len(pedidos)} pedido(s)\n"
                if criticos:  resumen_admin += f"  💥 Vencidos: {len(criticos)}\n"
                if rojos:     resumen_admin += f"  🔴 &lt;24hrs: {len(rojos)}\n"
                if amarillos: resumen_admin += f"  ⚠️ &lt;48hrs: {len(amarillos)}\n"
                resumen_admin += "\n"

            send(ALFREDO_TELEGRAM_ID, resumen_admin)
            send(JACIEL_TELEGRAM_ID, resumen_admin)

            # Detailed per-area alerts
            for area, pedidos in alertas_tiempo.items():
                ids_area = TELEGRAM_IDS_POR_AREA.get(area, [])
                if not ids_area:
                    continue

                # One message per urgency level
                for nivel, emoji, titulo in [
                    ("critico", "💥", "VENCIDO — URGENTE"),
                    ("rojo",    "🔴", "MENOS DE 24 HRS"),
                    ("amarillo","⚠️", "MENOS DE 48 HRS"),
                ]:
                    grupo = [p for p in pedidos if p["nivel"] == nivel]
                    if not grupo:
                        continue

                    msg = f"{emoji} <b>{titulo}</b>\n"
                    msg += f"📍 Área: <b>{area}</b>\n"
                    msg += f"🕐 {ahora.strftime('%d/%m/%Y %H:%M')}\n\n"
                    for p in grupo:
                        hrs = p["horas"]
                        if hrs < 0:
                            tiempo_str = f"Venció hace {abs(int(hrs))}h"
                        else:
                            tiempo_str = f"{int(hrs)}h restantes"
                        msg += f"• <b>#{p['consec']}</b> — {p['articulo'][:40]}\n"
                        msg += f"  📅 Entrega: {p['fecha_ent'].strftime('%d/%m/%Y')} ({tiempo_str})\n"

                    # Admin and Jaciel already got the summary — avoid duplicates.
                    for uid in ids_area:
                        if uid not in [ALFREDO_TELEGRAM_ID, JACIEL_TELEGRAM_ID]:
                            send(uid, msg)

        # ── SEND SKIPPED-AREA ALERTS ──
        if alertas_saltada:
            for area_saltada, pedidos in alertas_saltada.items():
                ids_area = TELEGRAM_IDS_POR_AREA.get(area_saltada, [])
                msg = f"🔔 <b>RECORDATORIO — {area_saltada}</b>\n\n"
                msg += "Los siguientes pedidos parecen no haber sido marcados como Listo en tu área,\n"
                msg += "pero un área posterior ya los tiene completados:\n\n"
                for p in pedidos:
                    msg += f"• <b>#{p['consec']}</b> — {p['articulo'][:40]}\n"
                    msg += f"  ✅ {p['area_posterior']} ya lo tiene Listo\n"
                # The {consecutivo} braces are a literal template shown to the
                # user (not an f-string) — it tells them what to type back.
                msg += "\n¿Olvidaste marcarlo? Escríbeme: <code>{consecutivo} listo</code>"

                for uid in ids_area:
                    send(uid, msg)

                # Also notify the admin, and Jaciel (unless it's his own area)
                send(ALFREDO_TELEGRAM_ID, msg)
                if area_saltada != "Compras":
                    send(JACIEL_TELEGRAM_ID, msg)

        total = sum(len(v) for v in alertas_tiempo.values()) + sum(len(v) for v in alertas_saltada.values())
        logger.info(f"Revisión completada. {total} alertas enviadas.")

        # 09:00-only alerts: orders missing tracking number or carrier
        if ahora.hour == 9:
            NATALY_ID      = 6960600670
            sin_guia       = []
            sin_paqueteria = []
            for i, row in enumerate(data[1:], start=2):
                if not row or not row[0]: continue
                try:
                    consec = int(float(str(row[0]).strip()))
                except:
                    continue
                if consec < CONSECUTIVO_MINIMO: continue
                # Hard-coded 0-based columns: 19=T cancelled, 33=AH logistics
                # status, 9=J tracking number, 8=I carrier, 4=E item name.
                cancelado = str(row[19]).strip().upper() if 19 < len(row) else ""
                if cancelado in VALORES_CANCELADO: continue
                logistica_st = str(row[33]).strip().upper() if 33 < len(row) else ""
                guia         = str(row[9]).strip() if 9 < len(row) else ""
                paqueteria   = str(row[8]).strip() if 8 < len(row) else ""
                articulo     = str(row[4]).strip() if 4 < len(row) else f"#{consec}"
                guia_vacia = not guia or guia.lower() in [
                    "no encontrado", "pendiente", "no encontrado o pendiente", ""]
                if logistica_st == "LISTO" and guia_vacia:
                    sin_guia.append({"consec": consec, "articulo": articulo})
                if not guia_vacia and not paqueteria:
                    sin_paqueteria.append({"consec": consec, "articulo": articulo, "guia": guia})
            if sin_guia:
                msg = f"\U0001f4e6 <b>PEDIDOS SIN GU\u00cdA</b> \u2014 {len(sin_guia)} pedido(s)\n\n"
                msg += "Log\u00edstica Listo pero sin n\u00famero de gu\u00eda:\n\n"
                for p in sin_guia:
                    msg += f"  \u2022 <b>#{p['consec']}</b> \u2014 {p['articulo'][:40]}\n"
                msg += f"\n\U0001f550 {ahora.strftime('%d/%m/%Y %H:%M')}"
                for uid in [NATALY_ID, ALFREDO_TELEGRAM_ID]:
                    send(uid, msg)
            if sin_paqueteria:
                msg = f"\U0001f69a <b>PEDIDOS SIN PAQUETER\u00cdA</b> \u2014 {len(sin_paqueteria)} pedido(s)\n\n"
                msg += "Tienen gu\u00eda pero falta capturar paqueter\u00eda (col I):\n\n"
                for p in sin_paqueteria:
                    msg += f"  \u2022 <b>#{p['consec']}</b> \u2014 {p['articulo'][:40]}\n"
                    msg += f"    Gu\u00eda: {p['guia'][:30]}\n"
                msg += f"\n\U0001f550 {ahora.strftime('%d/%m/%Y %H:%M')}"
                for uid in [NATALY_ID, ALFREDO_TELEGRAM_ID]:
                    send(uid, msg)

    except Exception as e:
        logger.error(f"Error en revisi\u00f3n de pedidos: {e}")

# ── SCHEDULER DE ALERTAS ───────────────────────────────────

def iniciar_scheduler_alertas(bot_app):
    """Run revisar_pedidos_y_alertar at the scheduled times
    (9:00, 13:00, 18:30 Mexico time — see HORARIOS_ALERTA).

    Spawns a daemon thread that wakes every 30 seconds and fires each
    schedule slot at most once per day.

    Args:
        bot_app: the python-telegram-bot Application, forwarded to the
            revision function for sending messages.
    """
    def loop():
        time.sleep(10)  # give the bot time to finish starting up
        ultima_ejecucion = {}  # "H:MM-date" keys already executed today

        while True:
            ahora = datetime.now()

            for horario in HORARIOS_ALERTA:
                clave = f"{horario[0]}:{horario[1]:02d}-{ahora.date()}"
                # Fire only during the exact minute, and only once per day.
                if (ahora.hour == horario[0] and
                    ahora.minute == horario[1] and
                    clave not in ultima_ejecucion):
                    logger.info(f"Ejecutando revisión programada {horario[0]}:{horario[1]:02d}")
                    ultima_ejecucion[clave] = True
                    revisar_pedidos_y_alertar(bot_app)

            # Prune stale (previous-day) keys so the marker dict does not
            # grow without bound on a long-running process.
            sufijo_hoy = f"-{ahora.date()}"
            ultima_ejecucion = {
                clave: valor for clave, valor in ultima_ejecucion.items()
                if clave.endswith(sufijo_hoy)
            }

            time.sleep(30)  # poll every 30 seconds

    hilo = threading.Thread(target=loop, daemon=True)
    hilo.start()
    logger.info("Scheduler de alertas iniciado — horarios: 9:00, 13:00, 18:30")

# ── PROFIT MONITOR ────────────────────────────────────────
# "tab-consecutivo" keys already alerted for negative profit; lives for
# the process lifetime so each order is alerted at most once per run.
_alertados = set()

def revisar_utilidades_negativas(bot_app):
    """Alert Alfredo and Carlos about orders whose profit (col R) is negative.

    Rows with an empty/zero net-sale cell (col Q) are skipped as incomplete.
    Orders already reported are remembered in the module-level `_alertados`
    set, so each one is alerted at most once per process lifetime.
    All failures are logged, never raised (runs inside a worker thread).
    """
    import asyncio
    try:
        ahora = datetime.now()
        nombre_pestana = f"{MESES[ahora.month]} {ahora.year}"
        spreadsheet = get_client()
        sheet = spreadsheet.worksheet(nombre_pestana)
        utilidades   = sheet.col_values(COLUMNA_UTILIDAD + 1)
        consecutivos = sheet.col_values(COLUMNA_CONSECUTIVO + 1)
        ventas_neto  = sheet.col_values(17)  # Col Q 0-based = 16, gspread 1-based = 17
        nuevas_alertas = []
        for i, utilidad in enumerate(utilidades[1:], start=2):
            if not utilidad:
                continue
            # Don't alert when net sale is empty (order data incomplete).
            venta = ventas_neto[i-1] if (i-1) < len(ventas_neto) else ""
            if not venta or str(venta).strip() in ("", "0", "$0", "$0.00"):
                continue
            try:
                # Strip currency formatting ("$1,234.56 " -> 1234.56).
                valor_num = float(str(utilidad).replace("$", "").replace(",", "").replace(" ", ""))
            except (ValueError, TypeError):
                # Non-numeric cell (stray text) — skip it.
                # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
                continue
            if valor_num < 0:
                consecutivo = consecutivos[i-1] if (i-1) < len(consecutivos) else f"fila {i}"
                clave = f"{nombre_pestana}-{consecutivo}"
                if clave not in _alertados:
                    nuevas_alertas.append((consecutivo, valor_num))
                    _alertados.add(clave)

        if nuevas_alertas:
            lineas = "\n".join(f"  • #{c}: ${v:,.2f}" for c, v in nuevas_alertas)
            mensaje = f"💸 <b>ALERTA — UTILIDAD NEGATIVA</b>\n\n{lineas}\n\n🕐 {ahora.strftime('%d/%m/%Y %H:%M')}"
            async def enviar():
                for chat_id in [ALFREDO_TELEGRAM_ID, 1535805840]:  # Alfredo + Carlos
                    try:
                        await bot_app.bot.send_message(chat_id=chat_id, text=mensaje, parse_mode="HTML")
                    except Exception as ex:
                        logger.error(f"Error alerta utilidad a {chat_id}: {ex}")
            # NOTE(review): update_queue._loop is private API of
            # python-telegram-bot — verify against the installed version.
            asyncio.run_coroutine_threadsafe(
                enviar(),
                bot_app.update_queue._loop if hasattr(bot_app.update_queue, '_loop') else asyncio.get_event_loop()
            )
    except Exception as e:
        logger.error(f"Error revisando utilidades: {e}")

def iniciar_loop_utilidades(bot_app):
    """Daemon thread: run the negative-profit check once a day at 10:00.

    Uses a single last-run date marker instead of an ever-growing dict
    (the previous version added one key per day and never pruned them).
    """
    def loop():
        time.sleep(60)  # let the bot finish starting up
        ultimo_dia_ejecutado = None  # 'YYYY-MM-DD' of the last run
        while True:
            ahora = datetime.now()
            clave_dia = ahora.strftime('%Y-%m-%d')
            # Run once per day, any minute within the 10:00 hour.
            if ahora.hour == 10 and clave_dia != ultimo_dia_ejecutado:
                ultimo_dia_ejecutado = clave_dia
                revisar_utilidades_negativas(bot_app)
            time.sleep(60)
    threading.Thread(target=loop, daemon=True).start()

# Order IDs already alerted as "not captured". The set is reset every day
# (keyed by _pedidos_alertados_fecha) so new-day orders alert again —
# see revisar_pedidos_no_capturados.
_pedidos_alertados = set()
_pedidos_alertados_fecha = ""

def revisar_pedidos_no_capturados(bot_app):
    """Reconcile the last 24 h of VentiApp orders against the Sheet.

    Two actions, for orders containing at least one Puff SKU (P + digit):
      1. Orders whose globalStatus is a cancel state get col T ('SI')
         written on their Sheet row (matched via col D order numbers).
      2. Active orders not yet present in col D trigger a Telegram alert
         to Lupita (Capture) and Alfredo; alerted IDs are remembered in
         the module-level _pedidos_alertados set, which resets daily.
    Runs inside a worker thread; all failures are logged, never raised.
    """
    import asyncio, re
    from datetime import datetime, timedelta
    global _pedidos_alertados, _pedidos_alertados_fecha
    try:
        ahora   = datetime.now()
        # Reset the alerted set each day so the new day's orders alert again
        hoy_str = ahora.strftime('%Y-%m-%d')
        if hoy_str != _pedidos_alertados_fecha:
            _pedidos_alertados = set()
            _pedidos_alertados_fecha = hoy_str
        # VentiApp expects MM/DD/YYYY date strings.
        ayer    = (ahora - timedelta(hours=24)).strftime("%m/%d/%Y")
        hoy     = ahora.strftime("%m/%d/%Y")
        data    = venti_get_orders(ayer, hoy, max_results=200)
        orders  = data.get('results', [])

        # Puff products are identified by SKUs starting 'P' + digit.
        puff_sku = re.compile(r'^P\d')
        estados_cancelados = {'CANCELED','CANCELLED','REFUSED','REFUNDED',
                              'PARTIALLY_CANCELED','RETURNED','PARTIALLY_RETURNED'}
        estados_ignorar = {'PENDING_PAYMENT','PENDING','WAITING_PAYMENT','PAYMENT_PENDING',
                           'WAITING','CREATED'}

        ordenes_activas    = {}  # primary id -> order dict
        ordenes_canceladas = {}  # primary id -> order dict

        for o in orders:
            items = o.get('items', [])
            # Skip orders with no Puff SKUs at all.
            if not any(puff_sku.match(str(i.get('sku',''))) for i in items):
                continue
            ext_id    = str(o.get('externalId','') or '').strip()
            order_ref = str(o.get('orderRef','') or '').strip()
            cart_id   = str(o.get('cartId','') or '').strip()
            canal     = str(o.get('channel','')).lower()
            # Primary id per channel — Shopify: orderRef (PF_XXXXX) |
            # MELI: cartId | others: externalId
            if canal == 'shopify' and order_ref:
                id_principal = order_ref
            elif canal == 'meli' and cart_id:
                id_principal = cart_id
            else:
                id_principal = ext_id
            if not id_principal:
                continue
            # Keep every known id for this order, so Sheet matching works
            # regardless of which one was captured.
            o['_ids'] = {x for x in [ext_id, order_ref, cart_id] if x}
            status = o.get('globalStatus','').upper()
            if status in estados_cancelados:
                ordenes_canceladas[id_principal] = o
            elif status in estados_ignorar:
                pass  # payment still pending — nothing to do yet
            else:
                ordenes_activas[id_principal] = o

        if not ordenes_activas and not ordenes_canceladas:
            return

        # Read the current month's sheet
        spreadsheet    = get_client()
        nombre_pestana = f"{MESES[ahora.month]} {ahora.year}"
        try:
            sheet = spreadsheet.worksheet(nombre_pestana)
        except Exception:
            return

        num_pedidos = sheet.col_values(4)   # Col D — order number
        cancelados  = sheet.col_values(20)  # Col T — Cancelled SI/NO

        # ── 1. Mark cancelled orders in col T ──────────────────────────
        celdas_actualizar = []
        for i, num in enumerate(num_pedidos[1:], start=2):  # from row 2 on
            num_str = str(num).strip()
            if num_str in ordenes_canceladas:
                # Only write when the cell doesn't already say SI
                val_actual = cancelados[i-1] if (i-1) < len(cancelados) else ''
                if str(val_actual).upper().strip() != 'SI':
                    celdas_actualizar.append({'range': f'T{i}', 'values': [['SI']]})

        if celdas_actualizar:
            sheet.batch_update(celdas_actualizar)
            logger.info(f"Marcados {len(celdas_actualizar)} pedidos como cancelados en Sheet")

        # ── 2. Alert about uncaptured orders ──────────────────────────
        pedidos_sheet = set(str(v).strip() for v in num_pedidos[1:] if v)
        no_capturadas = []
        for ext_id, orden in ordenes_activas.items():
            # Check whether any of the order's ids is already in the Sheet
            ids_orden = orden.get('_ids', {ext_id})
            ya_capturado = bool(ids_orden & pedidos_sheet)
            ya_alertado  = bool(ids_orden & _pedidos_alertados)
            if not ya_capturado and not ya_alertado:
                canal  = orden.get('channel','').upper()
                total  = orden.get('total', 0)
                skus   = ', '.join(set(
                    i.get('sku','') for i in orden.get('items',[])
                    if puff_sku.match(str(i.get('sku','')))
                ))
                no_capturadas.append((ext_id, canal, skus, total))
                _pedidos_alertados.update(ids_orden)

        if no_capturadas:
            lineas = '\n'.join(
                f"  • <b>{canal}</b> | {skus} | ${total:,.0f}\n    {ext_id}"
                for ext_id, canal, skus, total in no_capturadas
            )
            msg = (f"📋 <b>PEDIDOS SIN CAPTURAR</b> — {len(no_capturadas)} orden(es)\n\n"
                   f"{lineas}\n\n"
                   f"🕐 {ahora.strftime('%d/%m/%Y %H:%M')}")

            async def enviar_alertas():
                for chat_id in [8782057464, ALFREDO_TELEGRAM_ID]:
                    try:
                        await bot_app.bot.send_message(chat_id=chat_id, text=msg, parse_mode="HTML")
                    except Exception as e:
                        logger.error(f"Error enviando alerta captura a {chat_id}: {e}")

            # NOTE(review): builds a throwaway event loop in this worker
            # thread instead of reusing the bot's loop like the other senders.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            loop.run_until_complete(enviar_alertas())
            loop.close()

    except Exception as e:
        logger.error(f"Error revisando pedidos no capturados: {e}")

def iniciar_loop_captura(bot_app):
    """Daemon thread: poll VentiApp for uncaptured orders every 30 minutes."""
    def _ciclo():
        time.sleep(120)  # wait 2 minutes after startup
        while True:
            revisar_pedidos_no_capturados(bot_app)
            time.sleep(1800)  # check every 30 minutes
    threading.Thread(target=_ciclo, daemon=True).start()

# ── WEB SERVER / EXTERNAL API CONFIG ───────────────────────
# SECURITY NOTE(review): every token/password below is a live secret
# hard-coded in source — move them to environment variables and rotate.
APPS_SCRIPT_VENTAS     = "https://script.google.com/macros/s/AKfycbx1mRQQZTpqzlgMdkM2UAV4pIQkLk0GAAA_iixK-7ItUHPy8AdibJtbWWsbxyiTAvig/exec"
APPS_SCRIPT_INVENTARIO = "https://script.google.com/macros/s/AKfycbyE3uyllLtIM8nTZyMMg1vzqXC4KGhWlwdNVcWx6aX99MBxpcUQ5jxfbFL2q8XJofltww/exec"
APPS_SCRIPT_CALENDARIO = "https://script.google.com/macros/s/AKfycbx89mYlauj-vkiP-6BU3LkfhJoJut6vB3kvCBdKOZokExmFjbMcqrSQ457cLQ6yXqCd/exec"

# META ADS
META_APP_ID     = "1634566567742613"
META_APP_SECRET = "b87c2339aa3ecd0735cb4496b4f0625b"
META_TOKEN      = "EAAXOoSZBhzJUBQzkUG2hsZBZALS0ZBIjeELuI1ofKucoNw87ECNX0HjyTZAnDCaQtvjdXgKQoZAOxIPxv7N0kAqgrhcNbixn0odqNPZCIgQvZAztfgxvMe0Q8usPDd26AES8rKa48axvOIreVLAabwupzUB0ct86J76ohOlctrDjWzZCqlcVC63CIEhD9U3zlhSq2swZDZD"  # Permanent BM token — does not expire
META_AD_ACCOUNT = "act_248042726855292"

# ── AMAZON ADS ──────────────────────────────────────────────
AMAZON_CLIENT_ID     = "amzn1.application-oa2-client.45554f85de5b4e41b40506d60644ef72"
AMAZON_CLIENT_SECRET = "amzn1.oa2-cs.v1.72905914f42e8609803df2df101926588aa41dd5cb14da1dd53456697209fbfb"
AMAZON_REFRESH_TOKEN = "Atzr|IwEBIAFf_jlbthouizybfgtgn3taFGfPPdYyJA8hd2l5JWBuscnWsLWHMxm9WX4s8Uqu3oyWbC7iGVDQBt8s84BlwSFTmbXB2Tc8FFJxPGJmYlXv3qMQRqFtZ5nt3PRRLckp0zCIXTM_sxNaekiA_dN70H8470lOGXIKX5vhenO9QrTojcwytREDu7ySDNjTRpG4nNpOtkrYmgg2Seou58KOdpd7hm2T998VS-BgYzVNswhZjgthX_Jz6hfHyGg1G0JZZ8qOedWmigMw90dcHTIf9Zvm20F3foeu8iE_mJl7sNrTolZAJP0C1tfQ1j5qGvClzaKEDJxYMD2mLcPLKe4_E7ylaGXt7aaUak1biS22mjtCgr_syEFuGvD9GoWupT0zz8IC8xsieNJ1SQsFkKJy1VcqkVhdhtLCcv_l-Mj1mVLPnmkmXaE-t9brIGhY-S0RU9JHQgYXO8i9gE5CIU9XOSo19GSBX_74y_6RsvzgRydnMAWsa145lZIm6HWy6bWGksU0CT_KMOM4yrTEHqNDghM4oJ_F4m2h57vDe0oDpeXQAA"
AMAZON_PROFILE_ID    = "3379718167592355"  # Puff MX · MXN · Mexico
# In-memory token cache: {"token": str | None, "expires": unix timestamp}.
_amazon_token        = {"token": None, "expires": 0}

# ── VENTIAPP API ────────────────────────────────────────────
VENTI_BASE     = "https://ventiapi.azurewebsites.net"
VENTI_EMAIL    = "alrof01@gmail.com"
VENTI_PASSWORD = "Claude2026"
# In-memory token cache: {"token": str | None, "expires": unix timestamp}.
_venti_token   = {"token": None, "expires": 0}

def venti_get_token():
    """Return a cached VentiApp bearer token, logging in again when expired.

    The token and its expiry are kept in the module-level _venti_token
    dict; a fresh password-grant login is performed only when needed.
    """
    import urllib.request, urllib.parse, json, time
    if _venti_token["token"] and time.time() < _venti_token["expires"]:
        return _venti_token["token"]
    # No valid cached token — perform a fresh password login.
    cuerpo = urllib.parse.urlencode({
        "username":   VENTI_EMAIL,
        "password":   VENTI_PASSWORD,
        "grant_type": "password"
    }).encode("utf-8")
    peticion = urllib.request.Request(
        f"{VENTI_BASE}/login",
        data=cuerpo,
        headers={"Content-Type": "application/x-www-form-urlencoded"},
        method="POST",
    )
    with urllib.request.urlopen(peticion, timeout=15) as respuesta:
        datos = json.loads(respuesta.read())
    nuevo_token = datos.get("access_token")
    duracion = datos.get("expires_in", 604799)
    _venti_token["token"] = nuevo_token
    # Renew 60 s early so we never use a token right at its expiry edge.
    _venti_token["expires"] = time.time() + duracion - 60
    return nuevo_token

def venti_get_orders(date_from, date_to, status=None, channel=None, max_results=200):
    """Fetch orders created between two dates from the VentiApp API.

    Args:
        date_from, date_to: MM/DD/YYYY strings (API params 'date'/'date2').
        status, channel: optional filters passed through verbatim.
        max_results: maximum number of orders to return.

    Returns:
        Decoded JSON response (dict) from the orders endpoint.
    """
    import urllib.request, urllib.parse, json
    token = venti_get_token()
    consulta = {
        "date": date_from,
        "date2": date_to,
        "dateType": "created",
        "max": max_results,
    }
    if status:
        consulta["status"] = status
    if channel:
        consulta["channel"] = channel
    peticion = urllib.request.Request(
        f"{VENTI_BASE}/api/orders/orders?{urllib.parse.urlencode(consulta)}",
        headers={"Authorization": f"Bearer {token}"},
    )
    with urllib.request.urlopen(peticion, timeout=30) as respuesta:
        return json.loads(respuesta.read())

def iniciar_servidor_web():
    PORT = int(os.environ.get("PORT", 8080))

    class Handler(http.server.SimpleHTTPRequestHandler):
        def do_GET(self):
            """Dispatch GET requests by path prefix to the matching proxy.

            NOTE: branch order is significant because startswith() is used —
            '/api/ventas' must precede '/api/venti' (the former starts with
            the latter) and '/api/amazon-sync' must precede '/api/amazon'.
            Unmatched paths fall through to static file serving.
            """
            if self.path.startswith('/api/ventas'):
                self.proxy_ventas()
            elif self.path.startswith('/api/liverpool'):
                self.proxy_liverpool()
            elif self.path.startswith('/api/venti'):
                self.proxy_venti()
            elif self.path.startswith('/api/inventario'):
                self.proxy_inventario()
            elif self.path.startswith('/api/calendario'):
                self.proxy_calendario()
            elif self.path.startswith('/api/meta'):
                self.proxy_meta()
            elif self.path.startswith('/api/google'):
                self.proxy_google()
            elif self.path.startswith('/api/publicidad-historial'):
                self.proxy_publicidad_historial()
            elif self.path.startswith('/api/cierre-manual'):
                self.proxy_cierre_manual()
            elif self.path.startswith('/api/amazon-sync'):
                self.proxy_amazon_sync()
            elif self.path.startswith('/api/amazon'):
                self.proxy_amazon()
            else:
                # Not an API path — serve static files (SimpleHTTPRequestHandler).
                super().do_GET()

        def proxy_ventas(self):
            """Proxy the Apps Script sales endpoint, adding CORS/no-cache headers.

            On upstream failure replies 500 with {"ok": false, "error": ...}.
            """
            import urllib.request, json
            try:
                req = urllib.request.Request(APPS_SCRIPT_VENTAS, headers={'User-Agent': 'Mozilla/5.0'})
                with urllib.request.urlopen(req, timeout=30) as resp:
                    data = resp.read()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(data)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes quotes/backslashes in the message;
                # the previous f-string could emit invalid JSON.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_inventario(self):
            """Proxy the Apps Script inventory endpoint (longer 60 s timeout).

            On upstream failure replies 500 with {"ok": false, "error": ...}.
            """
            import urllib.request, json
            try:
                req = urllib.request.Request(APPS_SCRIPT_INVENTARIO, headers={'User-Agent': 'Mozilla/5.0'})
                with urllib.request.urlopen(req, timeout=60) as resp:
                    data = resp.read()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(data)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes quotes/backslashes in the message;
                # the previous f-string could emit invalid JSON.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_calendario(self):
            """Proxy the Apps Script calendar endpoint.

            Optional ?mes=&anio= query params are forwarded; values are now
            URL-encoded via urlencode (the previous manual '?'/'&' concatenation
            forwarded them raw). Assumes APPS_SCRIPT_CALENDARIO carries no
            query string of its own — same assumption as the original code.
            """
            import urllib.request, urllib.parse, json
            try:
                parsed = urllib.parse.urlparse(self.path)
                params = urllib.parse.parse_qs(parsed.query)
                mes    = params.get('mes',  [''])[0]
                anio   = params.get('anio', [''])[0]
                query  = {}
                if mes:  query['mes']  = mes
                if anio: query['anio'] = anio
                url = APPS_SCRIPT_CALENDARIO
                if query:
                    url += '?' + urllib.parse.urlencode(query)
                req = urllib.request.Request(url, headers={'User-Agent': 'Mozilla/5.0'})
                with urllib.request.urlopen(req, timeout=45) as resp:
                    data = resp.read()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(data)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_meta(self):
            """Proxy Meta (Facebook) Ads campaign-level insights.

            Query params: ?since=YYYY-MM-DD&until=YYYY-MM-DD&account=act_...
            (defaults: current month-to-date, META_AD_ACCOUNT). Adds a computed
            'roas' (attributed purchase value / spend) to each campaign row.
            """
            import urllib.request, urllib.parse, json
            try:
                parsed = urllib.parse.urlparse(self.path)
                params = urllib.parse.parse_qs(parsed.query)
                since       = params.get('since', [''])[0]
                until       = params.get('until', [''])[0]
                ad_account  = params.get('account', [META_AD_ACCOUNT])[0]

                fields = 'campaign_name,spend,impressions,clicks,cpc,cpm,ctr,actions,action_values,date_start,date_stop'
                if since and until:
                    time_range = json.dumps({"since": since, "until": until})
                else:
                    # Default window: first day of the current month through today.
                    from datetime import datetime
                    hoy = datetime.now()
                    time_range = json.dumps({
                        "since": f"{hoy.year}-{str(hoy.month).zfill(2)}-01",
                        "until": hoy.strftime("%Y-%m-%d")
                    })

                url = (f'https://graph.facebook.com/v19.0/{ad_account}/insights'
                       f'?fields={fields}'
                       f'&time_range={urllib.parse.quote(time_range)}'
                       f'&level=campaign'
                       f'&access_token={META_TOKEN}')

                req = urllib.request.Request(url)
                with urllib.request.urlopen(req, timeout=30) as r:
                    data = json.loads(r.read())

                # ROAS = purchase action value / spend (0 when spend is 0).
                for c in data.get('data', []):
                    spend = float(c.get('spend', 0) or 0)
                    av = c.get('action_values', [])
                    purchase_val = sum(float(a.get('value', 0)) for a in av
                                      if a.get('action_type') == 'purchase')
                    c['roas'] = round(purchase_val / spend, 2) if spend > 0 else 0
                    c['spend'] = spend

                result = json.dumps({"ok": True, "account": ad_account,
                                     "data": data.get('data', []),
                                     "paging": data.get('paging', {})}).encode()

                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(result)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_publicidad_historial(self):
            """Return available months of the advertising-history spreadsheet.

            ?mes=<tab name> additionally returns that tab's data: cell A1 of
            each tab is expected to hold a JSON blob (empty cell → {}).
            """
            import urllib.parse, json
            try:
                parsed = urllib.parse.urlparse(self.path)
                params = urllib.parse.parse_qs(parsed.query)
                mes = params.get('mes', [None])[0]

                creds = Credentials.from_service_account_info(GOOGLE_CREDENTIALS, scopes=[
                    "https://www.googleapis.com/auth/spreadsheets",
                    "https://www.googleapis.com/auth/drive"
                ])
                client = gspread.authorize(creds)
                pub_sheet = client.open_by_key(PUBLICIDAD_SHEET_ID)
                # One worksheet tab per archived month.
                pestanas = [ws.title for ws in pub_sheet.worksheets()]

                if mes and mes in pestanas:
                    ws = pub_sheet.worksheet(mes)
                    raw = ws.cell(1, 1).value
                    data = json.loads(raw) if raw else {}
                    result = json.dumps({"ok": True, "mes": mes, "meses": pestanas, "data": data}).encode()
                else:
                    result = json.dumps({"ok": True, "meses": pestanas, "data": None}).encode()

                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(result)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_cierre_manual(self):
            """Force the monthly advertising close in a background thread.

            Replies immediately; the close itself runs off-thread.
            NOTE(review): endpoint has no authentication — confirm it should
            be callable by anyone who can reach the server.
            """
            import json
            try:
                # threading is imported at module level; the redundant local
                # import was removed.
                threading.Thread(target=cerrar_mes_publicidad, daemon=True).start()
                result = json.dumps({"ok": True, "msg": "Cierre mensual iniciado en background"}).encode()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                self.wfile.write(result)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_amazon_sync(self):
            """Diagnostic endpoint — forces an Amazon Ads report request and
            returns the raw result or error.

            Always replies HTTP 200 with a JSON body (including on exceptions,
            where the traceback is embedded) so the caller can inspect failures.
            """
            # NOTE(review): gzip is imported but never used in this method.
            import urllib.request, urllib.parse, json, time as time_mod, gzip
            try:
                global _amazon_token
                # Refresh the cached access token when missing or expired
                # (LWA refresh-token grant).
                if not _amazon_token["token"] or time_mod.time() > _amazon_token["expires"]:
                    body = urllib.parse.urlencode({
                        "grant_type": "refresh_token",
                        "refresh_token": AMAZON_REFRESH_TOKEN,
                        "client_id": AMAZON_CLIENT_ID,
                        "client_secret": AMAZON_CLIENT_SECRET,
                    }).encode()
                    req = urllib.request.Request(
                        "https://api.amazon.com/auth/o2/token",
                        data=body,
                        headers={"Content-Type": "application/x-www-form-urlencoded"},
                        method="POST"
                    )
                    with urllib.request.urlopen(req, timeout=15) as r:
                        tk = json.loads(r.read())
                    _amazon_token["token"]   = tk["access_token"]
                    # Renew a minute early to avoid racing the expiry.
                    _amazon_token["expires"] = time_mod.time() + tk.get("expires_in", 3600) - 60

                access_token = _amazon_token["token"]
                from datetime import datetime
                # Reporting window: first day of the current month through today.
                hoy   = datetime.now()
                since = f"{hoy.year}-{str(hoy.month).zfill(2)}-01"
                until = hoy.strftime("%Y-%m-%d")

                headers_ads = {
                    "Authorization":                   f"Bearer {access_token}",
                    "Amazon-Advertising-API-ClientId": AMAZON_CLIENT_ID,
                    "Amazon-Advertising-API-Scope":    AMAZON_PROFILE_ID,
                    "Content-Type":                    "application/json",
                    "Accept":                          "application/json",
                }

                # Step 1: request a Sponsored Products campaign report.
                report_body = json.dumps({
                    "startDate": since, "endDate": until,
                    "configuration": {
                        "adProduct": "SPONSORED_PRODUCTS",
                        "groupBy": ["campaign"],
                        "columns": ["campaignName","spend","impressions","clicks","purchases30d","sales30d"],
                        "reportTypeId": "spCampaigns",
                        "timeUnit": "SUMMARY",
                        "format": "GZIP_JSON",
                    }
                }).encode()

                req = urllib.request.Request(
                    "https://advertising-api.amazon.com/reporting/reports",
                    data=report_body, headers=headers_ads, method="POST"
                )
                with urllib.request.urlopen(req, timeout=30) as r:
                    report_resp = json.loads(r.read())

                report_id = report_resp.get("reportId")
                if not report_id:
                    # Surface the raw creation response for debugging.
                    result = json.dumps({"step": "create_report", "response": report_resp}).encode()
                    self.send_response(200)
                    self.send_header("Content-Type", "application/json")
                    self.send_header("Access-Control-Allow-Origin", "*")
                    self.end_headers()
                    self.wfile.write(result)
                    return

                # Step 2: check the report status once (no polling — diagnostics only).
                time_mod.sleep(5)
                req2 = urllib.request.Request(
                    f"https://advertising-api.amazon.com/reporting/reports/{report_id}",
                    headers=headers_ads
                )
                with urllib.request.urlopen(req2, timeout=15) as r2:
                    status_resp = json.loads(r2.read())

                result = json.dumps({
                    "ok": True,
                    "report_id": report_id,
                    "status": status_resp.get("status"),
                    "full_response": status_resp,
                    "date_start": since,
                    "date_stop": until,
                }).encode()
                self.send_response(200)
                self.send_header("Content-Type", "application/json")
                self.send_header("Access-Control-Allow-Origin", "*")
                self.end_headers()
                self.wfile.write(result)

            except Exception as e:
                # Deliberate: reply 200 with the traceback so the diagnostic
                # caller always gets a readable JSON body.
                import traceback
                result = json.dumps({"ok": False, "error": str(e), "trace": traceback.format_exc()}).encode()
                self.send_response(200)
                self.send_header("Content-Type", "application/json")
                self.send_header("Access-Control-Allow-Origin", "*")
                self.end_headers()
                self.wfile.write(result)

        def proxy_amazon(self):
            """Serve pre-computed Amazon Ads data from the 'AmazonAds' sheet tab.

            Cell A1 of the tab holds a JSON blob written by the background
            sync (same pattern as the GoogleAds tab).
            """
            import json
            try:
                creds = Credentials.from_service_account_info(GOOGLE_CREDENTIALS, scopes=[
                    "https://www.googleapis.com/auth/spreadsheets",
                    "https://www.googleapis.com/auth/drive"
                ])
                client = gspread.authorize(creds)
                sheet = client.open_by_key(SPREADSHEET_ID).worksheet('AmazonAds')
                raw = sheet.cell(1, 1).value
                if not raw:
                    raise Exception('Sin datos en pestaña AmazonAds — espera la primera sincronización')
                data = json.loads(raw)
                result = json.dumps(data).encode()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(result)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_amazon_OLD(self):
            """DEAD CODE (review): do_GET never routes here — '/api/amazon' is
            handled by proxy_amazon. Kept as reference for the synchronous
            Amazon Ads v3 flow: token refresh, report creation, polling,
            gzip download, and per-campaign shaping. Consider deleting.
            """
            import urllib.request, urllib.parse, json, time as time_mod
            try:
                global _amazon_token
                # Refresh the access token if it expired (LWA refresh-token grant).
                if not _amazon_token["token"] or time_mod.time() > _amazon_token["expires"]:
                    body = urllib.parse.urlencode({
                        "grant_type":    "refresh_token",
                        "refresh_token": AMAZON_REFRESH_TOKEN,
                        "client_id":     AMAZON_CLIENT_ID,
                        "client_secret": AMAZON_CLIENT_SECRET,
                    }).encode()
                    req = urllib.request.Request(
                        "https://api.amazon.com/auth/o2/token",
                        data=body,
                        headers={"Content-Type": "application/x-www-form-urlencoded"},
                        method="POST"
                    )
                    with urllib.request.urlopen(req, timeout=15) as r:
                        tk = json.loads(r.read())
                    _amazon_token["token"]   = tk["access_token"]
                    # Renew a minute early to avoid racing the expiry.
                    _amazon_token["expires"] = time_mod.time() + tk.get("expires_in", 3600) - 60

                access_token = _amazon_token["token"]
                from datetime import datetime
                # Window: first day of the current month through today.
                hoy   = datetime.now()
                since = f"{hoy.year}-{str(hoy.month).zfill(2)}-01"
                until = hoy.strftime("%Y-%m-%d")

                headers_ads = {
                    "Authorization":                   f"Bearer {access_token}",
                    "Amazon-Advertising-API-ClientId": AMAZON_CLIENT_ID,
                    "Amazon-Advertising-API-Scope":    AMAZON_PROFILE_ID,
                    "Content-Type":                    "application/json",
                    "Accept":                          "application/json",
                }

                # v3 API — Sponsored Products report via the reporting endpoint.
                report_body = json.dumps({
                    "startDate":      since,
                    "endDate":        until,
                    "configuration": {
                        "adProduct":       "SPONSORED_PRODUCTS",
                        "groupBy":         ["campaign"],
                        "columns":         ["campaignName","spend","impressions","clicks","purchases30d","sales30d"],
                        "reportTypeId":    "spCampaigns",
                        "timeUnit":        "SUMMARY",
                        "format":          "GZIP_JSON",
                    }
                }).encode()

                req = urllib.request.Request(
                    "https://advertising-api.amazon.com/reporting/reports",
                    data=report_body, headers=headers_ads, method="POST"
                )
                with urllib.request.urlopen(req, timeout=30) as r:
                    report_resp = json.loads(r.read())

                report_id = report_resp.get("reportId")
                if not report_id:
                    raise Exception(f"No reportId: {json.dumps(report_resp)}")

                # Poll until the report is ready (up to 15 tries, 4 s apart).
                raw_data = None
                for _ in range(15):
                    time_mod.sleep(4)
                    req2 = urllib.request.Request(
                        f"https://advertising-api.amazon.com/reporting/reports/{report_id}",
                        headers=headers_ads
                    )
                    with urllib.request.urlopen(req2, timeout=15) as r2:
                        status_resp = json.loads(r2.read())
                    status = status_resp.get("status", "")
                    if status == "COMPLETED":
                        dl_url = status_resp.get("url") or status_resp.get("location")
                        req3 = urllib.request.Request(dl_url)
                        with urllib.request.urlopen(req3, timeout=30) as r3:
                            import gzip
                            raw_data = json.loads(gzip.decompress(r3.read()))
                        break
                    elif status in ["FAILURE", "FAILED"]:
                        raise Exception(f"Reporte Amazon falló: {status_resp}")

                if raw_data is None:
                    raise Exception("Timeout esperando reporte Amazon Ads")

                # Shape per-campaign metrics; ROAS/CPC guarded against div-by-zero.
                campaigns = []
                for c in raw_data:
                    spend  = float(c.get("spend", 0) or 0)
                    sales  = float(c.get("sales30d", 0) or 0)
                    clicks = int(c.get("clicks", 0) or 0)
                    campaigns.append({
                        "campaign_name": c.get("campaignName", ""),
                        "spend":         spend,
                        "impressions":   int(c.get("impressions", 0) or 0),
                        "clicks":        clicks,
                        "sales":         sales,
                        "conversions":   int(c.get("purchases30d", 0) or 0),
                        "roas":          round(sales / spend, 2) if spend > 0 else 0,
                        "cpc":           round(spend / clicks, 2) if clicks > 0 else 0,
                    })

                result = json.dumps({
                    "ok": True, "profile_id": AMAZON_PROFILE_ID,
                    "date_start": since, "date_stop": until,
                    "data": campaigns
                }).encode()
                self.send_response(200)
                self.send_header("Content-Type", "application/json")
                self.send_header("Access-Control-Allow-Origin", "*")
                self.send_header("Cache-Control", "no-cache")
                self.end_headers()
                self.wfile.write(result)
            except Exception as e:
                self.send_response(500)
                self.send_header("Content-Type", "application/json")
                self.send_header("Access-Control-Allow-Origin", "*")
                self.end_headers()
                # NOTE(review): f-string can emit invalid JSON if str(e)
                # contains quotes — harmless while this method is unreachable.
                self.wfile.write(f'{{"ok":false,"error":"{str(e)}"}}'.encode())

        def proxy_google(self):
            """Serve pre-computed Google Ads data from the 'GoogleAds' sheet tab.

            Cell A1 of the tab holds a JSON blob written by a background sync.
            """
            import json
            try:
                creds = Credentials.from_service_account_info(GOOGLE_CREDENTIALS, scopes=[
                    "https://www.googleapis.com/auth/spreadsheets",
                    "https://www.googleapis.com/auth/drive"
                ])
                client = gspread.authorize(creds)
                sheet = client.open_by_key(SPREADSHEET_ID).worksheet('GoogleAds')
                raw = sheet.cell(1, 1).value
                if not raw:
                    raise Exception('Sin datos en pestaña GoogleAds')
                data = json.loads(raw)
                result = json.dumps(data).encode()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(result)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_liverpool(self):
            """Proxy Liverpool (Mirakl) orders, filtered to Puff MX lines.

            Query params: ?start_date=&end_date=&max= (ISO timestamps are
            forwarded as received). Puff items are identified by an offer_sku
            starting with 'P'; orders without Puff lines are dropped.
            """
            import urllib.request, urllib.parse, json
            try:
                # SECURITY NOTE(review): API key hardcoded — move to an env var.
                LIVERPOOL_API_KEY = "443d8a02-34c3-470b7ff-ac638c0ab963"
                LIVERPOOL_SHOP_ID = "3922"
                LIVERPOOL_BASE    = "https://liverpool-prod.mirakl.net"

                parsed = urllib.parse.urlparse(self.path)
                params = urllib.parse.parse_qs(parsed.query)
                start  = params.get('start_date', ['2025-01-01T00:00:00Z'])[0]
                end    = params.get('end_date',   ['2025-12-31T23:59:59Z'])[0]
                max_r  = params.get('max',        ['100'])[0]

                url = (f"{LIVERPOOL_BASE}/api/orders"
                       f"?shop_id={LIVERPOOL_SHOP_ID}"
                       f"&start_date={start}&end_date={end}"
                       f"&max={max_r}")

                req = urllib.request.Request(url, headers={
                    'Authorization': LIVERPOOL_API_KEY,
                    'Accept': 'application/json'
                })
                with urllib.request.urlopen(req, timeout=30) as resp:
                    raw = resp.read()

                # Best-effort filter: keep only orders with Puff SKUs; on any
                # parse failure, fall through and return the upstream payload.
                try:
                    data = json.loads(raw)
                    orders = data.get('orders', [])
                    puff_orders = []
                    for o in orders:
                        items = o.get('order_lines', [])
                        puff_items = [i for i in items if str(i.get('offer_sku','')).startswith('P')]
                        if puff_items:
                            o['order_lines'] = puff_items
                            puff_orders.append(o)
                    data['orders'] = puff_orders
                    data['total_count'] = len(puff_orders)
                    raw = json.dumps(data).encode()
                except Exception:
                    pass  # deliberate: serve the unfiltered upstream response

                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(raw)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def proxy_venti(self):
            """Proxy VentiApp orders, filtered to Puff MX SKUs only.

            Query params: dateFrom/dateTo (MM/DD/YYYY), status, channel, max.
            Valid Puff SKUs start with 'P' followed by a digit (P01683, ...);
            'PA...' SKUs (other Quinio brands on Liverpool) are excluded.
            """
            import urllib.parse, json
            try:
                parsed = urllib.parse.urlparse(self.path)
                params = urllib.parse.parse_qs(parsed.query)
                date_from = params.get('dateFrom', ['01/01/2026'])[0]
                date_to   = params.get('dateTo',   ['12/31/2026'])[0]
                status    = params.get('status',   [None])[0]
                channel   = params.get('channel',  [None])[0]
                max_r     = int(params.get('max',  ['200'])[0])

                data = venti_get_orders(date_from, date_to, status, channel, max_r)

                # Orders live under the 'results' key of the API response.
                orders = data.get('results', [])

                # re is imported at module level (redundant local import removed).
                puff_sku = re.compile(r'^P\d')
                puff = []
                for o in orders:
                    items = o.get('items', [])
                    puff_items = [i for i in items if puff_sku.match(str(i.get('sku', '')))]
                    if puff_items:
                        # Shallow-copy so the upstream order dict is not mutated.
                        o_copy = dict(o)
                        o_copy['items'] = puff_items
                        puff.append(o_copy)

                result = json.dumps({"ok": True, "total": len(puff),
                    "total_venti": data.get('total', 0), "orders": puff}).encode()
                self.send_response(200)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.send_header('Cache-Control', 'no-cache')
                self.end_headers()
                self.wfile.write(result)
            except Exception as e:
                self.send_response(500)
                self.send_header('Content-Type', 'application/json')
                self.send_header('Access-Control-Allow-Origin', '*')
                self.end_headers()
                # BUG FIX: json.dumps escapes the message; the old f-string
                # produced invalid JSON when str(e) contained quotes.
                self.wfile.write(json.dumps({"ok": False, "error": str(e)}).encode())

        def log_message(self, format, *args):
            """Override to silence the default per-request stderr logging."""

    httpd = socketserver.TCPServer(("", PORT), Handler)
    threading.Thread(target=httpd.serve_forever, daemon=True).start()
    logger.info(f"Servidor web iniciado en puerto {PORT}")

# ── HANDLERS DE TELEGRAM ───────────────────────────────────
async def handle_message(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Main Telegram text handler: ID lookup, admin commands, status updates.

    Employees report progress as '<consecutivo> listo' / '<consecutivo> en
    proceso' (either word order); the matching order row is then updated in
    the spreadsheet for the employee's area.
    """
    user_id = str(update.effective_user.id)
    texto   = update.message.text.strip().lower()
    nombre  = update.effective_user.first_name

    # Anyone may ask for their own Telegram ID (used for onboarding).
    if texto in ["/inicio", "/start", "/id"]:
        await update.message.reply_text(
            f"Hola {nombre}!\n\nTu ID de Telegram es: {user_id}\n\nComparte este número con tu administrador."
        )
        return

    # Admin-only: re-scan for negative-profit orders.
    # NOTE(review): revisar_utilidades_negativas is called synchronously and
    # will block the event loop while it runs — confirm whether it should be
    # dispatched to a thread like /alertas below.
    if texto == "/utilidades" and user_id == str(ALFREDO_TELEGRAM_ID):
        await update.message.reply_text("Revisando utilidades negativas...")
        _alertados.clear()
        revisar_utilidades_negativas(context.application)
        return

    # Manual command to force the alert sweep (admin only); runs off-thread.
    if texto == "/alertas" and user_id == str(ALFREDO_TELEGRAM_ID):
        await update.message.reply_text("🔍 Revisando pedidos ahora...")
        threading.Thread(target=revisar_pedidos_y_alertar, args=(context.application,), daemon=True).start()
        return

    # Unregistered users get their ID back so an admin can register them.
    if user_id not in EMPLEADOS:
        await update.message.reply_text(
            f"Tu usuario no está registrado.\nTu ID es: {user_id}\nCompártelo con tu administrador."
        )
        return

    areas_usuario = EMPLEADOS[user_id]

    # First try "642 listo" / "642 en proceso" (number first)...
    match_listo   = re.search(r'(\d+)\s+(listo|terminado|ok|done)', texto)
    match_proceso = re.search(r'(\d+)\s+(en\s*proceso|proceso|trabajando|empezando)', texto)

    if not match_listo and not match_proceso:
        # ...then the reversed order: "listo 642" / "proceso 642".
        match_listo   = re.search(r'(listo|terminado|ok|done)\s+(\d+)', texto)
        match_proceso = re.search(r'(en\s*proceso|proceso)\s+(\d+)', texto)
        if match_listo:
            consecutivo  = match_listo.group(2)
            nuevo_status = "Listo"
        elif match_proceso:
            consecutivo  = match_proceso.group(2)
            nuevo_status = "En proceso"
        else:
            await update.message.reply_text("No entendí el mensaje.\n\nEscribe así:\n642 listo\n642 en proceso")
            return
    else:
        if match_listo:
            consecutivo  = match_listo.group(1)
            nuevo_status = "Listo"
        else:
            consecutivo  = match_proceso.group(1)
            nuevo_status = "En proceso"

    # Single-area employees update directly; multi-area employees pick the
    # target area via inline buttons (handled in handle_callback).
    if isinstance(areas_usuario, list) and len(areas_usuario) == 1:
        await procesar_actualizacion(update, consecutivo, nuevo_status, areas_usuario[0])
    elif isinstance(areas_usuario, list) and len(areas_usuario) > 1:
        context.user_data["pendiente_consecutivo"] = consecutivo
        context.user_data["pendiente_status"]      = nuevo_status
        from telegram import InlineKeyboardButton, InlineKeyboardMarkup
        botones = [[InlineKeyboardButton(a, callback_data=f"area:{a}:{consecutivo}:{nuevo_status}")]
                   for a in areas_usuario]
        markup = InlineKeyboardMarkup(botones)
        await update.message.reply_text(
            f"¿En qué área quieres marcar el #{consecutivo} como {nuevo_status}?",
            reply_markup=markup
        )
    else:
        # Fallback for a non-list areas value — presumably a legacy bare-string
        # entry in EMPLEADOS; all current entries are lists. TODO confirm.
        await procesar_actualizacion(update, consecutivo, nuevo_status, areas_usuario)

async def handle_callback(update: Update, context: ContextTypes.DEFAULT_TYPE):
    """Handle inline-keyboard presses from the area-selection menu.

    The callback payload has the shape "area:<area>:<consecutivo>:<status>";
    anything else is ignored.
    """
    query = update.callback_query
    await query.answer()
    tokens = query.data.split(":")
    if tokens[0] == "area":
        _, area, consecutivo, status = tokens[:4]
        await procesar_actualizacion(query, consecutivo, status, area, es_callback=True)

async def procesar_actualizacion(update_or_query, consecutivo, nuevo_status, area, es_callback=False):
    """Find the order across all sheet tabs and record the new status for an area.

    Works for both plain messages and callback queries: ``es_callback`` decides
    whether the reply edits the button message or answers the chat message.
    """
    async def responder(msg):
        # Callback queries edit the original button message; plain messages get a reply.
        if es_callback:
            await update_or_query.edit_message_text(msg)
        else:
            await update_or_query.message.reply_text(msg)

    # Locate the order; any connectivity problem is reported back to the user.
    try:
        hoja, fila, pestana = buscar_en_todas_las_pestanas(consecutivo)
    except Exception as e:
        await responder(f"Error conectando a la hoja: {str(e)}")
        return

    if not fila:
        await responder(f"No encontré el consecutivo {consecutivo} en ninguna hoja.")
        return

    # Cancelled orders must not be worked on — warn and stop.
    if esta_cancelado(hoja, fila):
        await responder(f"🚫 PEDIDO CANCELADO\n\nEl consecutivo {consecutivo} está cancelado.\nFavor de no continuar.")
        return

    try:
        actualizar_estado(hoja, fila, area, nuevo_status)
        if nuevo_status == "Listo":
            emoji = "✅"
        else:
            emoji = "🔄"
        await responder(
            f"{emoji} Actualizado\n\n"
            f"Consecutivo: {consecutivo}\n"
            f"Hoja: {pestana}\n"
            f"Área: {area}\n"
            f"Estado: {nuevo_status}\n"
            f"Fecha: {datetime.now().strftime('%d/%m/%Y %H:%M')}"
        )
    except Exception as e:
        await responder(f"Error al actualizar: {str(e)}")


# ── AMAZON ADS SYNC ────────────────────────────────────────
def sincronizar_amazon_ads():
    """Genera reporte Amazon Ads y guarda en Sheet pestaña AmazonAds.

    Flujo: renueva el access token si expiró, pide un reporte v3 de
    Sponsored Products del mes en curso, hace polling (máx. 5 min) hasta
    que esté COMPLETED, descarga y descomprime el GZIP, calcula ROAS/CPC
    por campaña y deja el JSON resultante en la celda A1 de la pestaña
    AmazonAds. Los errores se registran en el log; nunca propaga.
    """
    import urllib.request, urllib.parse, json, time as time_mod, gzip
    global _amazon_token
    try:
        logger.info("Iniciando sincronización Amazon Ads...")
        # Renovar token si no existe o ya expiró.
        if not _amazon_token["token"] or time_mod.time() > _amazon_token["expires"]:
            body = urllib.parse.urlencode({
                "grant_type":    "refresh_token",
                "refresh_token": AMAZON_REFRESH_TOKEN,
                "client_id":     AMAZON_CLIENT_ID,
                "client_secret": AMAZON_CLIENT_SECRET,
            }).encode()
            req = urllib.request.Request(
                "https://api.amazon.com/auth/o2/token",
                data=body,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                method="POST"
            )
            with urllib.request.urlopen(req, timeout=15) as r:
                tk = json.loads(r.read())
            _amazon_token["token"]   = tk["access_token"]
            # Margen de 60 s para renovar antes de la expiración real.
            _amazon_token["expires"] = time_mod.time() + tk.get("expires_in", 3600) - 60

        access_token = _amazon_token["token"]
        # Rango: del día 1 del mes en curso a hoy.
        hoy   = datetime.now()
        since = f"{hoy.year}-{str(hoy.month).zfill(2)}-01"
        until = hoy.strftime("%Y-%m-%d")

        headers_ads = {
            "Authorization":                   f"Bearer {access_token}",
            "Amazon-Advertising-API-ClientId": AMAZON_CLIENT_ID,
            "Amazon-Advertising-API-Scope":    AMAZON_PROFILE_ID,
            "Content-Type":                    "application/json",
            "Accept":                          "application/json",
        }

        # Crear reporte v3 (asíncrono: devuelve un reportId para polling).
        report_body = json.dumps({
            "startDate": since,
            "endDate":   until,
            "configuration": {
                "adProduct":    "SPONSORED_PRODUCTS",
                "groupBy":      ["campaign"],
                "columns":      ["campaignName","spend","impressions","clicks","purchases30d","sales30d"],
                "reportTypeId": "spCampaigns",
                "timeUnit":     "SUMMARY",
                "format":       "GZIP_JSON",
            }
        }).encode()

        req = urllib.request.Request(
            "https://advertising-api.amazon.com/reporting/reports",
            data=report_body, headers=headers_ads, method="POST"
        )
        with urllib.request.urlopen(req, timeout=30) as r:
            report_resp = json.loads(r.read())

        report_id = report_resp.get("reportId")
        if not report_id:
            raise Exception(f"No reportId: {json.dumps(report_resp)}")

        logger.info(f"Reporte Amazon creado: {report_id} — esperando...")

        # Polling hasta 5 minutos (30 intentos x 10 s).
        raw_data = None
        for attempt in range(30):
            time_mod.sleep(10)
            req2 = urllib.request.Request(
                f"https://advertising-api.amazon.com/reporting/reports/{report_id}",
                headers=headers_ads
            )
            with urllib.request.urlopen(req2, timeout=15) as r2:
                status_resp = json.loads(r2.read())
            status = status_resp.get("status", "")
            logger.info(f"Amazon reporte status [{attempt+1}]: {status}")
            if status == "COMPLETED":
                dl_url = status_resp.get("url") or status_resp.get("location")
                # Respuesta malformada: mejor un error claro que un crash opaco.
                if not dl_url:
                    raise Exception(f"Reporte COMPLETED sin URL de descarga: {status_resp}")
                req3 = urllib.request.Request(dl_url)
                with urllib.request.urlopen(req3, timeout=30) as r3:
                    raw_data = json.loads(gzip.decompress(r3.read()))
                break
            elif status in ["FAILURE", "FAILED"]:
                raise Exception(f"Reporte Amazon falló: {status_resp}")

        if raw_data is None:
            raise Exception("Timeout 5min esperando reporte Amazon")

        # Procesar: métricas derivadas por campaña (ROAS, CPC), protegiendo división por cero.
        campaigns = []
        for c in raw_data:
            spend  = float(c.get("spend", 0) or 0)
            sales  = float(c.get("sales30d", 0) or 0)
            clicks = int(c.get("clicks", 0) or 0)
            campaigns.append({
                "campaign_name": c.get("campaignName", ""),
                "spend":         spend,
                "impressions":   int(c.get("impressions", 0) or 0),
                "clicks":        clicks,
                "sales":         sales,
                "conversions":   int(c.get("purchases30d", 0) or 0),
                "roas":          round(sales / spend, 2) if spend > 0 else 0,
                "cpc":           round(spend / clicks, 2) if clicks > 0 else 0,
            })

        # Guardar todo el payload como JSON en A1 de la pestaña AmazonAds.
        payload = json.dumps({
            "ok": True, "profile_id": AMAZON_PROFILE_ID,
            "date_start": since, "date_stop": until,
            "data": campaigns
        })
        spreadsheet = get_client()
        try:
            sheet = spreadsheet.worksheet('AmazonAds')
        except gspread.WorksheetNotFound:
            # Primera ejecución: crear la pestaña.
            sheet = spreadsheet.add_worksheet('AmazonAds', rows=1, cols=1)
        sheet.update_cell(1, 1, payload)
        logger.info(f"Amazon Ads sincronizado: {len(campaigns)} campañas guardadas en Sheet")

    except Exception as e:
        logger.error(f"Error sincronizando Amazon Ads: {e}")


def iniciar_loop_amazon(bot_app):
    """Start a daemon thread that syncs Amazon Ads once per hour."""
    def _worker():
        # Give the app 30 s to finish starting up before the first sync.
        time.sleep(30)
        while True:
            sincronizar_amazon_ads()
            time.sleep(3600)  # sincronizar cada hora

    hilo = threading.Thread(target=_worker, daemon=True)
    hilo.start()
    logger.info("Loop Amazon Ads iniciado — sincroniza cada hora")


# ── CIERRE MENSUAL DE PUBLICIDAD ──────────────────────────
def _leer_canal_de_sheet(client, nombre_ws):
    """Lee el JSON que el loop de sincronización dejó en A1 de la pestaña dada.

    Devuelve el dict deserializado, o None si la celda está vacía.
    Propaga excepciones (pestaña inexistente, JSON inválido) al llamador.
    """
    import json
    ws = client.open_by_key(SPREADSHEET_ID).worksheet(nombre_ws)
    raw = ws.cell(1, 1).value
    return json.loads(raw) if raw else None


def _fila_resumen_detalle(etiqueta, data, fecha_cierre):
    """Arma la fila de resumen para canales con lista de campañas (Google/Amazon).

    Solo cuenta campañas con gasto > 0 como "activas".
    """
    camps = data.get('data', [])
    activas = [c for c in camps if c.get('spend', 0) > 0]
    tSp = sum(c.get('spend', 0) for c in activas)
    return [
        etiqueta,
        f"${tSp:,.0f}",
        f"{sum(c.get('impressions', 0) for c in activas):,}",
        f"{sum(c.get('clicks', 0) for c in activas):,}",
        str(len(activas)),
        fecha_cierre,
    ]


def cerrar_mes_publicidad():
    """Guarda datos finales de todos los canales conectados en Sheet histórico.

    Junta Meta (API directa), Google y Amazon (cachés en el Sheet principal),
    y escribe en el Sheet de publicidad una pestaña por mes con el JSON
    completo en A1 más una tabla resumen legible desde la fila 3.
    Cada canal falla de forma independiente; los errores quedan en el log
    y en la fila del canal.
    """
    import urllib.request, urllib.parse, json
    from datetime import datetime

    ahora = datetime.now()
    nombre_mes = f"{MESES[ahora.month]}_{ahora.year}"
    logger.info(f"Iniciando cierre mensual publicidad: {nombre_mes}")

    try:
        creds = Credentials.from_service_account_info(GOOGLE_CREDENTIALS, scopes=[
            "https://www.googleapis.com/auth/spreadsheets",
            "https://www.googleapis.com/auth/drive"
        ])
        client = gspread.authorize(creds)
        pub_sheet = client.open_by_key(PUBLICIDAD_SHEET_ID)
    except Exception as e:
        logger.error(f"Error abriendo Sheet publicidad: {e}")
        return

    datos_cierre = {
        "mes": nombre_mes,
        "fecha_cierre": ahora.strftime("%d/%m/%Y %H:%M"),
        "canales": {}
    }

    # ── META ADS ── (consulta directa a la Graph API, mes en curso)
    try:
        token = META_TOKEN
        hoy = ahora
        time_range = json.dumps({
            "since": f"{hoy.year}-{str(hoy.month).zfill(2)}-01",
            "until": hoy.strftime("%Y-%m-%d")
        })
        fields = 'campaign_name,spend,impressions,clicks,cpc,cpm,ctr,actions,action_values,date_start,date_stop'
        url = (f'https://graph.facebook.com/v19.0/{META_AD_ACCOUNT}/insights'
               f'?fields={fields}&time_range={urllib.parse.quote(time_range)}'
               f'&level=campaign&access_token={token}')
        req = urllib.request.Request(url)
        with urllib.request.urlopen(req, timeout=30) as r:
            meta_data = json.loads(r.read())
        campaigns = meta_data.get('data', [])
        for c in campaigns:
            spend = float(c.get('spend', 0) or 0)
            av = c.get('action_values', [])
            # ROAS = valor de compras atribuidas / gasto (0 si no hubo gasto).
            purchase_val = sum(float(a.get('value', 0)) for a in av if a.get('action_type') == 'purchase')
            c['roas'] = round(purchase_val / spend, 2) if spend > 0 else 0
            c['spend'] = spend
        tSpend = sum(c['spend'] for c in campaigns)
        tImpr  = sum(int(c.get('impressions', 0)) for c in campaigns)
        tClicks= sum(int(c.get('clicks', 0)) for c in campaigns)
        datos_cierre["canales"]["meta"] = {
            "gasto": tSpend, "impresiones": tImpr, "clicks": tClicks,
            "campanas": len(campaigns), "detalle": campaigns
        }
        logger.info(f"Meta Ads cierre: ${tSpend:,.0f}")
    except Exception as e:
        logger.error(f"Error jalando Meta para cierre: {e}")
        datos_cierre["canales"]["meta"] = {"error": str(e)}

    # ── GOOGLE ADS ── (caché que dejó el loop de sync en la pestaña GoogleAds)
    try:
        google_data = _leer_canal_de_sheet(client, 'GoogleAds')
        if google_data is not None:
            datos_cierre["canales"]["google"] = google_data
            logger.info(f"Google Ads cierre: OK")
    except Exception as e:
        logger.error(f"Error jalando Google para cierre: {e}")
        datos_cierre["canales"]["google"] = {"error": str(e)}

    # ── AMAZON ADS ── (caché que dejó el loop de sync en la pestaña AmazonAds)
    try:
        amazon_data = _leer_canal_de_sheet(client, 'AmazonAds')
        if amazon_data is not None:
            datos_cierre["canales"]["amazon"] = amazon_data
            logger.info(f"Amazon Ads cierre: OK")
    except Exception as e:
        logger.error(f"Error jalando Amazon para cierre: {e}")
        datos_cierre["canales"]["amazon"] = {"error": str(e)}

    # ── GUARDAR EN SHEET HISTÓRICO ──
    try:
        # Crear o reemplazar pestaña del mes
        try:
            ws_mes = pub_sheet.worksheet(nombre_mes)
            ws_mes.clear()
        except gspread.WorksheetNotFound:
            ws_mes = pub_sheet.add_worksheet(title=nombre_mes, rows=100, cols=20)

        # Guardar JSON completo en A1
        ws_mes.update_cell(1, 1, json.dumps(datos_cierre))

        # Guardar tabla resumen legible desde fila 3
        headers = ['Canal', 'Gasto', 'Impresiones', 'Clicks', 'Campañas activas', 'Fecha cierre']
        ws_mes.update('A3:F3', [headers])

        resumen = []
        for canal, data in datos_cierre["canales"].items():
            if "error" in data:
                resumen.append([canal.title(), 'ERROR', '', '', '', data.get('error', '')])
            elif canal == "meta":
                # Meta trae totales pre-agregados; los demás traen lista de campañas.
                resumen.append([
                    'Meta Ads',
                    f"${data.get('gasto', 0):,.0f}",
                    f"{data.get('impresiones', 0):,}",
                    f"{data.get('clicks', 0):,}",
                    str(data.get('campanas', 0)),
                    datos_cierre["fecha_cierre"]
                ])
            elif canal == "google":
                resumen.append(_fila_resumen_detalle('Google Ads', data, datos_cierre["fecha_cierre"]))
            elif canal == "amazon":
                resumen.append(_fila_resumen_detalle('Amazon Ads', data, datos_cierre["fecha_cierre"]))

        if resumen:
            ws_mes.update(f'A4:F{3+len(resumen)}', resumen)

        logger.info(f"Cierre mensual {nombre_mes} guardado en Sheet publicidad ✓")

    except Exception as e:
        logger.error(f"Error guardando cierre mensual: {e}")


def iniciar_loop_cierre_mensual():
    """Corre el cierre el último día del mes a las 23:30"""
    def loop():
        import calendar
        # Breve espera inicial para no competir con el arranque del bot.
        time.sleep(60)
        # Marca de meses ya cerrados, para disparar una sola vez por mes.
        ejecutados = set()
        while True:
            hoy = datetime.now()
            dias_del_mes = calendar.monthrange(hoy.year, hoy.month)[1]
            marca = f"cierre-{hoy.year}-{hoy.month}"
            es_ventana = (
                hoy.day == dias_del_mes
                and hoy.hour == 23
                and hoy.minute >= 30
            )
            if es_ventana and marca not in ejecutados:
                ejecutados.add(marca)
                logger.info("Ejecutando cierre mensual de publicidad...")
                cerrar_mes_publicidad()
            time.sleep(60)

    threading.Thread(target=loop, daemon=True).start()
    logger.info("Loop cierre mensual publicidad iniciado — ejecuta el último día del mes a las 23:30")

# ── MAIN ───────────────────────────────────────────────────
def main():
    """Wire up Telegram handlers and background loops, then start polling."""
    from telegram.ext import CallbackQueryHandler

    iniciar_servidor_web()

    app = Application.builder().token(TELEGRAM_TOKEN).build()
    # All text — commands included — goes through handle_message;
    # inline-keyboard presses go through handle_callback.
    app.add_handler(MessageHandler(filters.TEXT & ~filters.COMMAND, handle_message))
    app.add_handler(MessageHandler(filters.COMMAND, handle_message))
    app.add_handler(CallbackQueryHandler(handle_callback))

    # Background daemon threads: utilidades, alertas, captura, Amazon Ads.
    for arrancar in (iniciar_loop_utilidades, iniciar_scheduler_alertas,
                     iniciar_loop_captura, iniciar_loop_amazon):
        arrancar(app)
    iniciar_loop_cierre_mensual()

    logger.info("Bot iniciado con sistema de alertas...")
    app.run_polling(allowed_updates=Update.ALL_TYPES)

# Script entry point: start the bot only when run directly, not on import.
if __name__ == "__main__":
    main()
