/ sales-pipeline / deal_resurrector.py
deal_resurrector.py
  1  #!/usr/bin/env python3
  2  """
  3  Deal Resurrector v2 — Three intelligence layers on dead deals:
  4    Layer 1: Time Decay Scoring (composite score with configurable decay windows)
  5    Layer 2: POC Expansion (verify contacts, find replacements)
  6    Layer 3: Follow the Champion (track departed POCs to new companies)
  7  
  8  Pulls closed-lost deals from HubSpot, scores them using a composite formula
  9  (time decay + deal value + loss reason + engagement triggers), then generates
 10  personalized revival emails per loss reason category.
 11  
 12  Usage:
 13      python3 deal_resurrector.py --top 10 --dry-run
 14      python3 deal_resurrector.py --top 5 --include-champion
 15      python3 deal_resurrector.py --add-exclusion "Acme Corp"
 16  """
 17  
 18  import argparse
 19  import json
 20  import os
 21  import random
 22  import re
 23  import subprocess
 24  import sys
 25  import time
 26  from datetime import datetime, timedelta, timezone
 27  from pathlib import Path
 28  
 29  import requests
 30  
# ─── Configuration ───────────────────────────────────────────────────────────
# BASE_DIR may be overridden with the BASE_DIR env var; otherwise it is the
# directory containing this script.
BASE_DIR = Path(os.environ.get("BASE_DIR", Path(__file__).resolve().parent))
DATA_DIR = BASE_DIR / "data"
# Persistent do-not-contact list, maintained via --add-exclusion.
EXCLUSIONS_FILE = DATA_DIR / "resurrector-exclusions.json"
# Destination for each run's scored output (skipped under --dry-run).
OUTPUT_FILE = DATA_DIR / "deal-resurrector-latest.json"

# HubSpot API — private-app token is read from the environment, never hardcoded.
HUBSPOT_BASE_URL = "https://api.hubapi.com"
HUBSPOT_TOKEN = os.environ.get("HUBSPOT_API_KEY", "")

# ─── Closed-Lost Stage IDs ──────────────────────────────────────────────────
# Map your HubSpot closed-lost stage IDs to pipeline names.
# Find these in HubSpot → Settings → Objects → Deals → Pipelines
# NOTE: left empty by default — searches return nothing until stage IDs are
# filled in for your portal.
CLOSED_LOST_STAGES = {
    # "stage_id_here": "Pipeline Name",
    # Example:
    # "1079884213": "Enterprise Pipeline",
    # "960522377": "ABM Pipeline",
}

# ─── HubSpot Properties to Fetch ────────────────────────────────────────────
# Property name lists passed to the CRM objects endpoints; anything not
# listed here is absent from the API responses.
DEAL_PROPERTIES = [
    "dealname", "amount", "closedate", "dealstage",
    "closed_lost_reason", "hs_closed_amount", "pipeline",
    "hubspot_owner_id", "notes_last_updated",
]
CONTACT_PROPERTIES = [
    "firstname", "lastname", "email", "jobtitle", "company",
    "hs_last_sales_activity_date", "notes_last_updated",
    "hs_email_last_open_date", "hs_email_last_click_date",
    "hs_analytics_last_visit_timestamp", "hs_analytics_num_page_views",
    "num_associated_deals", "recent_conversion_event_name",
]
COMPANY_PROPERTIES = [
    "name", "domain", "industry", "numberofemployees",
    "annualrevenue", "hs_last_sales_activity_date",
    "notes_last_updated", "num_associated_deals",
    "hs_analytics_last_visit_timestamp",
]

# ─── Time Decay Windows ─────────────────────────────────────────────────────
# (min_days, max_days, weight) — consumed by compute_time_decay_score.
# Deals in the 60-90 day window get full weight; older deals decay.
DECAY_WINDOWS = [
    (60, 90, 1.0),    # Sweet spot — enough time has passed, still fresh
    (91, 180, 0.8),   # Good window
    (181, 365, 0.6),  # Getting stale but still viable
    (366, 540, 0.4),  # Long shot unless trigger present
    (541, 99999, 0.2),  # Only if engagement trigger detected
]

# ─── Loss Reason → Bonus Multiplier ─────────────────────────────────────────
# Substring-matched (case-insensitive) against the free-text loss reason.
# Deals lost to "timing" are more likely to convert than "bad fit".
LOSS_REASON_BONUS = {
    "timing": 1.3,
    "not ready": 1.25,
    "budget": 1.15,
    "price": 1.1,
    "internal": 1.05,
    "no decision": 1.0,
    "competitor": 0.7,
    "no need": 0.5,
    "bad fit": 0.3,
}

# Rate limit delay between HubSpot API calls (seconds)
# NOTE(review): not referenced anywhere in this file's visible code
# (HubSpotClient uses its own _rate_wait) — confirm before removing.
SEARCH_DELAY = float(os.environ.get("HUBSPOT_RATE_DELAY", "1.5"))

# ─── Your Company Info (for email templates) ────────────────────────────────
# Sender identity and value prop interpolated into every drafted email.
YOUR_COMPANY_NAME = os.environ.get("YOUR_COMPANY_NAME", "Your Company")
YOUR_SENDER_NAME = os.environ.get("YOUR_SENDER_NAME", "Your Name")
YOUR_SENDER_TITLE = os.environ.get("YOUR_SENDER_TITLE", "CEO")
# A brief value prop to include in emails
YOUR_VALUE_PROP = os.environ.get("YOUR_VALUE_PROP",
    "We've built new capabilities since we last talked that I think you'd find interesting.")

107  
108  # ─── Exclusion List ──────────────────────────────────────────────────────────
109  
def load_exclusions() -> set:
    """Return the set of excluded company names, lowercased.

    A missing or unreadable exclusions file yields an empty set (with a
    warning on stderr for the unreadable case).
    """
    if not EXCLUSIONS_FILE.exists():
        return set()
    try:
        raw = json.loads(EXCLUSIONS_FILE.read_text())
        return {entry["company"].lower() for entry in raw.get("excluded_deals", [])}
    except Exception as ex:
        print(f"⚠️ Could not load exclusions: {ex}", file=sys.stderr)
        return set()
120  
121  
def add_exclusion(company: str, deal_id: str = "", reason: str = "manually_excluded") -> None:
    """Record *company* in the exclusions file; no-op if it is already listed."""
    data = {"excluded_deals": []}
    if EXCLUSIONS_FILE.exists():
        try:
            data = json.loads(EXCLUSIONS_FILE.read_text())
        except Exception:
            # Corrupt file: start over with an empty list rather than crash.
            pass

    already_listed = {entry["company"].lower() for entry in data["excluded_deals"]}
    if company.lower() in already_listed:
        print(f"ℹ️  {company} is already excluded.")
        return

    record = {
        "deal_id": deal_id or company.lower().replace(" ", "-"),
        "company": company,
        "reason": reason,
        "excluded_date": datetime.now().strftime("%Y-%m-%d"),
    }
    data["excluded_deals"].append(record)
    DATA_DIR.mkdir(parents=True, exist_ok=True)
    EXCLUSIONS_FILE.write_text(json.dumps(data, indent=2))
    print(f"✅ Added {company} to exclusion list")
143  
144  
145  # ─── HubSpot Client ─────────────────────────────────────────────────────────
146  
class HubSpotClient:
    """Minimal HubSpot CRM API client.

    Adds the bearer-token auth header to every request, paces calls with a
    short post-request sleep, and retries on HTTP 429 rate-limit responses.
    """

    def __init__(self, token: str):
        self.token = token.strip()
        self.session = requests.Session()
        self.session.headers.update({
            "Authorization": f"Bearer {self.token}",
            "Content-Type": "application/json",
        })
        # Pause inserted after each successful call to stay under burst limits.
        self._rate_wait = 0.12

    def _request(self, method, path, **kwargs):
        """Issue one request, retrying up to 4 times on 429; return parsed JSON.

        Raises requests.HTTPError for non-429 error statuses, and
        RuntimeError when every attempt was rate-limited.
        """
        url = f"{HUBSPOT_BASE_URL}{path}"
        for _attempt in range(4):
            resp = self.session.request(method, url, **kwargs)
            if resp.status_code == 429:
                # BUG FIX: Retry-After may be an HTTP-date rather than
                # delta-seconds (RFC 7231 permits both); int() on a date
                # string raised ValueError and killed the retry path.
                # Fall back to a 2s wait when the header is unparseable.
                try:
                    wait = int(resp.headers.get("Retry-After", 2))
                except (TypeError, ValueError):
                    wait = 2
                print(f"  ⏳ Rate limited, waiting {wait}s…", file=sys.stderr)
                time.sleep(wait)
                continue
            resp.raise_for_status()
            time.sleep(self._rate_wait)
            return resp.json()
        raise RuntimeError(f"Too many retries for {path}")

    def get(self, path, **kwargs):
        return self._request("GET", path, **kwargs)

    def post(self, path, **kwargs):
        return self._request("POST", path, **kwargs)

    def search_closed_lost_deals(self, since_date: str):
        """Search for all closed-lost deals across configured pipelines.

        Returns an empty list when CLOSED_LOST_STAGES has not been filled in.
        """
        all_deals = []
        for stage_id in CLOSED_LOST_STAGES:
            all_deals.extend(self._search_by_stage(stage_id, since_date))
        return all_deals

    def _search_by_stage(self, stage_id, since_date):
        """Page through the deals Search API for one closed-lost stage ID."""
        deals = []
        after = None
        while True:
            body = {
                "filterGroups": [{"filters": [
                    {"propertyName": "dealstage", "operator": "EQ", "value": stage_id},
                    {"propertyName": "closedate", "operator": "GTE", "value": since_date},
                ]}],
                "properties": DEAL_PROPERTIES,
                "sorts": [{"propertyName": "closedate", "direction": "DESCENDING"}],
                "limit": 100,
            }
            if after:
                body["after"] = after
            data = self.post("/crm/v3/objects/deals/search", json=body)
            deals.extend(data.get("results", []))
            # Follow the paging cursor until HubSpot stops returning one.
            paging = data.get("paging", {}).get("next")
            if paging:
                after = paging["after"]
            else:
                break
        return deals

    def get_deal_associations(self, deal_id, to_type="contacts"):
        """Return association records for a deal; [] on any API error."""
        try:
            data = self.get(f"/crm/v4/objects/deals/{deal_id}/associations/{to_type}")
            return data.get("results", [])
        except Exception:
            return []

    def get_contact(self, contact_id):
        """Fetch one contact with CONTACT_PROPERTIES; None on any API error."""
        try:
            return self.get(
                f"/crm/v3/objects/contacts/{contact_id}",
                params={"properties": ",".join(CONTACT_PROPERTIES)},
            )
        except Exception:
            return None

    def get_company_for_contact(self, contact_id):
        """Fetch the first company associated with a contact.

        Returns None when the contact has no company association or on any
        API error.
        """
        try:
            assocs = self.get(f"/crm/v4/objects/contacts/{contact_id}/associations/companies")
            results = assocs.get("results", [])
            if not results:
                return None
            company_id = results[0].get("toObjectId")
            return self.get(
                f"/crm/v3/objects/companies/{company_id}",
                params={"properties": ",".join(COMPANY_PROPERTIES)},
            )
        except Exception:
            return None
237  
238  
239  # ─── Helpers ─────────────────────────────────────────────────────────────────
240  
def parse_ts(val):
    """Parse a HubSpot timestamp into a timezone-aware datetime.

    Accepts epoch-milliseconds (int/float or digit string) or an ISO-8601
    string (a trailing 'Z' is accepted). Returns None for empty or
    unparseable input.
    """
    if not val:
        return None
    try:
        if isinstance(val, str) and not val.isdigit():
            return datetime.fromisoformat(val.replace("Z", "+00:00"))
        return datetime.fromtimestamp(int(val) / 1000, tz=timezone.utc)
    except Exception:
        return None
251  
252  
253  # ─── Layer 1: Time Decay Scoring ────────────────────────────────────────────
254  
def compute_time_decay_score(days_since_close: int, deal_value: float,
                              max_deal_value: float, loss_reason: str,
                              has_trigger: bool, *,
                              decay_windows=None, reason_bonus=None) -> dict:
    """Compute a composite revival score (0-100) via an additive formula:

      Time component:    up to 35 pts (decay weight × 35)
      Value component:   up to 30 pts (normalized value × 30)
      Reason component:  up to 20 pts (loss reason bonus, capped at 1.0, × 20)
      Trigger component: up to 15 pts (flat, when an engagement signal exists)

    Args:
        days_since_close: Days elapsed since the deal was closed lost.
        deal_value: Deal amount.
        max_deal_value: Largest amount in the batch (normalization base).
        loss_reason: Free-text closed-lost reason (substring-matched).
        has_trigger: True when a recent engagement signal was detected.
        decay_windows: Optional (min_days, max_days, weight) triples;
            defaults to the module-level DECAY_WINDOWS.
        reason_bonus: Optional keyword→multiplier map; defaults to the
            module-level LOSS_REASON_BONUS.

    Returns:
        dict with time_decay_weight, value_normalized, reason_score,
        trigger_bonus (points actually awarded for triggers), and
        composite_score (int, 0-100).
    """
    # Resolve overridable tables at call time so module config stays the default.
    windows = DECAY_WINDOWS if decay_windows is None else decay_windows
    bonuses = LOSS_REASON_BONUS if reason_bonus is None else reason_bonus

    # Time decay weight from the first matching window.
    time_weight = 0.0
    for lo, hi, weight in windows:
        if lo <= days_since_close <= hi:
            time_weight = weight
            break

    # Too fresh (<60 days) — penalize (deal is still raw)
    if days_since_close < 60:
        time_weight = 0.2

    # Very old deals only score if trigger present
    if days_since_close > 540 and not has_trigger:
        time_weight = 0.0

    # Normalize deal value (0-1); max(..., 1) guards against a zero maximum.
    value_norm = min(deal_value / max(max_deal_value, 1), 1.0)

    # Loss reason bonus: first keyword match wins; cap at 1.0 so the
    # component never exceeds its 20-point budget.
    reason_lower = (loss_reason or "").lower()
    reason_score = 0.5  # default for unknown reasons
    for keyword, bonus in bonuses.items():
        if keyword in reason_lower:
            reason_score = min(bonus, 1.0)
            break

    # Trigger bonus
    trigger_pts = 15.0 if has_trigger else 0.0

    # Additive composite
    time_pts = time_weight * 35
    value_pts = value_norm * 30
    reason_pts = reason_score * 20

    composite = min(100, round(time_pts + value_pts + reason_pts + trigger_pts))

    return {
        "time_decay_weight": time_weight,
        "value_normalized": round(value_norm, 3),
        # BUG FIX: "trigger_bonus" previously carried the reason score;
        # it now holds the trigger points its name promises, and the
        # reason score is reported under its own key.
        "reason_score": round(reason_score, 2),
        "trigger_bonus": trigger_pts,
        "composite_score": composite,
    }
306  
307  
308  # ─── Email Generation ───────────────────────────────────────────────────────
309  
310  def _random_cta():
311      return random.choice([
312          "Worth revisiting?",
313          "Open to a quick catch-up?",
314          "Curious if the timing is better now?",
315          "Worth 15 min to compare notes?",
316          "Any interest in reconnecting?",
317          "Make sense to chat again?",
318      ])
319  
320  
def _random_signoff():
    """Pick one signature variant at random (built from the YOUR_* config)."""
    variants = [
        YOUR_SENDER_NAME,
        f"{YOUR_SENDER_NAME}\n{YOUR_SENDER_TITLE}, {YOUR_COMPANY_NAME}",
        f"- {YOUR_SENDER_NAME}",
    ]
    return random.choice(variants)
327  
328  
# Revival email angles — rotated based on loss reason category.
# Each angle's "subject" and "hook" are str.format templates; available
# placeholders are {first} (contact first name), {company}, and {months}
# (whole months since the deal closed). Keys must match the categories
# returned by _categorize_loss_reason, plus "default".
REVIVAL_ANGLES = {
    "timing": [
        {
            "subject": "{first}, checking back in",
            "hook": "When we last talked, you mentioned the timing wasn't right. "
                    "It's been {months} months. Figured I'd check in rather than assume.",
        },
        {
            "subject": "been a while, {first}",
            "hook": "It's been {months} months since we last connected on {company}. "
                    "A lot has probably changed on both sides.",
        },
    ],
    "competitor": [
        {
            "subject": "how's the current setup, {first}?",
            "hook": "Last time, you went with another partner. Totally respect that. "
                    "Curious how it's going and whether there's room to compare notes.",
        },
    ],
    "budget": [
        {
            "subject": "new pricing options",
            "hook": "Pricing was the sticking point last time. We've restructured since then. "
                    "We now offer performance-based models where you pay for results.",
        },
    ],
    "internal": [
        {
            "subject": "{first}, dust settled yet?",
            "hook": "Last time, internal changes at {company} put things on hold. "
                    "Wanted to see if the original initiative is back on the table.",
        },
    ],
    "ghost": [
        {
            "subject": "{first}, one more try",
            "hook": "We connected {months} months ago but lost touch. No hard feelings. "
                    "Just wanted to resurface in case the need is still there.",
        },
    ],
    "default": [
        {
            "subject": "quick update for {first}",
            "hook": "We connected {months} months ago about growing {company}. "
                    "A lot has changed on our end since then.",
        },
    ],
}
379  
380  
381  def _categorize_loss_reason(loss_reason):
382      """Map a free-text loss reason to a category for email angle selection."""
383      lr = (loss_reason or "").lower()
384      if any(w in lr for w in ["timing", "not ready", "circle back", "follow up"]):
385          return "timing"
386      if any(w in lr for w in ["competitor", "chose", "existing relationship"]):
387          return "competitor"
388      if any(w in lr for w in ["budget", "price", "pricing", "cost"]):
389          return "budget"
390      if any(w in lr for w in ["internal", "restructur", "reorg", "change"]):
391          return "internal"
392      if any(w in lr for w in ["ghost", "unresponsive", "no response"]):
393          return "ghost"
394      return "default"
395  
396  
def draft_revival_email(contact_name, company_name, deal_value, loss_reason,
                         days_since_close, contact_title=""):
    """Compose a loss-reason-aware revival email.

    Returns a dict with "subject" and "body" strings; the angle is chosen
    at random from the pool matching the categorized loss reason.
    """
    first = contact_name.split()[0] if contact_name else "there"
    months = days_since_close // 30
    category = _categorize_loss_reason(loss_reason)

    pool = REVIVAL_ANGLES.get(category, REVIVAL_ANGLES["default"])
    angle = random.choice(pool)
    fields = {"first": first, "company": company_name, "months": months}
    subject = angle["subject"].format(**fields)
    hook = angle["hook"].format(**fields)

    paragraphs = [f"Hey {first},", hook, YOUR_VALUE_PROP, _random_cta(), _random_signoff()]
    return {"subject": subject, "body": "\n\n".join(paragraphs)}
414  
415  
def draft_replacement_email(replacement_name, company_name, original_contact):
    """Compose an intro email to a replacement POC at the same company."""
    first = replacement_name.split()[0] if replacement_name else "there"
    orig_first = original_contact.split()[0] if original_contact else "your predecessor"

    paragraphs = [
        f"Hey {first},",
        f"We were in conversation with {original_contact} about growth for "
        f"{company_name} before the team change.",
        YOUR_VALUE_PROP,
        _random_cta(),
        _random_signoff(),
    ]
    return {
        "subject": f"picking up where {orig_first} left off at {company_name}",
        "body": "\n\n".join(paragraphs),
    }
433  
434  
def draft_champion_email(champion_name, new_company, new_title, old_company):
    """Compose a congratulations/reconnect email for a champion who changed jobs."""
    first = champion_name.split()[0] if champion_name else "there"

    paragraphs = [
        f"Hey {first},",
        f"Saw you moved to {new_company}. Congrats on the {new_title} role.",
        f"We had a great conversation when you were at {old_company}. "
        f"Now that you're settling in, I'd love to show you what we can do "
        f"for {new_company}.",
        _random_cta(),
        _random_signoff(),
    ]
    return {
        "subject": f"congrats on the move, {first}",
        "body": "\n\n".join(paragraphs),
    }
452  
453  
454  # ─── Main Pipeline ───────────────────────────────────────────────────────────
455  
def main():
    """CLI entry point: fetch closed-lost deals, score them, draft emails.

    Flow: parse args → (optionally add an exclusion and exit) → pull
    closed-lost deals from HubSpot → filter by value and exclusion list →
    score each deal with compute_time_decay_score → draft a revival email
    for those above --min-score → print the top N and (unless --dry-run)
    write the result JSON to OUTPUT_FILE.
    """
    parser = argparse.ArgumentParser(
        description="Deal Resurrector v2 — Time Decay + POC Expansion + Champion Tracking"
    )
    parser.add_argument("--top", type=int, default=10, help="Number of top deals (default: 10)")
    parser.add_argument("--min-score", type=int, default=40, help="Minimum composite score (default: 40)")
    parser.add_argument("--min-deal-value", type=float, default=5000, help="Min deal value (default: 5000)")
    parser.add_argument("--months", type=int, default=24, help="Look back N months (default: 24)")
    # NOTE(review): --include-champion only affects the banner and the saved
    # parameters below; --skip-search is never read. No Layer 2/3 step runs
    # in this function — presumably hooks for work elsewhere; confirm.
    parser.add_argument("--include-champion", action="store_true", help="Enable Layer 3: Follow the Champion")
    parser.add_argument("--dry-run", action="store_true", help="Print results, don't save")
    parser.add_argument("--skip-search", action="store_true", help="Skip web searches (faster)")
    parser.add_argument("--add-exclusion", metavar="COMPANY", help="Add a company to exclusion list and exit")
    args = parser.parse_args()

    # Maintenance mode: record the exclusion and exit without touching the API.
    if args.add_exclusion:
        add_exclusion(args.add_exclusion)
        return

    if not HUBSPOT_TOKEN:
        print("❌ HUBSPOT_API_KEY environment variable not set.", file=sys.stderr)
        print("   Set it: export HUBSPOT_API_KEY='your-token-here'", file=sys.stderr)
        sys.exit(1)

    print("🔥 Deal Resurrector v2")
    print(f"   Layers: Time Decay + POC Expansion"
          f"{ ' + Champion Tracking' if args.include_champion else ''}")
    print(f"   Top {args.top} | min score {args.min_score} | min value ${args.min_deal_value:,.0f}")
    print()

    excluded_companies = load_exclusions()
    if excluded_companies:
        print(f"🚫 Exclusion list: {len(excluded_companies)} companies will be skipped")
    print()

    client = HubSpotClient(HUBSPOT_TOKEN)

    # Step 1: Pull closed-lost deals (lookback approximated as 30-day months).
    since = (datetime.now(timezone.utc) - timedelta(days=args.months * 30)).strftime("%Y-%m-%d")
    print(f"📥 Fetching closed-lost deals since {since}…")
    deals = client.search_closed_lost_deals(since)
    print(f"   Found {len(deals)} closed-lost deals")

    # Filter by value (missing/empty amount is treated as 0).
    filtered = []
    for d in deals:
        amt = float(d["properties"].get("amount") or 0)
        if amt >= args.min_deal_value:
            filtered.append(d)
    print(f"   {len(filtered)} deals above ${args.min_deal_value:,.0f}")

    # Filter exclusions: drops a deal when its name equals OR merely contains
    # an excluded company name (case-insensitive substring match).
    if excluded_companies:
        pre = len(filtered)
        filtered = [
            d for d in filtered
            if d["properties"].get("dealname", "").lower() not in excluded_companies
            and not any(excl in d["properties"].get("dealname", "").lower()
                        for excl in excluded_companies)
        ]
        excluded_count = pre - len(filtered)
        if excluded_count:
            print(f"   🚫 {excluded_count} deal(s) excluded")

    if not filtered:
        print("No deals to process. Exiting.")
        return

    # Normalization base for the value component of the score.
    max_value = max(float(d["properties"].get("amount") or 0) for d in filtered)
    now = datetime.now(timezone.utc)

    # Step 2: Score and enrich
    results = []
    for i, deal in enumerate(filtered):
        dp = deal["properties"]
        deal_id = deal["id"]
        deal_name = dp.get("dealname", "Unknown")
        amount = float(dp.get("amount") or 0)
        loss_reason = dp.get("closed_lost_reason") or "Unknown"
        close_dt = parse_ts(dp.get("closedate"))
        # Missing close date → treat as 999 days old (lands in the 0.2 window).
        days_since = (now - close_dt).days if close_dt else 999

        print(f"  [{i+1}/{len(filtered)}] {deal_name} (${amount:,.0f}, {days_since}d ago)…",
              end="", flush=True)

        # Get primary contact (first associated contact, if any).
        assocs = client.get_deal_associations(deal_id, "contacts")
        contact_name = "Unknown"
        contact_email = ""
        contact_title = ""
        company_name = deal_name  # fallback when no contact/company is found
        contact_data = None

        if assocs:
            cid = str(assocs[0].get("toObjectId"))
            contact_data = client.get_contact(cid)
            if contact_data:
                cp = contact_data.get("properties", {})
                fn = cp.get("firstname") or ""
                ln = cp.get("lastname") or ""
                contact_name = f"{fn} {ln}".strip() or "Unknown"
                contact_email = cp.get("email", "")
                contact_title = cp.get("jobtitle", "")
                company_name = cp.get("company") or company_name

                # Prefer the associated company record's name when available.
                company_data = client.get_company_for_contact(cid)
                if company_data:
                    company_name = company_data.get("properties", {}).get("name") or company_name

        # Detect engagement triggers: recent email open (<60d) or site visit (<90d).
        triggers = []
        if contact_data and contact_data.get("properties"):
            cp = contact_data["properties"]
            if parse_ts(cp.get("hs_email_last_open_date")):
                if (now - parse_ts(cp.get("hs_email_last_open_date"))).days < 60:
                    triggers.append("recent_email_open")
            if parse_ts(cp.get("hs_analytics_last_visit_timestamp")):
                if (now - parse_ts(cp.get("hs_analytics_last_visit_timestamp"))).days < 90:
                    triggers.append("recent_site_visit")

        has_trigger = len(triggers) > 0

        # Layer 1: Time Decay Score
        decay = compute_time_decay_score(days_since, amount, max_value, loss_reason, has_trigger)
        composite = decay["composite_score"]

        if composite < args.min_score:
            print(f" → score {composite} (skip)")
            continue

        print(f" → score {composite}")

        # Generate revival email
        original_email = draft_revival_email(
            contact_name, company_name, amount, loss_reason, days_since, contact_title
        )

        # Determine revival type
        revival_type = "trigger" if has_trigger else "time_decay"

        # "replacement"/"champion" emails are placeholders here — nothing in
        # this function fills them in (Layers 2/3 presumably run elsewhere).
        entry = {
            "deal_id": deal_id,
            "company": company_name,
            "original_contact": {
                "name": contact_name,
                "email": contact_email,
                "title": contact_title,
            },
            "deal_value": amount,
            "days_since_close": days_since,
            "close_date": dp.get("closedate", ""),
            "loss_reason": loss_reason,
            "pipeline": CLOSED_LOST_STAGES.get(dp.get("dealstage"), "Unknown"),
            "time_decay_score": decay["time_decay_weight"],
            "composite_score": composite,
            "poc_status": "unknown",
            "triggers": triggers,
            "revival_emails": {
                "original": original_email,
                "replacement": None,
                "champion": None,
            },
            "revival_type": revival_type,
        }
        results.append(entry)

    # Sort by composite score (descending) and keep the top N.
    results.sort(key=lambda x: x["composite_score"], reverse=True)
    top_results = results[:args.top]

    # Output payload written to OUTPUT_FILE (unless --dry-run).
    output = {
        "generated_at": now.isoformat(),
        "version": "v2",
        "total_closed_lost": len(deals),
        "above_min_value": len(filtered),
        "scored_above_threshold": len(results),
        "returned": len(top_results),
        "parameters": {
            "months": args.months,
            "min_score": args.min_score,
            "min_deal_value": args.min_deal_value,
            "top": args.top,
            "include_champion": args.include_champion,
        },
        "deals": top_results,
    }

    # Print summary
    print(f"\n{'='*70}")
    print(f"🔥 TOP {len(top_results)} REVIVAL OPPORTUNITIES")
    print(f"{'='*70}")
    for i, d in enumerate(top_results, 1):
        print(f"\n#{i} | Score: {d['composite_score']}/100 | {d['company']}")
        print(f"   Deal Value: ${d['deal_value']:,.0f} | Days Since Close: {d['days_since_close']}")
        print(f"   Contact: {d['original_contact']['name']} ({d['original_contact']['email']})")
        print(f"   Title: {d['original_contact']['title']}")
        print(f"   Loss Reason: {d['loss_reason']}")
        print(f"   Revival Type: {d['revival_type']}")
        print(f"   Triggers: {', '.join(d['triggers']) or 'none'}")

    if not args.dry_run:
        DATA_DIR.mkdir(parents=True, exist_ok=True)
        OUTPUT_FILE.write_text(json.dumps(output, indent=2, default=str))
        print(f"\n📁 Saved to {OUTPUT_FILE}")
    else:
        print(f"\n🏃 Dry run — not saving.")

    print(f"\n{'='*70}")
    print(f"✅ Deal Resurrector v2 complete. {len(top_results)} deals ready for review.")
665  
666  
# CLI entry point guard — run only when executed as a script.
if __name__ == "__main__":
    main()