#!/usr/bin/env python3
"""
Omi Morning Briefing Integration

Fetches yesterday's insights from Omi BLE pipeline and generates
a briefing summary for Tony's morning review.

Usage:
    python3 omi_briefing_integration.py --date 2026-03-04
    python3 omi_briefing_integration.py --yesterday
    python3 omi_briefing_integration.py --latest
"""

import json
import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Optional

# Module-wide logger; basicConfig is applied at import time, which also
# affects the root logger of any importing process.
logger = logging.getLogger(__name__)
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s [%(levelname)s] %(message)s'
)

# Fixed workspace layout: daily insight files are expected at
# <home>/.openclaw/.../omi-data/insights/insights-YYYY-MM-DD.json.
# NOTE(review): path is hard-coded to one deployment — confirm before reuse.
WORKSPACE = Path.home() / ".openclaw/workspace/projects/active/omi-direct-pipeline"
INSIGHTS_DIR = WORKSPACE / "omi-data/insights"


class OmiBriefingGenerator:
    """Generate a briefing summary from Omi insight JSON files.

    Typical use: populate ``self.insights`` via :meth:`load_insights` or
    :meth:`get_latest_insights`, then render with :meth:`generate_summary`,
    :meth:`to_json`, or :meth:`to_markdown`.
    """

    def __init__(self):
        # Insight dicts. Keys such as "timestamp", "duration_seconds",
        # "topics", "action_items", "commitments", "id" are all treated
        # as optional throughout this class.
        self.insights: List[Dict] = []

    @staticmethod
    def _empty_summary() -> Dict:
        """Summary skeleton for periods with no insights.

        Carries every key that to_markdown() and the CLI read, so the
        no-data case never raises KeyError downstream.
        """
        return {
            "timestamp": datetime.now().isoformat(),
            "status": "no_data",
            "message": "No insights available for this period",
            "date": "",
            "total_transcripts": 0,
            "total_duration_seconds": 0,
            "transcripts_with_actions": [],
            "all_topics": [],
            "all_action_items": [],
            "all_commitments": [],
            "summary_text": "No insights available for this period",
        }

    def load_insights(self, date_str: Optional[str] = None) -> List[Dict]:
        """
        Load insights for specified date.

        Args:
            date_str: YYYY-MM-DD format (None = yesterday)

        Returns:
            The loaded list (also stored on ``self.insights``), or [] when
            the directory/file is missing or the file is unreadable.
        """
        if date_str is None:
            # Default to yesterday's briefing window.
            date_str = (datetime.now() - timedelta(days=1)).strftime("%Y-%m-%d")

        logger.info(f"Loading insights for {date_str}...")

        if not INSIGHTS_DIR.exists():
            logger.warning("Insights directory does not exist")
            return []

        # One JSON file per day, named insights-YYYY-MM-DD.json.
        insights_file = INSIGHTS_DIR / f"insights-{date_str}.json"

        if not insights_file.exists():
            logger.warning(f"No insights found for {date_str}")
            return []

        try:
            with open(insights_file) as f:
                data = json.load(f)
        except Exception as e:
            logger.error(f"Error loading insights: {e}")
            return []

        # Guard against malformed files (e.g. a top-level dict), mirroring
        # the isinstance check already performed in get_latest_insights().
        if not isinstance(data, list):
            logger.warning(f"Unexpected insights format in {insights_file}")
            return []

        self.insights = data
        logger.info(f"Loaded {len(self.insights)} insights")
        return self.insights

    def generate_summary(self) -> Dict:
        """Aggregate loaded insights into a summary dict.

        The result always contains the keys consumed by to_markdown() and
        the CLI ("date", "summary_text", ...); previously the no-data
        branch omitted them and rendering an empty period crashed.
        """
        if not self.insights:
            return self._empty_summary()

        summary = {
            "timestamp": datetime.now().isoformat(),
            # First 10 chars of an ISO timestamp are YYYY-MM-DD; `or ""`
            # also covers an explicit None value for the key.
            "date": (self.insights[0].get("timestamp") or "")[:10],
            "total_transcripts": len(self.insights),
            "total_duration_seconds": sum(i.get("duration_seconds", 0) for i in self.insights),
            "transcripts_with_actions": [],
            "all_topics": [],
            "all_action_items": [],
            "all_commitments": [],
            "summary_text": ""
        }

        # Aggregate topics / action items / commitments across transcripts.
        topics = set()
        action_items = []
        commitments = []

        for insight in self.insights:
            if insight.get("topics"):
                topics.update(insight["topics"])

            if insight.get("action_items"):
                action_items.extend(insight["action_items"])
                summary["transcripts_with_actions"].append({
                    "id": insight.get("id"),
                    "timestamp": insight.get("timestamp"),
                    "action_items": insight.get("action_items")
                })

            if insight.get("commitments"):
                commitments.extend(insight["commitments"])

        summary["all_topics"] = sorted(topics)
        summary["all_action_items"] = action_items
        # sorted() instead of list(set(...)) so output is deterministic
        # across runs. Assumes commitments are strings (they are joined
        # with ', ' below) — TODO confirm against the pipeline schema.
        summary["all_commitments"] = sorted(set(commitments))

        # Build the plain-text summary shown by --format text.
        parts = []
        parts.append(f"📝 Omi Brief for {summary['date']}")
        parts.append(f"Total transcripts: {summary['total_transcripts']}")
        parts.append(f"Total duration: {summary['total_duration_seconds']} seconds ({summary['total_duration_seconds'] // 60} minutes)")

        if summary["all_topics"]:
            parts.append(f"\n🏷️ Topics discussed ({len(summary['all_topics'])}): {', '.join(summary['all_topics'][:10])}")

        if summary["transcripts_with_actions"]:
            parts.append(f"\n⚠️ Action items found in {len(summary['transcripts_with_actions'])} transcript(s)")
            for item in summary["transcripts_with_actions"][:5]:
                parts.append(f"  • {item.get('id', 'unknown')}")

        if summary["all_commitments"]:
            # Already deduplicated above; no need to re-wrap in set().
            parts.append(f"\n✅ Commitments made: {', '.join(summary['all_commitments'])}")

        summary["summary_text"] = "\n".join(parts)

        return summary

    def get_latest_insights(self, max_days: int = 7) -> List[Dict]:
        """Collect insights from the last *max_days* days, newest day first.

        Note: the result is returned, not stored on ``self.insights`` —
        the caller assigns it (see main()).
        """
        all_insights = []

        for offset in range(max_days):
            date = datetime.now() - timedelta(days=offset)
            date_str = date.strftime("%Y-%m-%d")

            insights_file = INSIGHTS_DIR / f"insights-{date_str}.json"

            if insights_file.exists():
                try:
                    with open(insights_file) as f:
                        data = json.load(f)
                        if isinstance(data, list):
                            all_insights.extend(data)
                except Exception as e:
                    # Best effort: skip unreadable days rather than abort.
                    logger.warning(f"Could not load {date_str}: {e}")

        return all_insights

    def to_json(self) -> str:
        """Export the summary as pretty-printed JSON."""
        summary = self.generate_summary()
        return json.dumps(summary, indent=2)

    def to_markdown(self) -> str:
        """Export the summary as Markdown.

        Safe to call with no insights loaded: the summary dict always
        carries the keys referenced here, so an empty period renders a
        stub report instead of raising KeyError.
        """
        summary = self.generate_summary()

        lines = []
        lines.append(f"# Omi Morning Briefing - {summary['date']}")
        lines.append("")
        lines.append(f"**Generated:** {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
        lines.append("")

        lines.append("## Summary")
        lines.append(f"- **Transcripts:** {summary['total_transcripts']}")
        lines.append(f"- **Duration:** {summary['total_duration_seconds']} sec ({summary['total_duration_seconds'] // 60} min)")
        lines.append("")

        if summary["all_topics"]:
            lines.append("## Topics Discussed")
            for topic in summary["all_topics"]:
                lines.append(f"- {topic}")
            lines.append("")

        if summary["transcripts_with_actions"]:
            lines.append("## ⚠️ Action Items Detected")
            for item in summary["transcripts_with_actions"]:
                lines.append(f"### {item.get('id', 'unknown')}")
                # Timestamp may be missing/None in malformed records; avoid
                # the TypeError that item['timestamp'][:10] raised.
                lines.append(f"*{(item.get('timestamp') or '')[:10]}*")
                for action in item.get("action_items") or []:
                    if isinstance(action, dict):
                        lines.append(f"- {action.get('phrase', 'Unknown action')}: {action.get('note', '')}")
                    else:
                        lines.append(f"- {action}")
                lines.append("")

        if summary["all_commitments"]:
            lines.append("## ✅ Commitments")
            for commitment in summary["all_commitments"]:
                lines.append(f"- {commitment}")
            lines.append("")

        return "\n".join(lines)


def main():
    """CLI entry point: parse arguments, load insights, emit the briefing."""
    import argparse

    parser = argparse.ArgumentParser(description="Omi Morning Briefing Generator")
    parser.add_argument("--date", help="Date in YYYY-MM-DD format (default: yesterday)")
    parser.add_argument("--yesterday", action="store_true", help="Use yesterday's data")
    parser.add_argument("--latest", action="store_true", help="Use latest available data")
    parser.add_argument("--format", choices=["json", "markdown", "text"], default="markdown",
                        help="Output format")
    parser.add_argument("--output", help="Output file (print to stdout if not specified)")
    args = parser.parse_args()

    generator = OmiBriefingGenerator()

    # Select the data window: --latest scans the last few days; --yesterday
    # and the bare default both resolve to yesterday inside load_insights().
    if args.latest:
        generator.insights = generator.get_latest_insights()
    else:
        generator.load_insights(None if args.yesterday else args.date)

    # Render in the requested format; "text" falls through to summary_text.
    renderers = {
        "json": generator.to_json,
        "markdown": generator.to_markdown,
    }
    render = renderers.get(args.format)
    output = render() if render else generator.generate_summary()["summary_text"]

    if args.output:
        with open(args.output, 'w') as f:
            f.write(output)
        print(f"✅ Saved to {args.output}")
    else:
        print(output)


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C is a normal way to stop the tool; exit quietly.
        logger.info("Interrupted by user")
    except Exception as e:
        # logger.exception logs the message at ERROR level *with* the
        # traceback, which the previous logger.error(f"...") discarded.
        logger.exception(f"Fatal error: {e}")
