"""Cleanup Logs Management Command

This command performs cleanup of old log files, audit records, and temporary data:
- Application log files
- Audit log records
- System health records
- Temporary files and cache
- Old session data
- Expired notifications
- Analytics data beyond retention period

Usage:
    python manage.py cleanup_logs [options]
    
Options:
    --days: Number of days to retain (default: 30)
    --dry-run: Show what would be deleted without actually deleting
    --verbose: Verbose output with detailed information (note: -v is Django's built-in verbosity flag)
    --force: Force cleanup without confirmation
    --type: Specific cleanup type (logs, audit, health, sessions, temp)
    --size-limit: Only clean files at least this size (e.g. 100MB, 1GB)
    --compress: Compress old files instead of deleting
    --backup: Create backup before cleanup
    
Examples:
    python manage.py cleanup_logs --days 30
    python manage.py cleanup_logs --dry-run --verbose
    python manage.py cleanup_logs --type audit --days 90
    python manage.py cleanup_logs --compress --backup
    python manage.py cleanup_logs --force --days 7
"""

import os
import gzip
import shutil
import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import Dict, List, Tuple, Optional

from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django.utils import timezone
from django.db import transaction
from django.contrib.sessions.models import Session

from apps.core.models import AuditLog, SystemHealth, ApplicationMetrics
from apps.core.logging import get_logger
from apps.accounts.models import LoginAttempt, UserSession
from apps.analytics.models import AnalyticsData


class Command(BaseCommand):
    """Clean up old log files, audit records, and temporary data.

    Running totals are accumulated in ``self.stats`` and printed by
    ``output_summary()`` at the end of the run.  In ``--dry-run`` mode the
    counters reflect what *would* have been removed; nothing is modified
    on disk or in the database.
    """

    help = 'Clean up old log files, audit records, and temporary data'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.logger = get_logger('core.management.cleanup_logs')
        # Per-run counters reported by output_summary().
        self.stats = {
            'files_deleted': 0,
            'files_compressed': 0,
            'files_backed_up': 0,
            'records_deleted': 0,
            'space_freed': 0,  # bytes
            'errors': 0,
        }
        self.dry_run = False
        self.verbose = False

    def add_arguments(self, parser):
        """Register command-line arguments.

        Note: no ``-v`` short flag for ``--verbose`` — Django's
        BaseCommand already registers ``-v/--verbosity``, and registering
        ``-v`` again raises an argparse conflict error at parser-creation
        time, which made the command unusable.
        """
        parser.add_argument(
            '--days',
            type=int,
            default=30,
            help='Number of days to retain (default: 30)'
        )
        parser.add_argument(
            '--dry-run',
            action='store_true',
            help='Show what would be deleted without actually deleting'
        )
        parser.add_argument(
            '--verbose',
            action='store_true',
            help='Verbose output with detailed information'
        )
        parser.add_argument(
            '--force',
            action='store_true',
            help='Force cleanup without confirmation'
        )
        parser.add_argument(
            '--type',
            choices=['logs', 'audit', 'health', 'sessions', 'temp', 'analytics'],
            help='Specific cleanup type'
        )
        parser.add_argument(
            '--size-limit',
            type=str,
            help='Only clean files at least this size (e.g., 100MB, 1GB)'
        )
        parser.add_argument(
            '--compress',
            action='store_true',
            help='Compress old files instead of deleting'
        )
        parser.add_argument(
            '--backup',
            action='store_true',
            help='Create backup before cleanup'
        )
        parser.add_argument(
            '--backup-dir',
            type=str,
            # The default must live here: argparse stores the key with
            # value None when the option is omitted, so a fallback passed
            # to options.get() in handle() would never be used.
            default='/var/backups/adtlas',
            help='Directory for backups (default: /var/backups/adtlas)'
        )

    def handle(self, *args, **options):
        """Main entry point: parse options, confirm, dispatch cleanup.

        Raises:
            CommandError: if any stage of the cleanup fails fatally.
        """
        try:
            self.dry_run = options.get('dry_run', False)
            self.verbose = options.get('verbose', False)

            days = options.get('days', 30)
            force = options.get('force', False)
            cleanup_type = options.get('type')
            size_limit = self.parse_size_limit(options.get('size_limit'))
            compress = options.get('compress', False)
            backup = options.get('backup', False)
            # "or" also guards against an explicit None (e.g. a caller
            # using call_command(..., backup_dir=None)).
            backup_dir = options.get('backup_dir') or '/var/backups/adtlas'

            self.logger.info(f"Starting cleanup process (days={days}, dry_run={self.dry_run})")

            if self.dry_run:
                self.stdout.write("DRY RUN MODE - No files will be deleted")

            # Anything older than this is eligible for cleanup.
            cutoff_date = timezone.now() - timedelta(days=days)

            # Ask for confirmation unless forced or harmless (dry run).
            if not force and not self.dry_run:
                if not self.confirm_cleanup(days, cleanup_type):
                    self.stdout.write("Cleanup cancelled by user")
                    return

            if backup:
                self.create_backup_directory(backup_dir)

            if cleanup_type:
                self.cleanup_specific_type(
                    cleanup_type, cutoff_date, size_limit, compress, backup, backup_dir
                )
            else:
                self.cleanup_all_types(
                    cutoff_date, size_limit, compress, backup, backup_dir
                )

            self.output_summary()

            self.logger.info("Cleanup process completed successfully")

        except Exception as e:
            self.logger.error(f"Cleanup process failed: {str(e)}")
            raise CommandError(f"Cleanup failed: {str(e)}")

    def confirm_cleanup(self, days, cleanup_type=None):
        """Ask the user to confirm the destructive operation.

        Returns True only on an explicit 'y'/'yes'; EOF or Ctrl-C count
        as refusal so unattended runs fail safe.
        """
        if cleanup_type:
            message = f"This will clean up {cleanup_type} data older than {days} days."
        else:
            message = f"This will clean up all data older than {days} days."

        message += "\nThis operation cannot be undone. Continue? [y/N]: "

        try:
            response = input(message)
            return response.lower() in ['y', 'yes']
        except (EOFError, KeyboardInterrupt):
            return False

    def cleanup_all_types(self, cutoff_date, size_limit=None, compress=False,
                         backup=False, backup_dir=None):
        """Run every cleanup type; a failure in one does not stop the rest."""
        cleanup_types = ['logs', 'audit', 'health', 'sessions', 'temp', 'analytics']

        for cleanup_type in cleanup_types:
            try:
                self.cleanup_specific_type(
                    cleanup_type, cutoff_date, size_limit, compress, backup, backup_dir
                )
            except Exception as e:
                self.logger.error(f"Failed to cleanup {cleanup_type}: {str(e)}")
                self.stats['errors'] += 1

    def cleanup_specific_type(self, cleanup_type, cutoff_date, size_limit=None,
                            compress=False, backup=False, backup_dir=None):
        """Dispatch one cleanup type to its handler.

        size_limit/compress/backup options only apply to file-based
        cleanups; database cleanups ignore them.
        """
        if self.verbose:
            self.stdout.write(f"\nCleaning up {cleanup_type} data...")

        if cleanup_type == 'logs':
            self.cleanup_log_files(cutoff_date, size_limit, compress, backup, backup_dir)
        elif cleanup_type == 'audit':
            self.cleanup_audit_records(cutoff_date)
        elif cleanup_type == 'health':
            self.cleanup_health_records(cutoff_date)
        elif cleanup_type == 'sessions':
            self.cleanup_session_data(cutoff_date)
        elif cleanup_type == 'temp':
            self.cleanup_temp_files(cutoff_date, size_limit)
        elif cleanup_type == 'analytics':
            self.cleanup_analytics_data(cutoff_date)

    def cleanup_log_files(self, cutoff_date, size_limit=None, compress=False,
                         backup=False, backup_dir=None):
        """Clean up '*.log' files in the known application log locations."""
        log_directories = [
            getattr(settings, 'LOG_DIR', '/var/log/adtlas'),
            os.path.join(settings.BASE_DIR, 'logs'),
            '/tmp/adtlas_logs',
        ]

        for log_dir in log_directories:
            if os.path.exists(log_dir):
                self.cleanup_directory(
                    log_dir, cutoff_date, '*.log', size_limit,
                    compress, backup, backup_dir
                )

    def cleanup_audit_records(self, cutoff_date):
        """Delete AuditLog rows with timestamp older than *cutoff_date*."""
        try:
            with transaction.atomic():
                old_records = AuditLog.objects.filter(
                    timestamp__lt=cutoff_date
                )

                count = old_records.count()

                if self.verbose:
                    self.stdout.write(f"Found {count} audit records to delete")

                if count > 0:
                    if not self.dry_run:
                        old_records.delete()

                        if self.verbose:
                            self.stdout.write(f"Deleted {count} audit records")
                    # Counted in dry run too so the summary reflects what
                    # would be removed.
                    self.stats['records_deleted'] += count

        except Exception as e:
            self.logger.error(f"Failed to cleanup audit records: {str(e)}")
            self.stats['errors'] += 1

    def cleanup_health_records(self, cutoff_date):
        """Delete SystemHealth rows with timestamp older than *cutoff_date*."""
        try:
            with transaction.atomic():
                old_records = SystemHealth.objects.filter(
                    timestamp__lt=cutoff_date
                )

                count = old_records.count()

                if self.verbose:
                    self.stdout.write(f"Found {count} health records to delete")

                if count > 0:
                    if not self.dry_run:
                        old_records.delete()

                        if self.verbose:
                            self.stdout.write(f"Deleted {count} health records")
                    self.stats['records_deleted'] += count

        except Exception as e:
            self.logger.error(f"Failed to cleanup health records: {str(e)}")
            self.stats['errors'] += 1

    def cleanup_session_data(self, cutoff_date):
        """Delete expired Django sessions, inactive user sessions, and
        old login attempts in one transaction."""
        try:
            with transaction.atomic():
                # Django's own session store.
                expired_sessions = Session.objects.filter(
                    expire_date__lt=cutoff_date
                )

                count = expired_sessions.count()

                if self.verbose:
                    self.stdout.write(f"Found {count} expired sessions to delete")

                if count > 0:
                    if not self.dry_run:
                        expired_sessions.delete()
                    self.stats['records_deleted'] += count

                # Application-level sessions: only inactive ones are
                # eligible, so live logins are never dropped.
                old_user_sessions = UserSession.objects.filter(
                    created_at__lt=cutoff_date,
                    is_active=False
                )

                user_session_count = old_user_sessions.count()

                if self.verbose:
                    self.stdout.write(f"Found {user_session_count} old user sessions to delete")

                if user_session_count > 0:
                    if not self.dry_run:
                        old_user_sessions.delete()
                    self.stats['records_deleted'] += user_session_count

                # Login attempt audit trail.
                old_login_attempts = LoginAttempt.objects.filter(
                    timestamp__lt=cutoff_date
                )

                login_attempt_count = old_login_attempts.count()

                if self.verbose:
                    self.stdout.write(f"Found {login_attempt_count} old login attempts to delete")

                if login_attempt_count > 0:
                    if not self.dry_run:
                        old_login_attempts.delete()
                    self.stats['records_deleted'] += login_attempt_count

                total_count = count + user_session_count + login_attempt_count

                if self.verbose and total_count > 0:
                    self.stdout.write(f"Deleted {total_count} session-related records")

        except Exception as e:
            self.logger.error(f"Failed to cleanup session data: {str(e)}")
            self.stats['errors'] += 1

    def cleanup_temp_files(self, cutoff_date, size_limit=None):
        """Delete old files from the known temp locations (never compressed
        or backed up — they are disposable by definition)."""
        temp_directories = [
            '/tmp/adtlas',
            os.path.join(settings.BASE_DIR, 'tmp'),
            os.path.join(settings.MEDIA_ROOT, 'temp'),
        ]

        for temp_dir in temp_directories:
            if os.path.exists(temp_dir):
                self.cleanup_directory(
                    temp_dir, cutoff_date, '*', size_limit
                )

    def cleanup_analytics_data(self, cutoff_date):
        """Delete AnalyticsData rows beyond the retention period.

        The settings-level retention (ANALYTICS_RETENTION_DAYS, default
        365) always wins over a shorter --days value: min() picks the
        earlier cutoff, so analytics data inside the retention window is
        never deleted.
        """
        try:
            with transaction.atomic():
                retention_days = getattr(settings, 'ANALYTICS_RETENTION_DAYS', 365)
                analytics_cutoff = timezone.now() - timedelta(days=retention_days)

                # Earlier datetime == stricter (deletes less).
                actual_cutoff = min(cutoff_date, analytics_cutoff)

                old_analytics = AnalyticsData.objects.filter(
                    timestamp__lt=actual_cutoff
                )

                count = old_analytics.count()

                if self.verbose:
                    self.stdout.write(f"Found {count} analytics records to delete")

                if count > 0:
                    if not self.dry_run:
                        old_analytics.delete()

                        if self.verbose:
                            self.stdout.write(f"Deleted {count} analytics records")
                    self.stats['records_deleted'] += count

        except Exception as e:
            self.logger.error(f"Failed to cleanup analytics data: {str(e)}")
            self.stats['errors'] += 1

    def cleanup_directory(self, directory, cutoff_date, pattern='*',
                         size_limit=None, compress=False, backup=False, backup_dir=None):
        """Delete (or gzip, when *compress*) files under *directory*.

        Files are selected recursively by *pattern*, must be older than
        *cutoff_date*, and — when *size_limit* is set — at least that
        many bytes.  Each file may be copied to *backup_dir* first.
        Per-file failures are logged and counted, not raised.
        """
        try:
            directory_path = Path(directory)

            if not directory_path.exists():
                return

            # Collect candidates first so the tree is not mutated while
            # rglob() is still iterating it.
            files_to_process = []

            for file_path in directory_path.rglob(pattern):
                if not file_path.is_file():
                    continue

                stat_result = file_path.stat()
                file_mtime = datetime.fromtimestamp(
                    stat_result.st_mtime, tz=timezone.get_current_timezone()
                )

                if file_mtime >= cutoff_date:
                    continue

                # size_limit is a minimum: only clean files at least
                # that large.
                if size_limit and stat_result.st_size < size_limit:
                    continue

                # Never re-compress an already-compressed file.
                if compress and file_path.suffix == '.gz':
                    continue

                files_to_process.append((file_path, stat_result.st_size))

            if self.verbose:
                self.stdout.write(
                    f"Found {len(files_to_process)} files to process in {directory}"
                )

            for file_path, file_size in files_to_process:
                try:
                    if backup and backup_dir:
                        self.backup_file(file_path, backup_dir)

                    if compress:
                        if self.dry_run:
                            # Upper-bound estimate; actual savings depend
                            # on the compression ratio.
                            freed = file_size
                        else:
                            compressed_size = self.compress_file(file_path)
                            if compressed_size is None:
                                # Compression failed (already logged); do
                                # not count it as compressed/freed.
                                continue
                            freed = max(file_size - compressed_size, 0)
                        self.stats['files_compressed'] += 1
                    else:
                        if not self.dry_run:
                            file_path.unlink()
                        freed = file_size
                        self.stats['files_deleted'] += 1

                    self.stats['space_freed'] += freed

                    if self.verbose:
                        action = "compressed" if compress else "deleted"
                        self.stdout.write(f"  {action}: {file_path}")

                except Exception as e:
                    self.logger.error(f"Failed to process file {file_path}: {str(e)}")
                    self.stats['errors'] += 1

        except Exception as e:
            self.logger.error(f"Failed to cleanup directory {directory}: {str(e)}")
            self.stats['errors'] += 1

    def backup_file(self, file_path, backup_dir):
        """Copy *file_path* into *backup_dir* with a timestamp suffix."""
        try:
            backup_path = Path(backup_dir)

            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            backup_file_path = backup_path / f"{file_path.name}_{timestamp}"

            if not self.dry_run:
                # Only touch the filesystem outside dry-run mode (the
                # original also created the directory during dry runs).
                backup_path.mkdir(parents=True, exist_ok=True)
                shutil.copy2(file_path, backup_file_path)
            self.stats['files_backed_up'] += 1

            if self.verbose:
                self.stdout.write(f"  backed up: {file_path} -> {backup_file_path}")

        except Exception as e:
            self.logger.error(f"Failed to backup file {file_path}: {str(e)}")
            self.stats['errors'] += 1

    def compress_file(self, file_path):
        """Gzip *file_path* in place and remove the original.

        Returns:
            The size of the compressed file in bytes, or None on failure
            (any partially written '.gz' is removed).
        """
        compressed_path = Path(str(file_path) + '.gz')

        try:
            with open(file_path, 'rb') as f_in:
                with gzip.open(compressed_path, 'wb') as f_out:
                    shutil.copyfileobj(f_in, f_out)

            # Remove the original only after a fully successful write.
            file_path.unlink()

            if self.verbose:
                self.stdout.write(f"  compressed: {file_path} -> {compressed_path}")

            return compressed_path.stat().st_size

        except Exception as e:
            self.logger.error(f"Failed to compress file {file_path}: {str(e)}")
            self.stats['errors'] += 1
            # Don't leave a truncated archive behind.
            try:
                compressed_path.unlink()
            except OSError:
                pass
            return None

    def create_backup_directory(self, backup_dir):
        """Create the backup directory (idempotent); fatal on failure."""
        try:
            backup_path = Path(backup_dir)
            backup_path.mkdir(parents=True, exist_ok=True)

            if self.verbose:
                self.stdout.write(f"Backup directory: {backup_dir}")

        except Exception as e:
            self.logger.error(f"Failed to create backup directory {backup_dir}: {str(e)}")
            raise CommandError(f"Cannot create backup directory: {str(e)}")

    def parse_size_limit(self, size_str: Optional[str]) -> Optional[int]:
        """Parse a human-readable size ('100MB', '1GB', '2TB', or bare
        bytes) into an integer byte count; None for empty/invalid input."""
        if not size_str:
            return None

        size_str = size_str.strip().upper()

        # Ordered mapping of accepted suffixes to multipliers.
        units = {'KB': 1024, 'MB': 1024 ** 2, 'GB': 1024 ** 3, 'TB': 1024 ** 4}

        try:
            for suffix, factor in units.items():
                if size_str.endswith(suffix):
                    return int(size_str[:-len(suffix)]) * factor
            # No suffix: interpret as raw bytes.
            return int(size_str)
        except ValueError:
            self.logger.warning(f"Invalid size format: {size_str}")
            return None

    def format_size(self, size_bytes: int) -> str:
        """Format a byte count as a human-readable string (B/KB/MB/GB)."""
        if size_bytes < 1024:
            return f"{size_bytes} B"
        elif size_bytes < 1024 * 1024:
            return f"{size_bytes / 1024:.1f} KB"
        elif size_bytes < 1024 * 1024 * 1024:
            return f"{size_bytes / (1024 * 1024):.1f} MB"
        else:
            return f"{size_bytes / (1024 * 1024 * 1024):.1f} GB"

    def output_summary(self):
        """Print the final statistics banner for the run."""
        self.stdout.write("\n" + "=" * 50)
        self.stdout.write("CLEANUP SUMMARY")
        self.stdout.write("=" * 50)

        if self.dry_run:
            self.stdout.write("DRY RUN - No actual changes made")

        self.stdout.write(f"Files deleted: {self.stats['files_deleted']}")
        self.stdout.write(f"Files compressed: {self.stats['files_compressed']}")
        self.stdout.write(f"Files backed up: {self.stats['files_backed_up']}")
        self.stdout.write(f"Database records deleted: {self.stats['records_deleted']}")
        self.stdout.write(f"Space freed: {self.format_size(self.stats['space_freed'])}")

        if self.stats['errors'] > 0:
            self.stdout.write(f"Errors encountered: {self.stats['errors']}")

        self.stdout.write("=" * 50 + "\n")