"""
Load tests for the activities app.
"""
import gc
import random
import threading
import time

from django.contrib.auth import get_user_model
from django.db import connection
from django.test import TransactionTestCase
from django.test.utils import CaptureQueriesContext
from django.utils import timezone

from apps.activities.models import Activity, ActivityCategory
from apps.activities.utils import log_activity

User = get_user_model()


class ActivityLoadTest(TransactionTestCase):
    """Load testing for activity logging."""
    
    def setUp(self):
        """Set up test data."""
        self.users = []
        for i in range(10):
            user = User.objects.create_user(
                email=f'user{i}@example.com',
                username=f'user{i}',
                password='testpass123'
            )
            self.users.append(user)
        
        self.categories = []
        category_data = [
            ('User', 'user', '#007bff', 'fas fa-user'),
            ('System', 'system', '#6c757d', 'fas fa-cog'),
            ('Security', 'security', '#dc3545', 'fas fa-shield-alt'),
            ('Performance', 'performance', '#28a745', 'fas fa-tachometer-alt'),
            ('API', 'api', '#ffc107', 'fas fa-code'),
        ]
        
        for name, code, color, icon in category_data:
            category = ActivityCategory.objects.create(
                name=name,
                code=code,
                description=f'{name} activities',
                color=color,
                icon=icon,
                is_system=True
            )
            self.categories.append(category)
    
    def test_high_volume_logging(self):
        """Test logging high volumes of activities."""
        start_time = time.time()
        
        # Log 1000 activities
        for i in range(1000):
            user = random.choice(self.users)
            category = random.choice(self.categories)
            
            log_activity(
                user=user,
                action=f'LOAD_TEST_{i}',
                category=category,
                description=f'Load test activity {i}',
                metadata={
                    'index': i,
                    'batch': i // 100,
                    'random': random.randint(1, 100)
                }
            )
        
        end_time = time.time()
        duration = end_time - start_time
        
        # Log performance metrics
        print(f"Logged 1000 activities in {duration:.2f} seconds")
        print(f"Rate: {1000/duration:.2f} activities/second")
        
        # Verify all activities were created
        self.assertEqual(Activity.objects.count(), 1000)
        
        # Should complete within reasonable time
        self.assertLess(duration, 30.0, f"High volume logging took {duration:.2f} seconds")
    
    def test_bulk_create_performance(self):
        """Test bulk creation performance."""
        start_time = time.time()
        
        # Create activities in bulk
        activities = []
        for i in range(5000):
            user = random.choice(self.users)
            category = random.choice(self.categories)
            
            activities.append(Activity(
                user=user,
                action=f'BULK_CREATE_{i}',
                category=category,
                description=f'Bulk create test {i}',
                metadata={'index': i, 'batch': i // 500},
                created_at=timezone.now()
            ))
        
        Activity.objects.bulk_create(activities, batch_size=100)
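        # batch_size=100 above splits the 5000 rows into 50 multi-row INSERTs,
        # bounding per-statement size and parameter counts (Django lowers the
        # batch size further on SQLite automatically).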
        
        end_time = time.time()
        duration = end_time - start_time
        
        # Log performance metrics
        print(f"Bulk created 5000 activities in {duration:.2f} seconds")
        print(f"Rate: {5000/duration:.2f} activities/second")
        
        # Verify all activities were created
        self.assertEqual(Activity.objects.count(), 5000)
        
        # Should be much faster than individual creates
        self.assertLess(duration, 10.0, f"Bulk creation took {duration:.2f} seconds")
    
    def test_concurrent_read_write(self):
        """Test concurrent read and write operations."""
        
        # Create initial data
        for i in range(100):
            user = random.choice(self.users)
            category = random.choice(self.categories)
            log_activity(
                user=user,
                action=f'INITIAL_{i}',
                category=category
            )
        
        results = {'writes': 0, 'reads': 0, 'errors': 0}
        results_lock = threading.Lock()
        
        def write_activities():
            """Write activities in a separate thread."""
            try:
                for i in range(50):
                    user = random.choice(self.users)
                    category = random.choice(self.categories)
                    log_activity(
                        user=user,
                        action=f'CONCURRENT_WRITE_{i}',
                        category=category
                    )
                    with results_lock:
                        results['writes'] += 1
            except Exception as e:
                with results_lock:
                    results['errors'] += 1
                print(f"Write error: {e}")
            finally:
                # Django opens a separate connection per thread; close it so
                # worker threads do not leak connections.
                connection.close()
        
        def read_activities():
            """Read activities in a separate thread."""
            try:
                for i in range(50):
                    # Wrap slices in list() to force evaluation; bare queryset
                    # slices are lazy and would never touch the database.
                    Activity.objects.count()
                    list(Activity.objects.order_by('-created_at')[:10])
                    list(Activity.objects.filter(user=random.choice(self.users))[:5])
                    with results_lock:
                        results['reads'] += 1
            except Exception as e:
                with results_lock:
                    results['errors'] += 1
                print(f"Read error: {e}")
            finally:
                connection.close()
        
        # Create threads
        write_threads = [threading.Thread(target=write_activities) for _ in range(3)]
        read_threads = [threading.Thread(target=read_activities) for _ in range(5)]
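        # Note: this test assumes a server-backed database; SQLite (especially
        # in-memory) serializes writers and may raise "database is locked"
        # under concurrent access.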
        
        start_time = time.time()
        
        # Start all threads
        for thread in write_threads + read_threads:
            thread.start()
        
        # Wait for all threads to complete
        for thread in write_threads + read_threads:
            thread.join()
        
        end_time = time.time()
        duration = end_time - start_time
        
        # Log results
        print(f"Concurrent operations completed in {duration:.2f} seconds")
        print(f"Writes: {results['writes']}, Reads: {results['reads']}, Errors: {results['errors']}")
        
        # Should complete without errors
        self.assertEqual(results['errors'], 0, "Concurrent operations had errors")
        self.assertGreater(results['writes'], 0, "No writes completed")
        self.assertGreater(results['reads'], 0, "No reads completed")
    
    def test_memory_leak_detection(self):
        """Test for memory leaks during sustained activity logging."""
        
        # Force garbage collection
        gc.collect()
        initial_objects = len(gc.get_objects())
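        # gc.get_objects() counts every tracked Python object, so the baseline
        # is noisy; the generous threshold below avoids false positives from
        # lazily-populated caches (querysets, prepared statements, etc.).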
        
        # Log activities in batches
        for batch in range(10):
            for i in range(100):
                user = random.choice(self.users)
                category = random.choice(self.categories)
                log_activity(
                    user=user,
                    action=f'MEMORY_LEAK_TEST_{batch}_{i}',
                    category=category,
                    metadata={'batch': batch, 'index': i}
                )
            
            # Force garbage collection after each batch
            gc.collect()
        
        # Check final object count
        final_objects = len(gc.get_objects())
        object_increase = final_objects - initial_objects
        
        print(f"Object count increased by {object_increase} after 1000 activities")
        
        # Should not have excessive object growth
        self.assertLess(object_increase, 2000, 
                       f"Potential memory leak: {object_increase} objects created")
    
    def test_database_connection_pooling(self):
        """Test database query usage under load."""
        # connection.queries is only populated when DEBUG=True, so clearing
        # queries_log and reading len(connection.queries) counts zero queries
        # in a normal test run. CaptureQueriesContext forces the debug cursor
        # and records queries regardless of the DEBUG setting.
        with CaptureQueriesContext(connection) as ctx:
            # Log many activities
            for i in range(200):
                user = random.choice(self.users)
                category = random.choice(self.categories)
                log_activity(
                    user=user,
                    action=f'CONNECTION_TEST_{i}',
                    category=category
                )
        
        # Check query count
        query_count = len(ctx.captured_queries)
        
        print(f"Used {query_count} queries for 200 activities")
        print(f"Average: {query_count/200:.2f} queries per activity")
        
        # Should be efficient with queries
        self.assertLess(query_count, 400, f"Too many queries: {query_count}")
    
    def test_large_metadata_performance(self):
        """Test performance with large metadata objects."""
        start_time = time.time()
        
        # Create activities with large metadata
        for i in range(100):
            user = random.choice(self.users)
            category = random.choice(self.categories)
            
            # Create large metadata object
            large_metadata = {
                'description': 'A' * 1000,  # 1KB string
                'tags': ['tag' + str(j) for j in range(100)],  # 100 tags
                'nested': {
                    'level1': {
                        'level2': {
                            'data': list(range(50))
                        }
                    }
                },
                'index': i
            }
            
            log_activity(
                user=user,
                action=f'LARGE_METADATA_{i}',
                category=category,
                metadata=large_metadata
            )
        
        end_time = time.time()
        duration = end_time - start_time
        
        print(f"Logged 100 activities with large metadata in {duration:.2f} seconds")
        
        # Should handle large metadata reasonably well
        self.assertLess(duration, 15.0, f"Large metadata logging took {duration:.2f} seconds")
        
        # Test querying large metadata
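        # metadata__index__gte is a JSONField key-path lookup (Django 3.1+);
        # without an expression index on the extracted key, the database
        # evaluates the JSON extraction per row.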
        query_start = time.time()
        activities = Activity.objects.filter(metadata__index__gte=50)
        count = activities.count()
        query_end = time.time()
        query_duration = query_end - query_start
        
        print(f"Queried large metadata in {query_duration:.2f} seconds, found {count} activities")
        
        # Should query efficiently
        self.assertLess(query_duration, 5.0, f"Large metadata query took {query_duration:.2f} seconds")
    
    def test_cleanup_performance(self):
        """Test performance of cleanup operations."""
        # Create old activities
        old_time = timezone.now() - timezone.timedelta(days=90)
        
        activities = []
        for i in range(500):
            user = random.choice(self.users)
            category = random.choice(self.categories)
            
            activities.append(Activity(
                user=user,
                action=f'OLD_ACTIVITY_{i}',
                category=category
            ))
        
        Activity.objects.bulk_create(activities)
        # Backdate with update(): if created_at is an auto_now_add field, the
        # value passed to the constructor is overwritten on insert, whereas
        # update() writes the column directly.
        Activity.objects.update(created_at=old_time)
        
        # Test cleanup performance
        start_time = time.time()
        
        # Delete old activities
        deleted_count = Activity.objects.filter(
            created_at__lt=timezone.now() - timezone.timedelta(days=30)
        ).delete()[0]
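        # delete() returns (total_deleted, {model_label: count, ...}); [0] is
        # the total across all cascaded models.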
        
        end_time = time.time()
        duration = end_time - start_time
        
        print(f"Deleted {deleted_count} old activities in {duration:.2f} seconds")
        
        # Should complete quickly
        self.assertLess(duration, 5.0, f"Cleanup took {duration:.2f} seconds")
        self.assertEqual(deleted_count, 500, "Not all old activities were deleted")
