import time
import uuid
from locust import HttpUser, task, between, events
import json
from collections import defaultdict
import logging
from retcode_stats import stats_collector, print_stats, export_stats

# Configure logging: INFO level, one module-scoped logger (stdlib convention).
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Running total of completed requests, incremented by the on_request
# listener below; drives the periodic stats dump (every 500 requests).
request_count = 0

# Test end event handler
@events.test_stop.add_listener
def on_test_stop(environment, **kwargs):
    """Emit the final retcode statistics report when the load test stops.

    Prints a console summary first, then exports the collected stats to
    CSV and JSON files; export errors are logged, never raised.
    """
    logger.info("Test ended, generating retcode statistics report...")

    # Console summary of the collected retcode distribution.
    print_stats()

    # File exports; a failure here must not crash Locust's shutdown path.
    try:
        for export_format in ('csv', 'json'):
            export_stats(export_format)
        logger.info("Retcode statistics report generation completed")
    except Exception as e:
        logger.error(f"Error exporting statistics report: {e}")

# Periodically print statistics
@events.request.add_listener
def on_request(request_type, name, response_time, response_length, response, context, exception, **kwargs):
    """Per-request Locust hook: count requests and periodically log stats.

    Fires for every request the test issues, so the common path is kept
    cheap; the detailed summary is only logged every 500th request.
    """
    global request_count
    request_count += 1

    # Print detailed statistics every 500 requests
    if request_count % 500 == 0:
        # Lazy %-style logging args instead of f-strings: the message is
        # only formatted if the record is actually emitted, which matters
        # in a hook that runs on every single request. Rendered output is
        # identical to the previous f-string versions.
        logger.info("\n=== Real-time Statistics (Requests: %d) ===", request_count)
        logger.info("Success Rate: %.2f%%", stats_collector.get_success_rate())
        logger.info("Error Rate: %.2f%%", stats_collector.get_error_rate())
        logger.info("Current retcode distribution: %s", stats_collector.get_stats())


class LogSearchUser(HttpUser):
    """Simulated user that repeatedly issues one log-search POST request."""

    def on_start(self):
        """Prepare the static headers and search payload shared by every task run."""
        self.headers = {
            'x-uin': '100001127589',
            'Content-Type': 'application/json'
        }

        self.payload = {
            "topic_id": "adfc163a-638f-4250-98e8-8f597515aefe",
            "starttime": "2025-06-24 15:30:01.046",
            "endtime": "2025-06-24 15:50:01.046",
            "query": "*|select count(*)",
            "queryrule": 2,
            "query_optimize": 1,
            "samplingrate": 1
        }

    @task
    def log_search_request(self):
        """Send one log-search request and classify it by HTTP status and retcode.

        Outcomes recorded via stats_collector: the business retcode on an
        HTTP 200, 'json_parse_error' on an unparseable body, 'http_error'
        on any non-200 status.
        """
        # Fresh per-request headers: unique request id + current time in ms.
        per_request_headers = dict(self.headers)
        per_request_headers['x-cls-requestid'] = str(uuid.uuid4())
        per_request_headers['x-request-time'] = str(int(time.time() * 1000))
        per_request_headers['x-request-timeout'] = '50000'

        with self.client.post(
            "/logsearch/8831bbd1-10a6-430e-89ad-85ee438e9029",
            headers=per_request_headers,
            json=self.payload,
            catch_response=True
        ) as response:
            # Guard clause: transport-level failure, record and bail out.
            if response.status_code != 200:
                stats_collector.record_retcode('http_error')
                response.failure(f"HTTP request failed, status code: {response.status_code}")
                return

            try:
                # retcode defaults to -1 when absent so it still gets recorded.
                retcode = response.json().get('retcode', -1)
                stats_collector.record_retcode(retcode)

                # retcode 0 is business success; anything else is a business failure.
                if retcode == 0:
                    response.success()
                else:
                    response.failure(f"Business failure, retcode: {retcode}")

            except (json.JSONDecodeError, KeyError) as e:
                stats_collector.record_retcode('json_parse_error')
                response.failure(f"Response parsing failed: {str(e)}")