import fs from "fs/promises";
import path from "path";
import { globSync } from "glob";
import { createHash } from "crypto";
import { v5 as uuidv5 } from "uuid";
import pLimit from "p-limit";
import { Mutex } from "async-mutex";

import { IEmbedder, IVectorStore, ICodeParser, PointStruct, IndexingStats } from "../interfaces/index.js";
import { CacheManager } from "./cache-manager.js";
import { isPathInIgnoredDirectory, generateNormalizedAbsolutePath, generateRelativeFilePath } from "../utils/path-utils.js";
import { ConfigReader } from "../utils/config-reader.js";
import { InstructionManager } from "../utils/instruction-manager.js";
import {
  SUPPORTED_EXTENSIONS,
  isTextFile,
  MAX_FILE_SIZE_BYTES,
  BATCH_SEGMENT_THRESHOLD,
  MAX_BATCH_RETRIES,
  INITIAL_RETRY_DELAY_MS,
  PARSING_CONCURRENCY,
  BATCH_PROCESSING_CONCURRENCY,
  QDRANT_CODE_BLOCK_NAMESPACE,
} from "../constants/index.js";

/**
 * Scans directories and processes files for indexing.
 *
 * Files are parsed and embedded concurrently (bounded by PARSING_CONCURRENCY),
 * accumulated into shared batches, and flushed to the vector store whenever a
 * batch reaches BATCH_SEGMENT_THRESHOLD points (flushes bounded by
 * BATCH_PROCESSING_CONCURRENCY). A content-hash cache skips unchanged files
 * and drives cleanup of files deleted since the previous scan.
 */
export class DirectoryScanner {
  // Bounds concurrent per-file parse + embed work.
  private readonly parseLimiter = pLimit(PARSING_CONCURRENCY);
  // Bounds concurrent vector-store batch writes.
  private readonly batchLimiter = pLimit(BATCH_PROCESSING_CONCURRENCY);
  // Serializes access to the shared batch accumulators in scanDirectory().
  private readonly mutex = new Mutex();

  constructor(
    private readonly embedder: IEmbedder,
    private readonly vectorStore: IVectorStore,
    private readonly codeParser: ICodeParser,
    private readonly cacheManager: CacheManager,
  ) {}

  /**
   * Scans a directory and indexes all supported files.
   *
   * @param directoryPath Root directory to scan.
   * @param onProgress Called with a stats snapshot after each file finishes.
   * @param onError Called for per-file and per-batch failures.
   * @param verbose When true, logs configuration, discovery, and per-file detail.
   * @param configDirectory Directory holding the `.codebase-rag/` config
   *   (defaults to `process.cwd()`).
   * @returns Aggregate indexing statistics.
   * @throws Re-throws any fatal error (including a batch that exhausted its
   *   retries) after logging it.
   */
  async scanDirectory(
    directoryPath: string,
    onProgress?: (stats: IndexingStats) => void,
    onError?: (error: Error) => void,
    verbose: boolean = false,
    configDirectory?: string,
  ): Promise<IndexingStats> {
    const stats: IndexingStats = {
      processed: 0,
      skipped: 0,
      totalBlocks: 0,
      errors: 0,
    };

    try {
      // Read configuration files for include/exclude patterns.
      // The two reads are independent, so run them concurrently.
      const configDir = configDirectory || process.cwd();
      const configReader = new ConfigReader(configDir);
      const [includeDirs, excludeDirs] = await Promise.all([
        configReader.readIncludeDirs(),
        configReader.readExcludeDirs(),
      ]);

      if (verbose) {
        console.log(`📁 Configuration loaded from: ${configDir}/.codebase-rag/`);
        if (includeDirs.length > 0) {
          console.log(`  ✅ Include patterns: ${includeDirs.join(', ')}`);
        } else {
          console.log(`  📋 Include patterns: none (will process all supported files)`);
        }
        if (excludeDirs.length > 0) {
          console.log(`  ❌ Exclude patterns: ${excludeDirs.join(', ')}`);
        }
      }

      // Build ignore patterns - start with defaults, then add user excludes
      const defaultIgnorePatterns = [
        '**/node_modules/**',
        '**/.git/**',
        '**/dist/**',
        '**/build/**',
        '**/coverage/**',
        '**/.next/**',
        '**/.nuxt/**',
        '**/target/**',
        '**/.*/**',        // Exclude all hidden directories
        '**/.*',           // Exclude all hidden files
      ];

      // Add user-defined exclude patterns
      const ignorePatterns = [...defaultIgnorePatterns, ...excludeDirs];

      // Find all text files (more inclusive approach)
      let files: string[] = [];

      if (includeDirs.length > 0) {
        // If include patterns are specified, use them to find all files, then filter for text files
        for (const includePattern of includeDirs) {
          let pattern: string;

          // Heuristic: a pattern containing '*' or '.' is treated as a file
          // pattern; otherwise it is a directory to scan recursively.
          // NOTE(review): a directory name containing a dot (e.g. "my.dir")
          // would be misclassified here — acceptable per current convention.
          if (includePattern.includes('*') || includePattern.includes('.')) {
            pattern = includePattern;
          } else {
            // Directory pattern - scan all files in that directory
            pattern = includePattern.endsWith('/')
              ? `${includePattern}**/*`
              : `${includePattern}/**/*`;
          }

          const matchedFiles = globSync(pattern, {
            cwd: directoryPath,
            absolute: true,
            ignore: ignorePatterns,
            nodir: true, // Only files, not directories
          });
          files.push(...matchedFiles);
        }
        // Remove duplicates (overlapping include patterns may match twice)
        files = [...new Set(files)];

        // Filter to only include text files
        files = files.filter(file => isTextFile(file));
      } else {
        // Default behavior - scan all files, then filter for text files
        const allFiles = globSync('**/*', {
          cwd: directoryPath,
          absolute: true,
          ignore: ignorePatterns,
          nodir: true, // Only files, not directories
        });

        // Filter to only include text files
        files = allFiles.filter(file => isTextFile(file));
      }

      console.log(`Found ${files.length} files to process`);

      if (verbose) {
        console.log(`\n🔍 File discovery details:`);
        console.log(`  📂 Target directory: ${directoryPath}`);
        console.log(`  📊 Total files found: ${files.length}`);
        if (files.length > 0) {
          console.log(`  📝 First 10 files:`);
          files.slice(0, 10).forEach((file, index) => {
            // path.relative is robust even when directoryPath lacks a
            // trailing separator or appears as a substring elsewhere.
            console.log(`    ${index + 1}. ${path.relative(directoryPath, file)}`);
          });
          if (files.length > 10) {
            console.log(`    ... and ${files.length - 10} more files`);
          }
        }
        console.log(`\n⚡ Starting file processing...`);
      }

      // Every file that currently exists on disk, whether or not it needed
      // re-indexing. cleanupDeletedFiles() purges cache/vector-store entries
      // for anything NOT in this set, so it must also cover cache-hit
      // (skipped) files — tracking only *processed* files would wrongly purge
      // every unchanged file on re-scan.
      const existingFiles = new Set<string>(files);
      let currentBatchBlocks: PointStruct[] = [];
      let currentBatchFileInfos: Array<{ filePath: string; fileHash: string }> = [];
      const batchPromises: Promise<void>[] = [];

      // Process files in parallel with concurrency control
      const parsePromises = files.map((filePath) =>
        this.parseLimiter(async () => {
          try {
            const result = await this.processFile(filePath, directoryPath);

            if (verbose) {
              const relativePath = path.relative(directoryPath, filePath);
              if (result.status === 'processed') {
                console.log(`  ✅ ${relativePath} (${result.blocks?.length || 0} blocks)`);
              } else if (result.status === 'skipped') {
                console.log(`  ⏭️  ${relativePath} (skipped)`);
              } else if (result.status === 'error') {
                console.log(`  ❌ ${relativePath} (error: ${result.error?.message})`);
              }
            }

            if (result.status === 'processed') {
              stats.processed++;
              stats.totalBlocks += result.blocks?.length || 0;

              // Add to batch for vector storage
              if (result.points && result.points.length > 0) {
                // The accumulators are shared across concurrent callbacks;
                // guard the read-modify-write with the mutex.
                const release = await this.mutex.acquire();
                try {
                  currentBatchBlocks.push(...result.points);
                  currentBatchFileInfos.push({
                    filePath,
                    fileHash: result.fileHash!,
                  });

                  // Flush once the batch is large enough
                  if (currentBatchBlocks.length >= BATCH_SEGMENT_THRESHOLD) {
                    const batchBlocks = [...currentBatchBlocks];
                    const batchFileInfos = [...currentBatchFileInfos];
                    currentBatchBlocks = [];
                    currentBatchFileInfos = [];

                    // Kick off the batch write; awaited collectively below
                    batchPromises.push(
                      this.batchLimiter(() =>
                        this.processBatch(batchBlocks, batchFileInfos, onError)
                      )
                    );
                  }
                } finally {
                  release();
                }
              }
            } else if (result.status === 'skipped') {
              stats.skipped++;
            } else if (result.status === 'error') {
              stats.errors++;
              if (onError) {
                onError(result.error!);
              }
            }

            // Report progress
            if (onProgress) {
              onProgress({ ...stats });
            }
          } catch (error) {
            stats.errors++;
            console.error(`Error processing file ${filePath}:`, error);
            if (onError) {
              onError(error as Error);
            }
          }
        })
      );

      // Wait for all files to be processed
      await Promise.all(parsePromises);

      // Flush any remainder that never reached the batch threshold, using the
      // same concurrency limiter as threshold-triggered flushes.
      if (currentBatchBlocks.length > 0) {
        const finalBlocks = currentBatchBlocks;
        const finalFileInfos = currentBatchFileInfos;
        batchPromises.push(
          this.batchLimiter(() => this.processBatch(finalBlocks, finalFileInfos, onError))
        );
      }

      // Wait for all batch operations to complete
      console.log(`Waiting for ${batchPromises.length} batch operations to complete...`);
      await Promise.all(batchPromises);
      console.log("All batch operations completed");

      // Clean up deleted files from cache and vector store
      await this.cleanupDeletedFiles(existingFiles, directoryPath);

      console.log(`Indexing complete. Stats:`, stats);
      return stats;
    } catch (error) {
      console.error("Error during directory scanning:", error);
      throw error;
    }
  }

  /**
   * Processes a single file: hashes it, skips it if unchanged, otherwise
   * parses it into code blocks and embeds them.
   *
   * @param filePath Absolute path of the file to process.
   * @param workspaceRoot Scan root (currently unused; kept for signature
   *   compatibility).
   * @returns A result object; `points`/`fileHash` are set only for
   *   `'processed'`, `error` only for `'error'`. Never throws — failures are
   *   reported via the `'error'` status.
   */
  private async processFile(
    filePath: string,
    workspaceRoot: string,
  ): Promise<{
    status: 'processed' | 'skipped' | 'error';
    blocks?: any[];
    points?: PointStruct[];
    fileHash?: string;
    error?: Error;
  }> {
    try {
      // Check if file is in ignored directory
      if (isPathInIgnoredDirectory(filePath)) {
        return { status: 'skipped' };
      }

      // Skip oversized files
      const stats = await fs.stat(filePath);
      if (stats.size > MAX_FILE_SIZE_BYTES) {
        return { status: 'skipped' };
      }

      // Read file content and fingerprint it
      const content = await fs.readFile(filePath, 'utf-8');
      const fileHash = createHash('sha256').update(content).digest('hex');

      // Unchanged since last scan — nothing to re-index
      const cachedHash = this.cacheManager.getHash(filePath);
      if (cachedHash === fileHash) {
        return { status: 'skipped' };
      }

      // Parse file into code blocks
      const blocks = await this.codeParser.parseFile(filePath, { content, fileHash });

      if (blocks.length === 0) {
        return { status: 'skipped' };
      }

      // Wrap each block with the retrieval-document instruction before embedding
      const docType = this.getDocumentType(filePath);
      const instructedContent = blocks.map(block =>
        InstructionManager.getInstruction("retrieval_document", docType, block.content)
      );

      const { embeddings } = await this.embedder.createEmbeddings(
        instructedContent,
        docType,
        "retrieval_document"
      );

      // One point per block; segmentHash doubles as a stable point id
      const points = embeddings.map((embedding, i) => ({
        id: blocks[i].segmentHash,
        vector: embedding,
        payload: blocks[i],
      }));

      return {
        status: 'processed',
        blocks,
        points,
        fileHash,
      };
    } catch (error) {
      return {
        status: 'error',
        error: error as Error,
      };
    }
  }

  /**
   * Writes one batch of points to the vector store with retry.
   *
   * Deletes any existing points for the batch's files, upserts the new
   * points (split by document type), then records the new file hashes in the
   * cache — so a cache entry is only updated after a successful upsert.
   * Retries up to MAX_BATCH_RETRIES with exponential backoff.
   *
   * @param batchBlocks Points to upsert.
   * @param batchFileInfos File path + content hash for each file in the batch.
   * @param onError Notified when all retries are exhausted.
   * @throws When all retries fail (after invoking `onError`), so callers
   *   awaiting the batch see the failure too.
   */
  private async processBatch(
    batchBlocks: PointStruct[],
    batchFileInfos: Array<{ filePath: string; fileHash: string }>,
    onError?: (error: Error) => void,
  ): Promise<void> {
    let attempts = 0;
    let lastError: Error | null = null;

    console.log(`Processing batch of ${batchBlocks.length} blocks from ${batchFileInfos.length} files`);

    while (attempts < MAX_BATCH_RETRIES) {
      attempts++;
      try {
        // Delete existing points for modified files
        const filesToDelete = batchFileInfos.map(info => info.filePath);
        if (filesToDelete.length > 0) {
          console.log(`Deleting existing points for ${filesToDelete.length} files`);
          await this.vectorStore.deletePointsByMultipleFilePaths(filesToDelete, "code");
          await this.vectorStore.deletePointsByMultipleFilePaths(filesToDelete, "text");
        }

        // Upsert new points, routed to the collection for their document type
        console.log(`Upserting ${batchBlocks.length} new points to vector store`);
        const codePoints = batchBlocks.filter(p => this.getDocumentType(p.payload.filePath) === 'code');
        const textPoints = batchBlocks.filter(p => this.getDocumentType(p.payload.filePath) === 'text');

        if (codePoints.length > 0) await this.vectorStore.upsertPoints(codePoints, "code");
        if (textPoints.length > 0) await this.vectorStore.upsertPoints(textPoints, "text");

        console.log(`Successfully upserted ${batchBlocks.length} points`);

        // Only now mark the files as indexed at their new hashes
        for (const fileInfo of batchFileInfos) {
          await this.cacheManager.updateHash(fileInfo.filePath, fileInfo.fileHash);
        }

        console.log(`Batch processing completed successfully`);
        return; // Success
      } catch (error) {
        lastError = error as Error;
        console.error(`Batch processing error (attempt ${attempts}/${MAX_BATCH_RETRIES}):`, error);
        console.error(`Error details:`, {
          message: lastError.message,
          stack: lastError.stack,
          batchSize: batchBlocks.length
        });

        if (attempts < MAX_BATCH_RETRIES) {
          // Exponential backoff: delay doubles on each failed attempt
          const delay = INITIAL_RETRY_DELAY_MS * Math.pow(2, attempts - 1);
          console.log(`Retrying batch processing in ${delay}ms...`);
          await new Promise(resolve => setTimeout(resolve, delay));
        }
      }
    }

    // All retries failed
    const errorMsg = `Failed to process batch after ${MAX_BATCH_RETRIES} attempts: ${lastError?.message}`;
    console.error(errorMsg);
    if (lastError && onError) {
      onError(new Error(errorMsg));
    }
    // Re-throw the error to ensure it's properly propagated
    throw new Error(errorMsg);
  }

  /**
   * Removes cache and vector-store entries for files that no longer exist.
   *
   * @param existingFiles Absolute paths of every file found in the current
   *   scan (processed or skipped). Cached entries outside this set are
   *   treated as deleted; cleanup failures are logged but not fatal.
   * @param workspaceRoot Scan root (currently unused; kept for signature
   *   compatibility).
   */
  private async cleanupDeletedFiles(
    existingFiles: Set<string>,
    workspaceRoot: string,
  ): Promise<void> {
    const allCachedFiles = Object.keys(this.cacheManager.getAllHashes());

    for (const cachedFilePath of allCachedFiles) {
      if (!existingFiles.has(cachedFilePath)) {
        try {
          // File was deleted, remove from vector store and cache
          await this.vectorStore.deletePointsByMultipleFilePaths([cachedFilePath], "code");
          await this.vectorStore.deletePointsByMultipleFilePaths([cachedFilePath], "text");
          await this.cacheManager.deleteHash(cachedFilePath);
        } catch (error) {
          console.error(`Failed to cleanup deleted file ${cachedFilePath}:`, error);
        }
      }
    }
  }

  /**
   * Classifies a file as 'code' or 'text' by its extension.
   *
   * @param filePath Path whose extension is examined (case-insensitive).
   * @returns 'code' when the extension is in SUPPORTED_EXTENSIONS, else 'text'.
   */
  private getDocumentType(filePath: string): 'code' | 'text' {
    const extension = path.extname(filePath).toLowerCase();
    // A simple heuristic: if the extension is in our list of supported code extensions, it's code.
    // Otherwise, we'll treat it as text. This can be refined.
    return SUPPORTED_EXTENSIONS.includes(extension) ? 'code' : 'text';
  }
}
