import aiConfig                                      from '../../config.mjs';
import Base                                          from '../../../../../../src/core/Base.mjs';
import crypto                                        from 'crypto';
import fs                                            from 'fs/promises';
import logger                                        from '../../logger.mjs';
import matter                                        from 'gray-matter';
import path                                          from 'path';
import GraphqlService                                from '../GraphqlService.mjs';
import ReleaseSyncer                                 from './ReleaseSyncer.mjs';
import {FETCH_ISSUES_FOR_SYNC, FETCH_SINGLE_ISSUE} from '../queries/issueQueries.mjs';
import {GET_ISSUE_ID, UPDATE_ISSUE}                  from '../queries/mutations.mjs';

// Shortcut to the issue-sync section of the shared AI workflow configuration.
const issueSyncConfig = aiConfig.issueSync;

/**
 * @summary Handles fetching, creating, and updating local issue files from GitHub.
 *
 * This service manages the conversion between the GitHub issue JSON format and
 * the local Markdown format with frontmatter. It handles the logic for:
 * - Formatting issue bodies and comments
 * - Archiving closed issues based on milestones or release dates
 * - Detecting content changes via hashing
 * - Pushing local edits back to GitHub
 *
 * @class Neo.ai.mcp.server.github-workflow.services.sync.IssueSyncer
 * @extends Neo.core.Base
 * @singleton
 */
class IssueSyncer extends Base {
    static config = {
        /**
         * @member {String} className='Neo.ai.mcp.server.github-workflow.services.sync.IssueSyncer'
         * @protected
         */
        className: 'Neo.ai.mcp.server.github-workflow.services.sync.IssueSyncer',
        /**
         * @member {Boolean} singleton=true
         * @protected
         */
        singleton: true
    }

    /**
     * Calculates a SHA-256 hash of the given content for change detection.
     * @param {string} content The content to hash.
     * @returns {string} The hex-encoded hash.
     * @private
     */
    #calculateContentHash(content) {
        return crypto.createHash('sha256').update(content).digest('hex');
    }

    /**
     * Formats a GitHub issue and its comments into a single Markdown string with YAML frontmatter.
     * @param {object}   issue    The GitHub issue object.
     * @param {object[]} comments An array of comment objects associated with the issue.
     * @returns {string} The fully formatted Markdown string.
     * @private
     */
    #formatIssueMarkdown(issue, comments) {
        const frontmatter = {
            id                : issue.number,
            title             : issue.title,
            state             : issue.state,
            labels            : issue.labels.nodes.map(l => l.name),
            assignees         : issue.assignees.nodes.map(a => a.login),
            createdAt         : issue.createdAt,
            updatedAt         : issue.updatedAt,
            githubUrl         : issue.url,
            author            : issue.author.login,
            commentsCount     : comments.length,
            parentIssue       : issue.parent ? issue.parent.number : null,
            subIssues         : issue.subIssues?.nodes.map(sub => sub.number) || [],
            subIssuesCompleted: issue.subIssuesSummary?.completed || 0,
            subIssuesTotal    : issue.subIssuesSummary?.total || 0,
            blockedBy         : issue.blockedBy?.nodes.map(b => b.number) || [],
            blocking          : issue.blocking?.nodes.map(b => b.number) || []
        };

        // Optional frontmatter fields, only added when present on the issue
        if (issue.closedAt) {
            frontmatter.closedAt = issue.closedAt;
        }
        if (issue.milestone) {
            frontmatter.milestone = issue.milestone.title;
        }

        let body = `# ${issue.title}\n\n`;

        body += issue.body || '*(No description provided)*';
        body += '\n\n';

        if (comments.length > 0) {
            body += issueSyncConfig.commentSectionDelimiter + '\n\n';
            for (const comment of comments) {
                // createdAt is expected to be an ISO-8601 timestamp (date + 'T' + time).
                // Destructure defensively so a date-only timestamp cannot throw.
                const [date, timePart = ''] = comment.createdAt.split('T');
                const time = timePart.substring(0, 5);
                body += `### @${comment.author.login} - ${date} ${time}\n\n`;
                body += comment.body;
                body += '\n\n';
            }
        }

        // Add Activity Log section (read-only; stripped again before any push)
        if (issue.timelineItems?.nodes?.length > 0) {
            body += '## Activity Log\n\n';
            for (const event of issue.timelineItems.nodes) {
                body += this.#formatTimelineEvent(event);
            }
            body += '\n';
        }

        return matter.stringify(body, frontmatter);
    }

    /**
     * Formats a single timeline event into a human-readable Markdown string.
     * @param {object} event The timeline event object.
     * @returns {string} The formatted Markdown string for the event.
     * @private
     */
    #formatTimelineEvent(event) {
        const date  = event.createdAt.split('T')[0];
        const actor = event.actor?.login || 'Ghost';
        let details = '';

        switch (event.__typename) {
            case 'LabeledEvent':
                details = `added the \`${event.label.name}\` label`;
                break;
            case 'UnlabeledEvent':
                details = `removed the \`${event.label.name}\` label`;
                break;
            case 'AssignedEvent':
                details = `assigned to @${event.assignee.login}`; // Assuming assignee is always a User
                break;
            case 'UnassignedEvent':
                details = `unassigned from @${event.assignee.login}`; // Assuming assignee is always a User
                break;
            case 'ClosedEvent':
                details = `closed this issue`;
                break;
            case 'ReferencedEvent': {
                // Keep only the first line of the commit message.
                // Must split on a real newline ('\n'), not the literal two-char string '\\n'.
                const commitMessage = event.commit.message.split('\n')[0];
                details = `referenced in commit \`${event.commit.oid.substring(0, 7)}\` - "${commitMessage}"`;
                break;
            }
            case 'CrossReferencedEvent': {
                const sourceRef = event.source.__typename === 'Issue' ? `#${event.source.number}` : `PR #${event.source.number}`;
                details = `cross-referenced by ${sourceRef}`;
                break;
            }
            case 'SubIssueAddedEvent':
                details = `added sub-issue #${event.subIssue.number}`;
                break;
            case 'SubIssueRemovedEvent':
                details = `removed sub-issue #${event.subIssue.number}`;
                break;
            case 'ParentIssueAddedEvent':
                details = `added parent issue #${event.parent.number}`;
                break;
            case 'ParentIssueRemovedEvent':
                details = `removed parent issue #${event.parent.number}`;
                break;
            case 'BlockedByAddedEvent':
                details = `marked this issue as being blocked by #${event.blockingIssue.number}`;
                break;
            case 'BlockingAddedEvent':
                details = `marked this issue as blocking #${event.blockedIssue.number}`;
                break;
            case 'BlockedByRemovedEvent':
                details = `removed the block by #${event.blockingIssue.number}`;
                break;
            case 'BlockingRemovedEvent':
                details = `removed the block on #${event.blockedIssue.number}`;
                break;
            default:
                details = `performed a "${event.__typename}" event`;
        }

        return `- ${date} @${actor} ${details}\n`;
    }

    /**
     * Determines the correct local file path for a given issue based on its state (OPEN/CLOSED),
     * labels (dropped), and milestone or closed date (for archiving).
     * @param {object} issue The GitHub issue object.
     * @returns {string|null} The absolute file path for the issue's Markdown file, or null if the issue should be dropped.
     * @private
     */
    #getIssuePath(issue) {
        const filename = `${issueSyncConfig.issueFilenamePrefix}${issue.number}.md`;

        // Handle both GraphQL (issue.labels.nodes) and potential direct array
        const labels = issue.labels?.nodes
            ? issue.labels.nodes.map(l => l.name.toLowerCase())
            : issue.labels?.map(l => l.name?.toLowerCase() || l.toLowerCase()) || [];

        const isDropped = issueSyncConfig.droppedLabels.some(label => labels.includes(label));
        if (isDropped) {
            return null; // Dropped issues are not stored locally.
        }

        // OPEN issues are always in the main directory
        if (issue.state === 'OPEN') {
            return path.join(issueSyncConfig.issuesDir, filename);
        }

        // Logic for CLOSED issues
        if (issue.state === 'CLOSED') {
            // If an issue has a milestone, it is explicitly archived under that version.
            if (issue.milestone?.title) {
                const milestoneDir = issue.milestone.title.startsWith(issueSyncConfig.versionDirectoryPrefix)
                    ? issue.milestone.title
                    : issueSyncConfig.versionDirectoryPrefix + issue.milestone.title;
                return path.join(issueSyncConfig.archiveDir, milestoneDir, filename);
            }

            // For issues without a milestone, find the earliest release that was published after it was closed.
            const closed = new Date(issue.closedAt);

            const release = (ReleaseSyncer.sortedReleases || []).find(r => new Date(r.publishedAt) > closed);

            // If a subsequent release exists, archive the issue under that release tag.
            if (release) {
                const releaseDir = release.tagName.startsWith(issueSyncConfig.versionDirectoryPrefix)
                    ? release.tagName
                    : issueSyncConfig.versionDirectoryPrefix + release.tagName;
                return path.join(issueSyncConfig.archiveDir, releaseDir, filename);
            }

            // If no subsequent release is found, the issue is recently closed and remains in the main issues directory.
            return path.join(issueSyncConfig.issuesDir, filename);
        }

        return null;
    }

    /**
     * Fetches all relevant issues from GitHub using GraphQL with automatic pagination.
     * This single query fetches issues WITH their comments and relationships in one go!
     * @param {object} metadata
     * @returns {Promise<{newMetadata: object, stats: object}>}
     */
    async pullFromGitHub(metadata) {
        logger.info('📥 Fetching issues from GitHub via GraphQL...');

        let allIssues   = [];
        let hasNextPage = true;
        let cursor      = null;
        let totalCost   = 0;
        const maxIssues = issueSyncConfig.maxIssues;

        // Paginate through all issues
        while (hasNextPage && allIssues.length < maxIssues) {
            const data = await GraphqlService.query(
                FETCH_ISSUES_FOR_SYNC,
                {
                    owner           : aiConfig.owner,
                    repo            : aiConfig.repo,
                    limit           : 100,
                    cursor,
                    states          : ['OPEN', 'CLOSED'],
                    since           : metadata.lastSync || issueSyncConfig.syncStartDate, // Use lastSync for delta updates
                    maxLabels       : issueSyncConfig.maxLabelsPerIssue,
                    maxAssignees    : issueSyncConfig.maxAssigneesPerIssue,
                    maxComments     : issueSyncConfig.maxCommentsPerIssue,
                    maxSubIssues    : issueSyncConfig.maxSubIssuesPerIssue,
                    maxTimelineItems: issueSyncConfig.maxTimelineItemsPerIssue
                },
                true // Enable sub-issues feature
            );

            const issues = data.repository.issues;
            allIssues.push(...issues.nodes);

            hasNextPage = issues.pageInfo.hasNextPage;
            cursor      = issues.pageInfo.endCursor;

            // Monitor rate limit usage
            totalCost += data.rateLimit.cost;
            logger.debug(`Fetched ${issues.nodes.length} issues (total: ${allIssues.length}, cost: ${totalCost}, remaining: ${data.rateLimit.remaining})`);

            // Safety check: If rate limit is getting low, warn
            if (data.rateLimit.remaining < 500) {
                logger.warn(`⚠️ GraphQL rate limit low: ${data.rateLimit.remaining} remaining, resets at ${data.rateLimit.resetAt}`);
            }
        }

        logger.info(`Found ${allIssues.length} issues updated since last sync`);

        const newMetadata = {
            issues      : { ...metadata.issues }, // Start with existing metadata
            pushFailures: metadata.pushFailures || [],
            lastSync    : new Date().toISOString()
        };

        const stats = {
            pulled : { count: 0, created: 0, updated: 0, moved: 0, issues: [] },
            dropped: { count: 0, issues: [] }
        };

        // Process each issue
        for (const issue of allIssues) {
            const issueNumber = issue.number;
            const targetPath  = this.#getIssuePath(issue);

            if (!targetPath) {
                stats.dropped.count++;
                stats.dropped.issues.push(issueNumber);
                const oldPath = metadata.issues[issueNumber]?.path;
                if (oldPath) {
                    try {
                        await fs.unlink(oldPath);
                        logger.info(`🗑️ Removed dropped issue #${issueNumber}: ${oldPath}`);
                    } catch (e) { /* File might not exist */ }
                }
                // Remove from metadata
                delete newMetadata.issues[issueNumber];
                continue;
            }

            const oldIssue = metadata.issues[issueNumber];
            const needsUpdate = !oldIssue ||
                oldIssue.updated !== issue.updatedAt ||
                oldIssue.path !== targetPath;

            let contentHash = oldIssue?.contentHash;

            if (needsUpdate) {
                stats.pulled.count++;
                stats.pulled.issues.push(issueNumber);

                // Comments are already in issue.comments - no separate fetch needed!
                const markdown = this.#formatIssueMarkdown(issue, issue.comments.nodes);
                contentHash = this.#calculateContentHash(markdown);

                await fs.mkdir(path.dirname(targetPath), { recursive: true });
                await fs.writeFile(targetPath, markdown, 'utf-8');

                if (!oldIssue) {
                    stats.pulled.created++;
                    logger.info(`✨ Created #${issueNumber}: ${targetPath}`);
                } else if (oldIssue.path && oldIssue.path !== targetPath) {
                    stats.pulled.moved++;
                    /*
                     * The fresh markdown has already been written to targetPath above,
                     * so only the stale file at the old location must be removed.
                     * (Renaming the old file onto targetPath would clobber the fresh
                     * content with stale data and desync it from contentHash.)
                     */
                    await fs.unlink(oldIssue.path).catch(e => {
                        logger.warn(`Could not remove old file for #${issueNumber}. Error: ${e.message}`);
                    });
                    logger.info(`📦 Moved #${issueNumber}: ${oldIssue.path} → ${targetPath}`);
                } else {
                    stats.pulled.updated++;
                    logger.info(`✅ Updated #${issueNumber}: ${targetPath}`);
                }
            }

            newMetadata.issues[issueNumber] = {
                state    : issue.state,
                path     : targetPath,
                updatedAt: issue.updatedAt,
                closedAt : issue.closedAt || null,
                milestone: issue.milestone?.title || null,
                title    : issue.title,
                contentHash // Store hash for push comparison
            };
        }

        /*
         * Strategy: Timeline-Based Relationship Discovery
         *
         * GitHub does NOT reliably update the `updatedAt` timestamp of a related issue when
         * a relationship change occurs (e.g., adding a sub-issue, blocking relationship).
         * This means our standard delta-sync (based on `since: lastSync`) will miss these updates
         * because the related issue is filtered out by the query.
         *
         * To fix this, we scan the `timelineItems` of every fetched issue for relationship-altering
         * events that occurred since the last sync. We collect the IDs of these related issues
         * and force-update them to ensure referential integrity.
         *
         * This timeline scan handles most cases where relationships change. For the ultra-rare
         * scenario where BOTH sides of a relationship are old and filtered out by delta sync,
         * a periodic full re-sync (e.g., monthly) ensures eventual consistency.
         */
        // Identify related issues that need force-updating
        const relatedIssuesToUpdate = new Set();
        const lastSyncDate = metadata.lastSync ? new Date(metadata.lastSync) : new Date(0);

        allIssues.forEach(issue => {
            // 1. Existing Parent Check (Legacy but safe)
            if (issue.parent) {
                relatedIssuesToUpdate.add(issue.parent.number);
            }

            // 2. Timeline Scan for Relationship Events
            if (issue.timelineItems?.nodes) {
                issue.timelineItems.nodes.forEach(event => {
                    // Only care about events that happened AFTER the last sync
                    if (new Date(event.createdAt) <= lastSyncDate) return;

                    switch (event.__typename) {
                        case 'SubIssueAddedEvent':
                        case 'SubIssueRemovedEvent':
                            if (event.subIssue) relatedIssuesToUpdate.add(event.subIssue.number);
                            break;
                        case 'ParentIssueAddedEvent':
                        case 'ParentIssueRemovedEvent':
                            if (event.parent) relatedIssuesToUpdate.add(event.parent.number);
                            break;
                        case 'BlockedByAddedEvent':
                        case 'BlockedByRemovedEvent':
                            if (event.blockingIssue) relatedIssuesToUpdate.add(event.blockingIssue.number);
                            break;
                        case 'BlockingAddedEvent':
                        case 'BlockingRemovedEvent':
                            if (event.blockedIssue) relatedIssuesToUpdate.add(event.blockedIssue.number);
                            break;
                    }
                });
            }
        });

        // Remove issues that were already updated in the main loop
        allIssues.forEach(issue => relatedIssuesToUpdate.delete(issue.number));

        if (relatedIssuesToUpdate.size > 0) {
            logger.info(`🔄 Force-updating ${relatedIssuesToUpdate.size} related issues due to relationship activity...`);

            for (const relatedIssueNumber of relatedIssuesToUpdate) {
                try {
                    const data = await GraphqlService.query(
                        FETCH_SINGLE_ISSUE,
                        {
                            owner           : aiConfig.owner,
                            repo            : aiConfig.repo,
                            number          : relatedIssueNumber,
                            maxLabels       : issueSyncConfig.maxLabelsPerIssue,
                            maxAssignees    : issueSyncConfig.maxAssigneesPerIssue,
                            maxComments     : issueSyncConfig.maxCommentsPerIssue,
                            maxSubIssues    : issueSyncConfig.maxSubIssuesPerIssue,
                            maxTimelineItems: issueSyncConfig.maxTimelineItemsPerIssue
                        },
                        true // Enable sub-issues
                    );

                    const issue = data.repository.issue;
                    if (!issue) continue;

                    const targetPath = this.#getIssuePath(issue);
                    if (!targetPath) continue;

                    const markdown    = this.#formatIssueMarkdown(issue, issue.comments.nodes);
                    const contentHash = this.#calculateContentHash(markdown);

                    await fs.mkdir(path.dirname(targetPath), { recursive: true });
                    await fs.writeFile(targetPath, markdown, 'utf-8');

                    stats.pulled.updated++;
                    stats.pulled.issues.push(relatedIssueNumber);
                    logger.info(`✅ Force-updated related issue #${relatedIssueNumber}`);

                    newMetadata.issues[relatedIssueNumber] = {
                        state    : issue.state,
                        path     : targetPath,
                        updatedAt: issue.updatedAt,
                        closedAt : issue.closedAt || null,
                        milestone: issue.milestone?.title || null,
                        title    : issue.title,
                        contentHash
                    };

                } catch (e) {
                    logger.error(`Failed to force-update related issue #${relatedIssueNumber}: ${e.message}`);
                }
            }
        }

        return { newMetadata, stats };
    }

    /**
     * Pushes local changes to GitHub using GraphQL mutations.
     * Uses content hash comparison to detect actual changes and prevent false updates.
     * @param {object} metadata
     * @returns {Promise<object>}
     */
    async pushToGitHub(metadata) {
        logger.info('📤 Checking for local changes to push via GraphQL...');
        const stats = { count: 0, issues: [], failures: [] };

        if (!metadata.lastSync) {
            logger.info('✨ No previous sync found, skipping push.');
            return stats;
        }

        const localFiles       = await this.#scanLocalFiles();
        const previousFailures = metadata.pushFailures || [];

        logger.debug(`Scanning ${localFiles.length} local files for changes...`);

        for (const filePath of localFiles) {
            try {
                const content     = await fs.readFile(filePath, 'utf-8');
                const parsed      = matter(content);
                const issueNumber = parsed.data.id;

                if (!issueNumber) {
                    logger.debug(`Skipping file without issue number: ${path.basename(filePath)}`);
                    continue;
                }

                // Calculate current content hash
                const currentHash = this.#calculateContentHash(content);
                const oldIssue    = metadata.issues[issueNumber];

                // Skip if no metadata exists (shouldn't happen, but be safe)
                if (!oldIssue) {
                    logger.debug(`No metadata for #${issueNumber}, skipping push`);
                    continue;
                }

                // Compare content hash - skip if unchanged
                if (oldIssue.contentHash && oldIssue.contentHash === currentHash) {
                    logger.debug(`No content change for #${issueNumber}, skipping`);
                    continue;
                }

                // Skip previously failed pushes
                if (previousFailures.includes(issueNumber)) {
                    logger.debug(`Skipping previously failed push for issue #${issueNumber}`);
                    stats.failures.push(issueNumber);
                    continue;
                }

                logger.info(`📝 Content changed for #${issueNumber}`);

                // Step 1: Get the issue's GraphQL ID
                const idData = await GraphqlService.query(GET_ISSUE_ID, {
                    owner : aiConfig.owner,
                    repo  : aiConfig.repo,
                    number: issueNumber
                });

                // Guard: the issue may have been deleted/transferred on GitHub
                const issueId = idData.repository.issue?.id;

                if (!issueId) {
                    logger.warn(`⚠️ Issue #${issueNumber} not found on GitHub, skipping push`);
                    stats.failures.push(issueNumber);
                    continue;
                }

                // Step 2: Prepare the updated content
                // Remove comments section and everything after it
                let bodyContent = parsed.content.split(issueSyncConfig.commentSectionDelimiter)[0];

                // Remove Activity Log section and everything after it (if present)
                // This prevents the read-only activity log from being pushed back to the issue body
                bodyContent = bodyContent.split('## Activity Log')[0].trim();

                // Extract title from the markdown
                const titleMatch = bodyContent.match(/^#\s+(.+)$/m);
                const title      = titleMatch ? titleMatch[1] : parsed.data.title;

                // Remove only the title from body
                const cleanBody = bodyContent
                    .replace(/^#\s+.+$/m, '') // Remove title
                    .trim();

                // Step 3: Execute the mutation
                await GraphqlService.query(UPDATE_ISSUE, {
                    issueId,
                    title,
                    body: cleanBody
                });

                logger.info(`✅ Updated GitHub issue #${issueNumber} via GraphQL`);
                stats.count++;
                stats.issues.push(issueNumber);
            } catch (e) {
                logger.warn(`⚠️ Could not push changes for file ${path.basename(filePath)}. Error: ${e.message}`);
                // Re-parse inside its own guard: if the re-read itself fails,
                // it must not abort the whole push loop.
                try {
                    const parsed = matter(await fs.readFile(filePath, 'utf-8'));
                    if (parsed.data.id) {
                        stats.failures.push(parsed.data.id);
                    }
                } catch (readError) {
                    logger.warn(`Could not determine issue id for ${path.basename(filePath)}: ${readError.message}`);
                }
            }
        }

        if (stats.count > 0) {
            logger.info(`📤 Pushed ${stats.count} local change(s) to GitHub`);
        }
        if (stats.failures.length > 0) {
            logger.warn(`⚠️ Encountered ${stats.failures.length} push failure(s).`);
        }

        return stats;
    }

    /**
     * Reconciles the locations of closed issues in the active directory.
     * This handles the case where a new release is created but issues weren't updated,
     * so they didn't get moved during the pull operation (delta sync limitation).
     *
     * CRITICAL: This method ONLY processes issues that are:
     * 1. Currently in the active issues directory (not already archived)
     * 2. In a CLOSED state
     * 3. Should be archived based on milestone or release date
     * @param {object} metadata The current metadata object
     * @returns {Promise<object>} Stats about reconciled issues
     */
    async reconcileClosedIssueLocations(metadata) {
        logger.info('🔄 Reconciling closed issue locations...');

        const stats = { count: 0, issues: [] };

        // Ensure releases are loaded
        if (!ReleaseSyncer.sortedReleases || ReleaseSyncer.sortedReleases.length === 0) {
            logger.warn('No releases available for reconciliation, skipping.');
            return stats;
        }

        for (const issueNumber in metadata.issues) {
            const issueData = metadata.issues[issueNumber];

            // CRITICAL: Only process issues in the active directory
            if (!issueData.path.startsWith(issueSyncConfig.issuesDir)) {
                continue; // Already archived, skip it
            }

            // Only process CLOSED issues
            if (issueData.state !== 'CLOSED') {
                continue;
            }

            // Calculate where this closed issue SHOULD be
            const correctPath = this.#getIssuePath({
                number   : parseInt(issueNumber, 10),
                state    : issueData.state,
                milestone: issueData.milestone ? { title: issueData.milestone } : null,
                closedAt : issueData.closedAt,
                updatedAt: issueData.updatedAt
            });

            // If the correct path is null, the issue should be dropped (shouldn't happen here)
            if (!correctPath) {
                logger.warn(`Issue #${issueNumber} has null target path during reconciliation, skipping.`);
                continue;
            }

            // Check if the issue needs to be moved to an archive
            if (issueData.path !== correctPath) {
                // Verify the correct path is actually in an archive, not back to active directory
                if (correctPath.startsWith(issueSyncConfig.issuesDir) &&
                    !correctPath.includes(issueSyncConfig.archiveDir)) {
                    logger.debug(`Issue #${issueNumber} correct path is still in active directory, no move needed.`);
                    continue;
                }

                logger.info(`📦 Archiving closed issue #${issueNumber}: ${issueData.path} → ${correctPath}`);

                try {
                    // Ensure target directory exists
                    await fs.mkdir(path.dirname(correctPath), { recursive: true });

                    // Move the file
                    await fs.rename(issueData.path, correctPath);

                    // Update metadata
                    metadata.issues[issueNumber].path = correctPath;

                    stats.count++;
                    stats.issues.push(parseInt(issueNumber, 10));

                    logger.info(`✅ Archived #${issueNumber} to ${path.relative(process.cwd(), correctPath)}`);
                } catch (e) {
                    logger.error(`❌ Failed to archive #${issueNumber}: ${e.message}`);
                }
            }
        }

        if (stats.count > 0) {
            logger.info(`📦 Archived ${stats.count} closed issue(s)`);
        } else {
            logger.info('✓ No closed issues need archiving');
        }

        return stats;
    }

    /**
     * Recursively scans the configured issue directory to find all local .md issue files.
     * This operation is intentionally limited to the active issues directory as a performance
     * optimization, based on the assumption that closed/archived issues are immutable and
     * do not need to be checked for local changes to push.
     * @returns {Promise<string[]>} A flat list of absolute file paths for all found issue files.
     * @private
     */
    async #scanLocalFiles() {
        const localFiles = [];
        const scanDir = async (dir) => {
            try {
                const entries = await fs.readdir(dir, { withFileTypes: true });
                for (const entry of entries) {
                    const fullPath = path.join(dir, entry.name);
                    if (entry.isDirectory()) {
                        await scanDir(fullPath);
                    } else if (entry.isFile() && entry.name.endsWith('.md')) {
                        localFiles.push(fullPath);
                    }
                }
            } catch (e) {
                // Directory doesn't exist yet, which is fine.
            }
        };

        await scanDir(issueSyncConfig.issuesDir);

        return localFiles;
    }
}

// Register IssueSyncer with the Neo class system (configured as a singleton above).
export default Neo.setupClass(IssueSyncer);
