| |
| |
| |
|
|
| import { createLogger } from '@automaker/utils'; |
| import { secureFs } from '@automaker/platform'; |
| import path from 'path'; |
| import { exec } from 'child_process'; |
| import { promisify } from 'util'; |
| import { BINARY_EXTENSIONS, type FileStatus, type MergeStateInfo } from './types.js'; |
| import { isGitRepo, parseGitStatus, detectMergeState, detectMergeCommit } from './status.js'; |
|
|
// Promisified child_process.exec, used to shell out to git commands.
const execAsync = promisify(exec);
// Module-scoped logger; all diagnostics from this file are tagged 'GitUtils'.
const logger = createLogger('GitUtils');

// Files larger than this (1 MiB) get a size placeholder instead of a full synthetic diff.
const MAX_SYNTHETIC_DIFF_SIZE = 1024 * 1024;
|
|
| |
| |
| |
| function isBinaryFile(filePath: string): boolean { |
| const ext = path.extname(filePath).toLowerCase(); |
| return BINARY_EXTENSIONS.has(ext); |
| } |
|
|
| |
| |
| |
| |
| function createNewFileDiff(relativePath: string, mode: string, contentLines: string[]): string { |
| const lineCount = contentLines.length; |
| const addedLines = contentLines.map((line) => `+${line}`).join('\n'); |
|
|
| return `diff --git a/${relativePath} b/${relativePath} |
| new file mode ${mode} |
| index 0000000..0000000 |
| --- /dev/null |
| +++ b/${relativePath} |
| @@ -0,0 +${lineCount === 1 ? '1' : `1,${lineCount}`} @@ |
| ${addedLines} |
| `; |
| } |
|
|
| |
| |
| |
| |
| |
| |
| export async function generateSyntheticDiffForNewFile( |
| basePath: string, |
| relativePath: string |
| ): Promise<string> { |
| |
| const cleanPath = relativePath.endsWith('/') ? relativePath.slice(0, -1) : relativePath; |
| const fullPath = path.join(basePath, cleanPath); |
|
|
| try { |
| |
| const stats = await secureFs.stat(fullPath); |
|
|
| |
| |
| if (stats.isDirectory()) { |
| const filesInDir = await listAllFilesInDirectory(basePath, cleanPath); |
| if (filesInDir.length === 0) { |
| |
| return createNewFileDiff(cleanPath, '040000', ['[Empty directory]']); |
| } |
| |
| |
| const diffs: string[] = []; |
| for (const filePath of filesInDir) { |
| diffs.push(await generateSyntheticDiffForNewFile(basePath, filePath)); |
| } |
| return diffs.join(''); |
| } |
|
|
| |
| if (isBinaryFile(cleanPath)) { |
| return `diff --git a/${cleanPath} b/${cleanPath} |
| new file mode 100644 |
| index 0000000..0000000 |
| Binary file ${cleanPath} added |
| `; |
| } |
|
|
| const fileSize = Number(stats.size); |
| if (fileSize > MAX_SYNTHETIC_DIFF_SIZE) { |
| const sizeKB = Math.round(fileSize / 1024); |
| return createNewFileDiff(cleanPath, '100644', [`[File too large to display: ${sizeKB}KB]`]); |
| } |
|
|
| |
| const content = (await secureFs.readFile(fullPath, 'utf-8')) as string; |
| const hasTrailingNewline = content.endsWith('\n'); |
| const lines = content.split('\n'); |
|
|
| |
| if (lines.length > 0 && lines[lines.length - 1] === '') { |
| lines.pop(); |
| } |
|
|
| |
| const lineCount = lines.length; |
| const addedLines = lines.map((line) => `+${line}`).join('\n'); |
|
|
| let diff = `diff --git a/${cleanPath} b/${cleanPath} |
| new file mode 100644 |
| index 0000000..0000000 |
| --- /dev/null |
| +++ b/${cleanPath} |
| @@ -0,0 +1,${lineCount} @@ |
| ${addedLines}`; |
|
|
| |
| if (!hasTrailingNewline && content.length > 0) { |
| diff += '\n\\ No newline at end of file'; |
| } |
|
|
| return diff + '\n'; |
| } catch (error) { |
| |
| logger.error(`Failed to generate synthetic diff for ${fullPath}:`, error); |
| |
| return createNewFileDiff(cleanPath, '100644', ['[Unable to read file content]']); |
| } |
| } |
|
|
| |
| |
| |
| export async function appendUntrackedFileDiffs( |
| basePath: string, |
| existingDiff: string, |
| files: Array<{ status: string; path: string }> |
| ): Promise<string> { |
| |
| const untrackedFiles = files.filter((f) => f.status === '?'); |
|
|
| if (untrackedFiles.length === 0) { |
| return existingDiff; |
| } |
|
|
| |
| const syntheticDiffs = await Promise.all( |
| untrackedFiles.map((f) => generateSyntheticDiffForNewFile(basePath, f.path)) |
| ); |
|
|
| |
| const combinedDiff = existingDiff + syntheticDiffs.join(''); |
|
|
| return combinedDiff; |
| } |
|
|
| |
| |
| |
| |
| export async function listAllFilesInDirectory( |
| basePath: string, |
| relativePath: string = '' |
| ): Promise<string[]> { |
| const files: string[] = []; |
| const fullPath = path.join(basePath, relativePath); |
|
|
| |
| const skipDirs = new Set([ |
| 'node_modules', |
| '.git', |
| '.automaker', |
| 'dist', |
| 'build', |
| '.next', |
| '.nuxt', |
| '__pycache__', |
| '.cache', |
| 'coverage', |
| '.venv', |
| 'venv', |
| 'target', |
| 'vendor', |
| '.gradle', |
| 'out', |
| 'tmp', |
| '.tmp', |
| ]); |
|
|
| try { |
| const entries = await secureFs.readdir(fullPath, { withFileTypes: true }); |
|
|
| for (const entry of entries) { |
| |
| if (entry.name.startsWith('.') && entry.name !== '.env') { |
| continue; |
| } |
|
|
| const entryRelPath = relativePath ? `${relativePath}/${entry.name}` : entry.name; |
|
|
| if (entry.isDirectory()) { |
| if (!skipDirs.has(entry.name)) { |
| const subFiles = await listAllFilesInDirectory(basePath, entryRelPath); |
| files.push(...subFiles); |
| } |
| } else if (entry.isFile()) { |
| files.push(entryRelPath); |
| } |
| } |
| } catch (error) { |
| |
| logger.error(`Error reading directory ${fullPath}:`, error); |
| } |
|
|
| return files; |
| } |
|
|
| |
| |
| |
| |
| export async function generateDiffsForNonGitDirectory( |
| basePath: string |
| ): Promise<{ diff: string; files: FileStatus[] }> { |
| const allFiles = await listAllFilesInDirectory(basePath); |
|
|
| const files: FileStatus[] = allFiles.map((filePath) => ({ |
| status: '?', |
| path: filePath, |
| statusText: 'New', |
| })); |
|
|
| |
| const syntheticDiffs = await Promise.all( |
| files.map((f) => generateSyntheticDiffForNewFile(basePath, f.path)) |
| ); |
|
|
| return { |
| diff: syntheticDiffs.join(''), |
| files, |
| }; |
| } |
|
|
| |
| |
| |
| |
| |
| export async function getGitRepositoryDiffs(repoPath: string): Promise<{ |
| diff: string; |
| files: FileStatus[]; |
| hasChanges: boolean; |
| mergeState?: MergeStateInfo; |
| }> { |
| |
| const isRepo = await isGitRepo(repoPath); |
|
|
| if (!isRepo) { |
| |
| const result = await generateDiffsForNonGitDirectory(repoPath); |
| return { |
| diff: result.diff, |
| files: result.files, |
| hasChanges: result.files.length > 0, |
| }; |
| } |
|
|
| |
| const { stdout: diff } = await execAsync('git diff HEAD', { |
| cwd: repoPath, |
| maxBuffer: 10 * 1024 * 1024, |
| }); |
| const { stdout: status } = await execAsync('git status --porcelain', { |
| cwd: repoPath, |
| }); |
|
|
| const files = parseGitStatus(status); |
|
|
| |
| let combinedDiff = await appendUntrackedFileDiffs(repoPath, diff, files); |
|
|
| |
| const mergeState = await detectMergeState(repoPath); |
|
|
| |
| |
| if (!mergeState.isMerging) { |
| const mergeCommitInfo = await detectMergeCommit(repoPath); |
|
|
| if (mergeCommitInfo.isMergeCommit && mergeCommitInfo.mergeAffectedFiles.length > 0) { |
| |
| try { |
| const { stdout: mergeDiff } = await execAsync('git diff HEAD~1 HEAD', { |
| cwd: repoPath, |
| maxBuffer: 10 * 1024 * 1024, |
| }); |
|
|
| |
| const fileByPath = new Map(files.map((f) => [f.path, f])); |
| const existingPaths = new Set(fileByPath.keys()); |
| for (const filePath of mergeCommitInfo.mergeAffectedFiles) { |
| if (!existingPaths.has(filePath)) { |
| const newFile = { |
| status: 'M', |
| path: filePath, |
| statusText: 'Merged', |
| indexStatus: ' ', |
| workTreeStatus: ' ', |
| isMergeAffected: true, |
| mergeType: 'merged', |
| }; |
| files.push(newFile); |
| fileByPath.set(filePath, newFile); |
| existingPaths.add(filePath); |
| } else { |
| |
| const existing = fileByPath.get(filePath); |
| if (existing) { |
| existing.isMergeAffected = true; |
| existing.mergeType = 'merged'; |
| } |
| } |
| } |
|
|
| |
| |
| if (mergeDiff.trim()) { |
| |
| const workingTreeDiffPaths = new Set<string>(); |
| const diffLines = combinedDiff.split('\n'); |
| for (const line of diffLines) { |
| if (line.startsWith('diff --git')) { |
| const match = line.match(/diff --git a\/(.*?) b\/(.*)/); |
| if (match) { |
| workingTreeDiffPaths.add(match[2]); |
| } |
| } |
| } |
|
|
| |
| const mergeDiffFiles = mergeDiff.split(/(?=diff --git)/); |
| const newMergeDiffs: string[] = []; |
| for (const fileDiff of mergeDiffFiles) { |
| if (!fileDiff.trim()) continue; |
| const match = fileDiff.match(/diff --git a\/(.*?) b\/(.*)/); |
| if (match && !workingTreeDiffPaths.has(match[2])) { |
| newMergeDiffs.push(fileDiff); |
| } |
| } |
|
|
| if (newMergeDiffs.length > 0) { |
| combinedDiff = newMergeDiffs.join('') + combinedDiff; |
| } |
| } |
| } catch (mergeError) { |
| |
| logger.error('Failed to get merge commit diff:', mergeError); |
|
|
| |
| |
| |
| const existingPathsAfterError = new Set(files.map((f) => f.path)); |
| for (const filePath of mergeCommitInfo.mergeAffectedFiles) { |
| if (!existingPathsAfterError.has(filePath)) { |
| files.push({ |
| status: 'M', |
| path: filePath, |
| statusText: 'Merged', |
| indexStatus: ' ', |
| workTreeStatus: ' ', |
| isMergeAffected: true, |
| mergeType: 'merged', |
| }); |
| existingPathsAfterError.add(filePath); |
| } else { |
| |
| const existing = files.find((f) => f.path === filePath); |
| if (existing) { |
| existing.isMergeAffected = true; |
| existing.mergeType = 'merged'; |
| } |
| } |
| } |
| } |
|
|
| |
| return { |
| diff: combinedDiff, |
| files, |
| hasChanges: files.length > 0, |
| mergeState: { |
| isMerging: false, |
| mergeOperationType: 'merge', |
| isCleanMerge: true, |
| mergeAffectedFiles: mergeCommitInfo.mergeAffectedFiles, |
| conflictFiles: [], |
| isMergeCommit: true, |
| }, |
| }; |
| } |
| } |
|
|
| return { |
| diff: combinedDiff, |
| files, |
| hasChanges: files.length > 0, |
| ...(mergeState.isMerging ? { mergeState } : {}), |
| }; |
| } |
|
|