Spaces:
Sleeping
Sleeping
#!/usr/bin/env python3
"""
GitHub Repository Intelligence Analyzer — CLI
─────────────────────────────────────────────
Usage examples:
    # Analyze repos from command line
    python cli.py https://github.com/c2siorg/Webiu https://github.com/django/django

    # Read URLs from a file (one per line)
    python cli.py --file repos.txt

    # Save JSON output
    python cli.py --file repos.txt --json output.json

    # Use a GitHub token (recommended — raises rate limit from 60 to 5000 req/hr)
    python cli.py --file repos.txt --token ghp_xxxxxxxxxxxx
"""
import argparse
import os
import sys

# Allow running from the project root without installing the package:
# make this file's directory importable so `src.*` resolves.
sys.path.insert(0, os.path.dirname(__file__))

from src.analyzer import GitHubClient, analyze_repo
from src.reporter import format_report, format_summary_table, to_json
def main():
    """Parse CLI arguments, analyze the requested GitHub repos, and print reports.

    URLs come from positional arguments and/or ``--file`` (one URL per line;
    blank lines and ``#`` comment lines are skipped). Exits with status 1 when
    the URL file is missing or no URLs were supplied at all.
    """
    parser = argparse.ArgumentParser(
        description="Analyze GitHub repositories for activity, complexity, and learning difficulty.",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=__doc__,  # show the module-level usage examples after the options
    )
    parser.add_argument(
        "repos",
        nargs="*",
        metavar="URL",
        help="GitHub repository URLs to analyze",
    )
    parser.add_argument(
        "--file", "-f",
        metavar="FILE",
        help="Text file with one GitHub URL per line",
    )
    parser.add_argument(
        "--token", "-t",
        metavar="TOKEN",
        default=os.environ.get("GITHUB_TOKEN"),
        help="GitHub personal access token (or set GITHUB_TOKEN env var)",
    )
    parser.add_argument(
        "--json", "-j",
        metavar="OUTPUT",
        help="Save full JSON report to this file path",
    )
    parser.add_argument(
        "--no-summary",
        action="store_true",
        help="Skip the summary table at the end",
    )
    args = parser.parse_args()

    # Collect URLs: positional args first, then any read from --file.
    urls = list(args.repos)
    if args.file:
        try:
            with open(args.file, encoding="utf-8") as f:
                # BUG FIX: filter on the *stripped* line so indented comment
                # lines ("  # ...") are skipped too; previously startswith("#")
                # ran on the raw line and let them through as bogus URLs.
                stripped_lines = (line.strip() for line in f)
                urls.extend(
                    url for url in stripped_lines
                    if url and not url.startswith("#")
                )
        except FileNotFoundError:
            print(f"❌ File not found: {args.file}")
            sys.exit(1)

    if not urls:
        parser.print_help()
        print("\n⚠️ No URLs provided. Pass URLs as arguments or use --file.")
        sys.exit(1)

    # Auth — an anonymous client works but is heavily rate-limited by GitHub.
    client = GitHubClient(token=args.token)
    if not args.token:
        print("⚠️ No GitHub token provided. Rate limit: 60 req/hour.")
        print("   Set GITHUB_TOKEN or use --token to increase to 5000 req/hour.\n")

    print(f"🔍 Analyzing {len(urls)} repository/repositories...\n")
    reports = []
    for i, url in enumerate(urls, 1):
        print(f"  [{i}/{len(urls)}] {url}")
        report = analyze_repo(url, client)
        reports.append(report)
        print(format_report(report))

    # Summary table — only meaningful when comparing two or more repos.
    if not args.no_summary and len(reports) > 1:
        print("\n📊 Summary")
        print(format_summary_table(reports))

    # JSON export
    if args.json:
        json_output = to_json(reports)
        with open(args.json, "w", encoding="utf-8") as f:
            f.write(json_output)
        print(f"\n✅ JSON report saved to: {args.json}")

    print(f"\n✅ Done. Analyzed {len(reports)} repo(s).")
| if __name__ == "__main__": | |
| main() | |