package com.tal.pad.javabase.spider;

import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;

import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.time.Duration;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Crawler for a Sonatype Nexus-style Maven repository.
 *
 * <p>Two modes are supported:
 * <ul>
 *   <li>{@link #crawlTo(Path)} — breadth-first, depth-limited spider over the HTML
 *       directory listing; matching files are downloaded flat into the target directory.</li>
 *   <li>{@link #crawlRest(Path, String)} — walks the Nexus REST components API
 *       ({@code /service/rest/v1/components}) with type filters, per-library version caps,
 *       a total-file cap, and parallel downloads preserving the repository layout.</li>
 * </ul>
 *
 * <p>Instances are NOT thread-safe: configure via the fluent setters before starting a crawl.
 */
public class RepoSpider {

	private final HttpClient httpClient;
	private final String baseUrl;
	private final String authHeader; // precomputed "Basic ..." header; null when no credentials
	private final int maxDepth;
	private final long delayMillis;

	// Matches href="..." or href='...'. The backreference \1 forces the closing quote to be
	// the same kind as the opening quote, so values containing the other quote character do
	// not over-consume. The URL is in group 2.
	private static final Pattern HREF_PATTERN = Pattern.compile("href\\s*=\\s*(['\"])(.*?)\\1", Pattern.CASE_INSENSITIVE);

	private static final String[] DOWNLOAD_EXTS = new String[]{".jar", ".pom", ".aar", ".xml", ".sha1", ".md5"};

	private final Gson gson = new Gson();

	// Filters for REST mode
	private boolean includeJar = true;           // regular jars
	private boolean includeSources = true;       // *-sources.jar
	private boolean includeJavadoc = true;       // *-javadoc.jar
	private boolean includePom = true;           // pom files
	private boolean includeAar = true;           // android aars
	private long maxFiles = Long.MAX_VALUE;      // limit total downloads
	private int perLibraryMaxVersions = -1;      // <=0 means unlimited; otherwise newest N per group:name
	private int concurrency = 4;                 // parallel download threads

	/**
	 * @param baseUrl     repository root or browse URL; a trailing slash is added if missing
	 * @param username    basic-auth user, or null/empty for anonymous access
	 * @param password    basic-auth password; null is treated as empty
	 * @param maxDepth    maximum directory depth for {@link #crawlTo(Path)} (clamped to >= 0)
	 * @param delayMillis politeness delay between link visits in HTML mode (clamped to >= 0)
	 */
	public RepoSpider(String baseUrl, String username, String password, int maxDepth, long delayMillis) {
		this.baseUrl = normalizeBase(baseUrl);
		this.maxDepth = Math.max(0, maxDepth);
		this.delayMillis = Math.max(0, delayMillis);
		this.httpClient = HttpClient.newBuilder()
				.connectTimeout(Duration.ofSeconds(20))
				.followRedirects(HttpClient.Redirect.ALWAYS)
				.build();
		if (username != null && !username.isEmpty()) {
			// Encode credentials explicitly as UTF-8; getBytes() would use the platform charset.
			String credentials = username + ":" + (password == null ? "" : password);
			String token = Base64.getEncoder().encodeToString(credentials.getBytes(StandardCharsets.UTF_8));
			this.authHeader = "Basic " + token;
		} else {
			this.authHeader = null;
		}
	}

	public RepoSpider setIncludeJar(boolean v) { this.includeJar = v; return this; }
	public RepoSpider setIncludeSources(boolean v) { this.includeSources = v; return this; }
	public RepoSpider setIncludeJavadoc(boolean v) { this.includeJavadoc = v; return this; }
	public RepoSpider setIncludePom(boolean v) { this.includePom = v; return this; }
	public RepoSpider setIncludeAar(boolean v) { this.includeAar = v; return this; }
	public RepoSpider setMaxFiles(long v) { this.maxFiles = v; return this; }
	public RepoSpider setPerLibraryMaxVersions(int v) { this.perLibraryMaxVersions = v; return this; }
	public RepoSpider setConcurrency(int v) { this.concurrency = Math.max(1, v); return this; }

	/**
	 * Breadth-first crawl of the HTML directory listing starting at {@code baseUrl}.
	 * Directory links (ending in {@code /}) are enqueued up to {@code maxDepth}; files with
	 * one of the known extensions are downloaded (flattened) into {@code downloadDir}.
	 *
	 * @throws IOException          if a page fetch fails at the transport level
	 * @throws InterruptedException if interrupted while fetching or sleeping
	 */
	public void crawlTo(Path downloadDir) throws IOException, InterruptedException {
		Objects.requireNonNull(downloadDir, "downloadDir");
		Files.createDirectories(downloadDir);

		ArrayDeque<QueueItem> queue = new ArrayDeque<>();
		Set<String> visited = new HashSet<>();
		queue.add(new QueueItem(baseUrl, 0));

		while (!queue.isEmpty()) {
			QueueItem item = queue.pollFirst();
			if (!visited.add(item.url)) continue; // already processed
			if (item.depth > maxDepth) continue;  // defensive; enqueue below already guards depth

			System.out.println("Visiting depth=" + item.depth + " -> " + item.url);
			String html = fetch(item.url);
			if (html == null) continue; // non-2xx already logged by fetch()

			Set<String> links = extractLinks(item.url, html);
			System.out.println("Found links: " + links.size() + " on " + item.url);
			if (links.isEmpty()) {
				System.out.println("No links parsed from page. The repository may have browsing disabled or uses JS-based UI.");
			}
			for (String link : links) {
				if (!link.startsWith(baseUrl)) continue; // only stay within base
				if (isDirectoryLink(link)) {
					System.out.println("Enqueue dir: " + link);
					if (item.depth < maxDepth) {
						queue.add(new QueueItem(link, item.depth + 1));
					}
				} else if (isDownloadFile(link)) {
					System.out.println("Queue download: " + link);
					downloadFile(link, downloadDir);
				}
				if (delayMillis > 0) {
					Thread.sleep(delayMillis); // politeness delay between link visits
				}
			}
		}
	}

	/**
	 * Walks the Nexus REST components API for {@code repository}, applying the configured
	 * include filters, the per-library newest-N version cap and the total file cap, then
	 * downloads matching assets in parallel while preserving the repository-relative layout
	 * under {@code downloadDir}.
	 *
	 * @throws IOException          if an API page fetch fails at the transport level
	 * @throws InterruptedException if interrupted while fetching
	 */
	public void crawlRest(Path downloadDir, String repository) throws IOException, InterruptedException {
		Objects.requireNonNull(downloadDir, "downloadDir");
		Files.createDirectories(downloadDir);

		ExecutorService pool = Executors.newFixedThreadPool(concurrency);
		List<Future<?>> futures = new ArrayList<>(); // retained for parity; completion enforced via awaitTermination
		AtomicLong totalAttempts = new AtomicLong(0);
		AtomicLong totalScheduled = new AtomicLong(0);
		String apiBase = resolveApiBase(baseUrl);
		Set<String> printedGav = ConcurrentHashMap.newKeySet();
		Map<String, Integer> perLibCounts = new HashMap<>(); // group:name -> versions consumed
		try {
			String token = null;
			int page = 0;
			while (true) {
				if (totalScheduled.get() >= maxFiles) break;
				// Encode query values: repository names and continuation tokens are external input.
				String url = apiBase + "/service/rest/v1/components?repository="
						+ URLEncoder.encode(repository, StandardCharsets.UTF_8)
						+ "&sort=uploaded&direction=desc"
						+ (token != null ? "&continuationToken=" + URLEncoder.encode(token, StandardCharsets.UTF_8) : "");
				System.out.println("REST page=" + page + " -> " + url);
				String body = fetch(url);
				if (body == null) break;
				JsonObject root = gson.fromJson(body, JsonObject.class);
				JsonArray items = root.getAsJsonArray("items");
				if (items == null || items.size() == 0) {
					System.out.println("No items returned.");
					break;
				}
				for (JsonElement item : items) {
					if (totalScheduled.get() >= maxFiles) break;
					JsonObject obj = item.getAsJsonObject();
					String group = stringOf(obj, "group");
					String name = stringOf(obj, "name");
					String version = stringOf(obj, "version");
					String uploaded = stringOf(obj, "uploaded");
					String perLibKey = group + ":" + name;
					int used = perLibCounts.getOrDefault(perLibKey, 0);
					if (perLibraryMaxVersions > 0 && used >= perLibraryMaxVersions) {
						continue; // newest-N cap hit for this library (pages sorted by upload desc)
					}
					JsonArray assets = obj.getAsJsonArray("assets");
					if (assets == null) continue;
					List<JsonObject> includedAssets = new ArrayList<>();
					for (JsonElement ae : assets) {
						JsonObject asset = ae.getAsJsonObject();
						String downloadUrl = stringOf(asset, "downloadUrl");
						String path = stringOf(asset, "path");
						if (downloadUrl == null) continue;
						if (!shouldInclude(path != null ? path : downloadUrl)) continue;
						includedAssets.add(asset);
					}
					if (includedAssets.isEmpty()) continue;
					String gavVersionKey = perLibKey + ":" + version;
					if (printedGav.add(gavVersionKey)) { // Set.add is an atomic contains-then-add
						System.out.println((group != null ? group : "(no-group)") + "/");
						System.out.println("  " + (name != null ? name : "(no-name)") + "/");
						System.out.println("    " + (version != null ? version : "(no-version)") + "/");
						System.out.println("      Uploaded: " + (uploaded != null ? uploaded : "(unknown)"));
					}
					System.out.println("      includedAssets size: " + includedAssets.size());
					for (JsonObject asset : includedAssets) {
						if (totalScheduled.get() >= maxFiles) break;
						final String downloadUrl = stringOf(asset, "downloadUrl");
						final String path = stringOf(asset, "path");
						final Long size = fileSizeOf(asset);
						System.out.println("      - " + fileNameOf(path != null ? path : downloadUrl));
						futures.add(pool.submit(() -> {
							totalAttempts.incrementAndGet();
							try {
								downloadFile(downloadUrl, downloadDir, path, size);
							} catch (Exception e) {
								// Best-effort: report and keep the other downloads going.
								System.out.println("Download task failed: " + downloadUrl + " e=" + e.getMessage());
							}
						}));
						totalScheduled.incrementAndGet();
					}
					perLibCounts.put(perLibKey, used + 1);
					System.out.println("");
				}
				token = stringOf(root, "continuationToken");
				page++;
				if (token == null || token.equals("null") || token.isEmpty()) {
					break; // last page reached
				}
			}
		} finally {
			// Always release worker threads, even when a page fetch throws mid-crawl.
			pool.shutdown();
			try {
				pool.awaitTermination(30, TimeUnit.MINUTES);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
			}
		}
		System.out.println("REST crawl complete. Attempts: " + totalAttempts.get() + ", Scheduled: " + totalScheduled.get());
	}

	/** Parses the asset's {@code fileSize} field; returns {@code null} when absent or unparsable. */
	private static Long fileSizeOf(JsonObject asset) {
		try {
			String s = stringOf(asset, "fileSize");
			return s != null ? Long.parseLong(s) : null;
		} catch (NumberFormatException e) {
			return null;
		}
	}

	/** Returns true when {@code path} matches one of the enabled asset categories. */
	private boolean shouldInclude(String path) {
		if (path == null) return false;
		String p = path.toLowerCase(Locale.ROOT); // locale-independent comparison
		boolean isJar = p.endsWith(".jar") && !p.endsWith("-sources.jar") && !p.endsWith("-javadoc.jar");
		boolean isSources = p.endsWith("-sources.jar");
		boolean isJavadoc = p.endsWith("-javadoc.jar");
		boolean isPom = p.endsWith(".pom");
		boolean isAar = p.endsWith(".aar");
		return (includeJar && isJar) || (includeSources && isSources) || (includeJavadoc && isJavadoc) || (includePom && isPom) || (includeAar && isAar);
	}

	/** Null-safe string accessor: returns {@code null} for missing keys or JSON null. */
	private static String stringOf(JsonObject obj, String key) {
		if (obj == null || !obj.has(key) || obj.get(key).isJsonNull()) return null;
		return obj.get(key).getAsString();
	}

	/** Last path segment of a path or URL, for display purposes. */
	private static String fileNameOf(String pathOrUrl) {
		if (pathOrUrl == null || pathOrUrl.isEmpty()) return "(unknown)";
		int idx = pathOrUrl.lastIndexOf('/');
		return idx >= 0 ? pathOrUrl.substring(idx + 1) : pathOrUrl;
	}

	/** Derives the server root (scheme://host:port) from a browse URL by stripping "/repository/...". */
	private String resolveApiBase(String base) {
		int idx = base.indexOf("/repository/");
		if (idx > 0) {
			return base.substring(0, idx);
		}
		// Already root like http://host:port/
		return base.endsWith("/") ? base.substring(0, base.length() - 1) : base;
	}

	/**
	 * GETs {@code url} as a string, sending the auth header when configured.
	 * Returns the body on 2xx; logs and returns {@code null} for any other status.
	 */
	private String fetch(String url) throws IOException, InterruptedException {
		HttpRequest.Builder builder = HttpRequest.newBuilder().uri(URI.create(url)).timeout(Duration.ofSeconds(30)).GET();
		if (authHeader != null) builder.header("Authorization", authHeader);
		HttpResponse<String> resp = httpClient.send(builder.build(), HttpResponse.BodyHandlers.ofString());
		System.out.println("GET " + url + " -> " + resp.statusCode());
		if (resp.statusCode() >= 200 && resp.statusCode() < 300) {
			return resp.body();
		}
		System.out.println("Skip url status=" + resp.statusCode() + " " + url);
		return null;
	}

	/** HTML-mode download: saves the file flat (by its URL file name) under {@code downloadDir}. */
	private void downloadFile(String fileUrl, Path downloadDir) {
		downloadFile(fileUrl, downloadDir, null, null);
	}

	/**
	 * Downloads {@code fileUrl} under {@code downloadDir}, preserving {@code relativePath}
	 * when given (otherwise flattening to the URL file name).
	 *
	 * <p>If the target already exists it is skipped when {@code expectedSize} is unknown or
	 * matches the existing file; on a size mismatch the file is saved under a {@code .dupN}
	 * suffixed name instead. Failures are logged, never thrown: a single bad file must not
	 * abort the crawl.
	 */
	private void downloadFile(String fileUrl, Path downloadDir, String relativePath, Long expectedSize) {
		try {
			URI uri = new URI(fileUrl);
			Path target = resolveTarget(downloadDir, uri, relativePath);
			if (target == null) {
				// Path-traversal guard rejected a server-supplied path escaping downloadDir.
				System.out.println("Refusing unsafe path: " + relativePath + " from " + fileUrl);
				return;
			}
			if (target.getParent() != null) {
				Files.createDirectories(target.getParent());
			}
			if (Files.exists(target)) {
				if (expectedSize == null || sameSize(target, expectedSize)) {
					System.out.println("Skip (exists): " + target.toAbsolutePath());
					return;
				}
				// Same name, different size: keep both by saving under a .dupN suffix.
				Path suffixed = uniqueSuffix(target);
				System.out.println("Name collision, saving as: " + suffixed.getFileName());
				target = suffixed;
			}
			HttpRequest.Builder builder = HttpRequest.newBuilder().uri(uri).timeout(Duration.ofMinutes(2)).GET();
			if (authHeader != null) builder.header("Authorization", authHeader);
			HttpResponse<InputStream> resp = httpClient.send(builder.build(), HttpResponse.BodyHandlers.ofInputStream());
			if (resp.statusCode() >= 200 && resp.statusCode() < 300) {
				try (InputStream in = resp.body()) {
					Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
				}
				System.out.println("Downloaded: " + target.toAbsolutePath());
			} else {
				System.out.println("Download failed status=" + resp.statusCode() + " " + fileUrl);
			}
		} catch (IOException e) {
			// Best-effort download: log and continue instead of aborting the whole crawl.
			System.out.println("Download error: " + fileUrl + " e=" + e.getMessage());
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt();
		} catch (URISyntaxException e) {
			System.out.println("Bad URI: " + fileUrl + " e=" + e.getMessage());
		}
	}

	/**
	 * Resolves the on-disk target for a download. When {@code relativePath} is given, leading
	 * slashes are stripped and the normalized result must stay inside {@code downloadDir};
	 * returns {@code null} if it would escape (path traversal). Otherwise the URL file name
	 * is used directly under {@code downloadDir}.
	 */
	private static Path resolveTarget(Path downloadDir, URI uri, String relativePath) {
		Path base = downloadDir.toAbsolutePath().normalize();
		if (relativePath != null && !relativePath.isEmpty()) {
			String rel = relativePath;
			while (rel.startsWith("/")) {
				rel = rel.substring(1);
			}
			Path target = base.resolve(rel).normalize();
			return target.startsWith(base) ? target : null;
		}
		return base.resolve(safeFileName(uri.getPath()));
	}

	/** True when the existing file's size equals {@code expectedSize}; false on any I/O error. */
	private static boolean sameSize(Path target, long expectedSize) {
		try {
			return Files.size(target) == expectedSize;
		} catch (IOException e) {
			return false;
		}
	}

	/** First free "name.dupN.ext" variant of {@code target}; returns {@code target} itself if free. */
	private static Path uniqueSuffix(Path target) {
		if (!Files.exists(target)) return target;
		String name = target.getFileName().toString();
		int dot = name.lastIndexOf('.');
		String base = dot > 0 ? name.substring(0, dot) : name;
		String ext = dot > 0 ? name.substring(dot) : "";
		int i = 1;
		Path parent = target.getParent();
		while (true) {
			Path cand = parent.resolve(base + ".dup" + i + ext);
			if (!Files.exists(cand)) return cand;
			i++;
		}
	}

	/** Directory-listing convention: directory links end with a slash. */
	private static boolean isDirectoryLink(String url) {
		return url.endsWith("/");
	}

	/** True when the URL ends with one of the downloadable artifact extensions. */
	private static boolean isDownloadFile(String url) {
		for (String ext : DOWNLOAD_EXTS) {
			if (url.endsWith(ext)) return true;
		}
		return false;
	}

	/** Last segment of a URL path; falls back to "download" for paths ending in '/'. */
	private static String safeFileName(String path) {
		int idx = path.lastIndexOf('/') + 1;
		String name = path.substring(idx);
		return name.isEmpty() ? "download" : name;
	}

	/** Ensures the base URL ends with a slash so prefix checks and resolution behave uniformly. */
	private static String normalizeBase(String base) {
		if (!base.endsWith("/")) return base + "/";
		return base;
	}

	/** Extracts all href targets from {@code html}, resolved to absolute URLs against {@code pageUrl}. */
	private static Set<String> extractLinks(String pageUrl, String html) {
		Set<String> links = new HashSet<>();
		Matcher m = HREF_PATTERN.matcher(html);
		while (m.find()) {
			String href = m.group(2); // group 1 is the quote character
			if (href == null || href.isEmpty() || href.startsWith("#")) continue;
			String absolute = toAbsolute(pageUrl, href);
			if (absolute != null) {
				links.add(absolute);
			}
		}
		return links;
	}

	/** Resolves {@code href} against {@code pageUrl}; returns {@code null} for malformed pages. */
	private static String toAbsolute(String pageUrl, String href) {
		try {
			URI page = new URI(pageUrl);
			URI abs = page.resolve(href);
			return abs.toString();
		} catch (URISyntaxException e) {
			return null;
		}
	}

	/** Immutable BFS work item: a URL plus the directory depth it was discovered at. */
	private static class QueueItem {
		final String url;
		final int depth;
		QueueItem(String url, int depth) { this.url = url; this.depth = depth; }
	}
}