#!/usr/bin/env python3
from tqdm.asyncio import tqdm
import argparse
import pandas as pd
import aiohttp
import asyncio
import logging

# Module-level tallies shared by every download coroutine.  Each is a
# one-element list so tasks can mutate the value in place; updates are
# serialized under the asyncio.Lock passed to download_file.
success_counter = [0]  # number of ids downloaded successfully
fail_counter = [0]  # number of ids that exhausted all retries

async def download_file(session, id, sem, smi_file, failed_file, lock, max_retries=3):
    """Download the .smi record for one ZINC id and append it to smi_file.

    Retries with exponential backoff; after max_retries failures the id is
    appended to failed_file instead.  Counter updates and file writes are
    serialized under `lock`.

    Args:
        session: aiohttp.ClientSession used for the HTTP GET.
        id: ZINC substance identifier (shadows the `id` builtin; name kept
            for interface compatibility with existing callers).
        sem: asyncio.Semaphore bounding concurrent in-flight requests.
        smi_file: open text file receiving successful SMILES payloads.
        failed_file: open text file receiving ids that never succeeded.
        lock: asyncio.Lock protecting file writes and the shared counters.
        max_retries: total number of attempts before giving up.
    """
    url = f'https://zinc20.docking.org/substances/{id}.smi'
    for attempt in range(max_retries):
        async with sem:
            try:
                async with session.get(url) as response:
                    response.raise_for_status()
                    content = await response.text()
                async with lock:
                    smi_file.write(content)
                    success_counter[0] += 1
                return
            except Exception:
                if attempt == max_retries - 1:
                    async with lock:
                        failed_file.write(f"{id}\n")
                        fail_counter[0] += 1
                    # Last attempt failed: record and stop — no pointless
                    # final backoff sleep.
                    return
        # Exponential backoff OUTSIDE the semaphore so the concurrency slot
        # is freed for other tasks while this one waits.
        await asyncio.sleep(2 ** attempt)

async def download_all(zinc_ids, output_file, failed_file, concurrency=10):
    """Concurrently fetch every id in zinc_ids and print a summary.

    Args:
        zinc_ids: iterable of ZINC substance ids to download.
        output_file: path of the .smi file receiving successful payloads.
        failed_file: path of the file recording ids that never succeeded.
        concurrency: maximum number of simultaneous HTTP requests.
    """
    request_slots = asyncio.Semaphore(concurrency)
    write_lock = asyncio.Lock()
    async with aiohttp.ClientSession() as session:
        # Ordinary synchronous file handles; per-task writes are serialized
        # through write_lock inside download_file.
        with open(output_file, "w", encoding="utf-8") as smi_file, \
             open(failed_file, "w", encoding="utf-8") as fail_file:
            fail_file.write("zinc_id\n")
            pending = []
            for zinc_id in zinc_ids:
                pending.append(
                    download_file(session, zinc_id, request_slots,
                                  smi_file, fail_file, write_lock)
                )
            # Drain completions as they arrive, with a progress bar.
            for finished in tqdm(asyncio.as_completed(pending),
                                 total=len(pending), desc="Downloading"):
                await finished

    print(f"\nDownload completed: {success_counter[0]} succeeded, {fail_counter[0]} failed.")
    if fail_counter[0]:
        print(f"Failed IDs saved to: {failed_file}")

def main():
    """CLI entry point: parse arguments, load the id list, run the downloads."""
    parser = argparse.ArgumentParser(description="Download SMI file from zinc id list via AIO.")
    parser.add_argument("-i", "--input", required=True, help="Input TSV/CSV file with zinc_id column")
    parser.add_argument("-o", "--output", required=True, help="Output smi file")
    parser.add_argument("-f", "--failed", default="failed_ids.txt", help="Failed IDs output file")
    parser.add_argument("-c", "--concurrency", type=int, default=10, help="Number of concurrent downloads")
    args = parser.parse_args()

    try:
        # sep=None + the python engine lets pandas sniff the delimiter,
        # so both TSV and CSV inputs work.
        table = pd.read_csv(args.input, sep=None, engine="python")
        if "zinc_id" not in table.columns:
            raise ValueError("Input TSV must contain a 'zinc_id' column.")
        zinc_ids = table["zinc_id"].unique()
        print(f"Found unique zinc id: {len(zinc_ids)}")
    except Exception as e:
        print(f"Error reading input file: {e}")
        return

    asyncio.run(download_all(zinc_ids, args.output, args.failed, args.concurrency))

if __name__ == "__main__":
    main()
