#!/usr/bin/python3

from packet_data_types import *
from packets import *
from ip_address import * 

import socket
import logging
import multiprocessing
import time
import sys
import argparse

# Queue shared between the scanner workers and the writer process; assigned a
# multiprocessing.Queue in the __main__ block. Workers read it as a module
# global (assumes the 'fork' start method so children inherit it — TODO confirm
# on platforms defaulting to 'spawn').
writing_queue = None

def scanner_process(task):
    """Probe every port in the configured range on one address.

    Runs inside a multiprocessing.Pool worker. Configuration (separator,
    start_port, end_port, timeout) and the result queue are module-level
    globals set in the __main__ block (inherited via fork — the pool passes
    only the address as `task`).

    task: one IP address (anything whose str() is a connectable host).
    Results are pushed onto `writing_queue` as separator-joined strings;
    all failures are logged and swallowed so the worker keeps scanning.
    """
    # This is a certified Pool.map_async moment
    global writing_queue
    global separator
    global start_port, end_port
    global timeout

    address = str(task)
    for port in range(start_port, end_port + 1):
        # A TCP socket cannot be reused after a failed or completed connect,
        # so a fresh socket is required for every port. (The previous version
        # created one socket outside the loop: after the first close() every
        # subsequent connect raised on a dead descriptor.)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.settimeout(timeout)
        try:
            try:
                s.connect((address, port))
            except Exception as err:
                logging.info(f"{address}:{port} didn't respond, reason: {err}")
                continue

            # Minecraft status flow: handshake with next-state=1, then the
            # status request packet.
            s.send(handshake(765, address, port, 1))
            s.send(status())

            try:
                raw = read_packet(s)  # renamed from 'bytes' to avoid shadowing the builtin
                data = parse_status(raw)
                name = parse_server_description(data['description'])

                fields = [address, name, data['version']['name'], str(data['players']['online']), str(data['players']['max'])]
                output = separator.join(fields)

                logging.info(f"SERVER FOUND: {output}")
                writing_queue.put(output)

            except Exception as err:
                logging.info(f"{address} raised an error {err}")
        finally:
            # Guarantee the descriptor is released even if send/recv raised.
            s.close()


def writer_process(filename, queue):
    """Drain `queue`, writing each entry as one line to `filename`.

    Runs as a dedicated process so scanner workers never block on disk I/O.
    Terminates when the sentinel string "done" is received. If `filename`
    is falsy, returns immediately (results are then only visible in logs).

    filename: output file path, or None/"" to disable file output.
    queue: any object with a blocking .get() (a multiprocessing.Queue here).
    """
    logging.info("Writer started")
    if not filename:
        logging.info("No output file provided, closing the writer")
        return

    with open(filename, "w") as data_file:
        while True:
            data = queue.get()
            if data == "done":
                break
            data_file.write(data + "\n")
            # Flush per line so partial results survive an abrupt kill mid-scan.
            data_file.flush()
        logging.info("No more data to write, closing the writer")

        

if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        prog="Minecraft Server Scanner",
        description="Scans IPs from a given range on given ports for presence of Minecraft servers",
        epilog="Output format is <server_ip>,'<server_name>',<server_version>,<online_player_count>,<max_player_count>, the default ',' separator can be changed with the '-s' flag"
    )
    parser.add_argument("ip_range", help="Inclusive IP range, ex. 123.123.123.100-123.123.123.200 or 123.123.123.123 if you only need to scan one IP")
    parser.add_argument("port_range", help="Inclusive port range, ex. 25565-25570 or 25565 if you only need to scan on one port")
    parser.add_argument("-o", "--output-file", help="Output file path")
    parser.add_argument("-s", "--separator", default=",", help="Field separator to use in the output")
    parser.add_argument("-t", "--timeout", default=3, type=int, help="Timeout before dropping a socket connection")
    parser.add_argument("-w", "--worker-count", default=100, type=int, help="Amount of worker processes to use for scanning")

    args = parser.parse_args()

    # "a.b.c.d-e.f.g.h" is an inclusive range; a bare address scans one host.
    if "-" in args.ip_range:
        start_ip, end_ip = args.ip_range.split("-")
    else:
        start_ip = args.ip_range
        end_ip = start_ip

    ip_range = list(IP4Range(start_ip, end_ip))

    # Same convention for ports: "25565-25570" or a single port number.
    if "-" in args.port_range:
        start_port, end_port = [int(x) for x in args.port_range.split("-")]
    else:
        start_port = int(args.port_range)
        end_port = start_port

    if start_port > end_port:
        print("Start port cannot be larger than end port")
        sys.exit(1)

    # These module-level names are read by scanner_process in the workers
    # (inherited when the pool forks).
    timeout = args.timeout
    separator = args.separator

    port_count = end_port - start_port + 1
    print(f"Will scan {len(ip_range)} addresses, on {port_count} ports, for a total of {len(ip_range) * port_count} connections")

    logging.basicConfig(level=logging.DEBUG)

    writing_queue = multiprocessing.Queue()
    writer = multiprocessing.Process(target=writer_process, args=(args.output_file, writing_queue))
    writer.start()

    try:
        with multiprocessing.Pool(args.worker_count) as pool:
            logging.info("Starting querying")
            pool.map_async(scanner_process, ip_range)
            pool.close()
            logging.info("Querying started")
            pool.join()
            logging.info("Done querying")
    finally:
        # Always deliver the sentinel — even if the pool raised — so the
        # writer is never left blocked on queue.get(), then wait for it to
        # flush and exit before the main process terminates.
        writing_queue.put("done")
        writer.join()