#  Copyright 2013 Christoffer
#  
#  This program is free software; you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation; either version 3 of the License, or
#  (at your option) any later version.
#  
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#  
#  You should have received a copy of the GNU General Public License
#  along with this program; if not, write to the Free Software
#  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
#  MA 02110-1301, USA.
#  
#  

#spshc main program
#v 0.1.1
#Simple Python Site Hash Checker

import urllib.request
import hashlib
import pickle
from time import sleep
from tkinter import *

class Site_node(object):
    """Record pairing a URL with the raw bytes the server returned for it."""

    def __init__(self, url, response_text):
        # Address the response was fetched from.
        self.url = url
        # Raw response body (bytes) as read from the connection.
        self.response_text = response_text

def show_notification(url):
    """Pop up a small Tk window telling the user that *url* was updated.

    Blocks in ``mainloop()`` until the user closes the window.
    """
    master = Tk()
    master.title("Update found")
    # Label(...).pack() returns None, so capturing the result (as the old
    # code did with unused locals) is pointless — just pack the widgets.
    Label(master, text="A site was updated.", font=("Helvetica", 14)).pack()
    Label(master, text=url).pack()
    master.mainloop()

def download(url_list):
    """Fetch every URL in *url_list* and return a list of Site_node objects.

    URLs that cannot be reached are reported and skipped, so the returned
    list may be shorter than the input.
    """
    new_download = []
    print("Visiting sites...")
    for url in url_list:
        try:
            # Context manager closes the connection even if read() fails
            # (the original never closed the response).
            with urllib.request.urlopen(url) as response:
                response_text = response.read()
        except (urllib.error.URLError, ValueError):
            # Narrowed from a bare except: URLError covers network/HTTP
            # failures, ValueError covers malformed URL strings. Anything
            # else is a real bug and should surface instead of being hidden.
            print("Could not visit", url)
            continue
        print("Got a response from", url)
        new_download.append(Site_node(url, response_text))

    return new_download

def compare(new_download, old_hash):
    """Hash each downloaded site and compare against the saved hashes.

    new_download -- list of Site_node objects produced by download().
    old_hash     -- list of hex digests from the previous run ([] on first
                    run, in which case no notifications are shown).

    Pops up a notification for every site whose current hash is not among
    the old ones, then persists the new hash list to old_hash.dat.
    """
    print("\nComparing new site data against old download")
    new_hash = []
    for site in new_download:
        site_hash = hashlib.md5(site.response_text).hexdigest()
        new_hash.append(site_hash)

        # NOTE(review): membership test against ALL old hashes means two
        # tracked sites swapping content would go unnoticed — preserved
        # as-is since fixing it would change behavior.
        if site_hash not in old_hash and old_hash != []:
            print("Site update found")
            show_notification(site.url)

    # Context manager flushes and closes the file even if pickling fails;
    # the original `pickle.dump(new_hash, open(...))` leaked the handle.
    with open("old_hash.dat", "wb") as hash_file:
        pickle.dump(new_hash, hash_file)

    print("Saved hashes for current sites.")


def read_saved_data():
    """Load the previously saved hashes and the list of URLs to monitor.

    Returns a tuple ``(old_hash, url_list)``:
      old_hash -- list of hex digests from the last run, [] on first run.
      url_list -- URLs read from url_list_file.txt (one per line, stripped
                  of surrounding whitespace, blank lines skipped); [] if
                  the file is missing.
    """
    print("Reading saved hash data")
    try:
        # `with` ensures the file handle is closed (the original leaked it).
        with open("old_hash.dat", "rb") as hash_file:
            old_hash = pickle.load(hash_file)
        print("Loaded saved hash data")
    except FileNotFoundError:
        print("old_hash.dat not found, assuming first run")
        old_hash = []

    print("Reading the URL list")
    # Bug fix: url_list was unbound when the file was missing, so the
    # `return` below raised NameError instead of returning gracefully.
    url_list = []
    try:
        with open("url_list_file.txt", "r") as url_list_file:
            for line in url_list_file:
                # Strip the trailing newline the original left attached —
                # it was passed straight into urlopen() with the URL.
                url = line.strip()
                if url:
                    url_list.append(url)
    except FileNotFoundError:
        print("Empty url file! Put some sites in a file named 'url_list_file.txt' and try again.")

    return old_hash, url_list


def main():
    """Run the check loop forever: load state, fetch, compare, sleep."""
    wait = 900  # Seconds between rounds; hoisted out of the loop (invariant).
    while True:
        old_hash, url_list = read_saved_data()
        new_download = download(url_list)
        compare(new_download, old_hash)
        print("Loop completed. Going to sleep for", wait//60, "minutes...\n")
        sleep(wait)


if __name__ == "__main__":
    # Guard the entry point so the module can be imported (e.g. for tests)
    # without immediately starting the infinite loop.
    main()
