import sys
from bs4 import BeautifulSoup
import urllib.request as urllib2
import urllib
import re
import os
import save_sm_mid_imgs as saveIm
import saveToDB as DB
from datetime import datetime



def save_img_to_drive(url, path_to_file):
    """Download the resource at *url* and write its bytes to *path_to_file*.

    The original closed the response and the output file manually, which
    leaked both handles if read() or write() raised; context managers
    guarantee cleanup on every path.
    """
    with urllib2.urlopen(url) as image_on_web:
        buf = image_on_web.read()
    with open(path_to_file, "wb") as downloaded_image:
        downloaded_image.write(buf)

print('---------------------- start download -------------------------------')
print(str(datetime.now()))

# Catalog page listing the beds; each product block is a div.wrap_overflow.
url = 'http://barin.kiev.ua/krovati.htm'

response = urllib2.urlopen(url)
html = response.read()

# Name the parser explicitly: BeautifulSoup(html) with no parser argument
# emits GuessedAtParserWarning and may pick different parsers (and produce
# different trees) depending on what is installed on the machine.
soup = BeautifulSoup(html, 'html.parser')

img_classes = soup.findAll('div', {'class': 'wrap_overflow'})

# All image/page URLs on the site are relative to this host.
host = 'http://barin.kiev.ua/'




i = 0
for im in img_classes:
    # Thumbnail shown on the catalog page.
    img = im.findNext('img')
    img_url = host + img.get('src')

    # Link from the thumbnail to the product detail page.
    big_img = im.findNext('a')
    big_img_url = host + big_img.get('href')

    item_name = img.get('alt')
    print(big_img_url)

    # Fetch the detail page and take the full-size image from the first
    # a.thumbopt element. Parser named explicitly (see setup above).
    response = urllib2.urlopen(big_img_url)
    html = response.read()
    soup = BeautifulSoup(html, 'html.parser')

    big_img_classes = soup.findAll('a', {'class': 'thumbopt'})
    big_im = big_img_classes[0]
    big_img_url = host + big_im.findNext('img').get('src')
    print(big_img_url)

    newpath = r'beds_photos'
    catalog_dir = os.path.join(newpath, 'catalog_img')
    large_dir = os.path.join(newpath, 'large_img')
    # Create the target directories up front. The original only created
    # `beds_photos`, so the first open() into the catalog_img / large_img
    # subfolders raised FileNotFoundError.
    os.makedirs(catalog_dir, exist_ok=True)
    os.makedirs(large_dir, exist_ok=True)

    # File name = trailing "<word chars or dashes>.jpg" of the URL.
    # The dot is escaped here; the original `(.jpg){1}$` also matched
    # names ending in e.g. "ajpg".
    regexp_pattern = r'[\w\-]+\.jpg$'

    tmp_image_name = re.search(regexp_pattern, img_url.lower())
    tmp_big_image_name = re.search(regexp_pattern, big_img_url.lower())

    # The original called tmp_big_image_name.group(0) BEFORE any None
    # check, crashing on any URL that does not end in .jpg; both matches
    # are now guarded together.
    if tmp_image_name and tmp_big_image_name:
        image_name = tmp_image_name.group(0)
        big_image_name = tmp_big_image_name.group(0)
        print("big_image_name = " + big_image_name)
        print("iteration = " + str(i))
        print("       image_name =" + image_name)
        print("       img_url    =" + img_url)
        print("      big_img_url =" + big_img_url)
        print("   big_image_name =" + big_image_name)

        # NOTE(review): the small catalog image is saved on disk under
        # big_image_name (not image_name) — kept as in the original so the
        # catalog/large files stay paired by name; confirm this is intended.
        path_to_file = os.path.join(catalog_dir, big_image_name)
        save_img_to_drive(img_url, path_to_file)

        # Record the catalog (small) photo. The folder string stored in
        # the DB is kept byte-identical to the original.
        page_from = "null"
        folder = newpath + '/' + 'catalog_img'
        photo_type = 'catalog_image'
        category = 'BEDS'
        item_name = "Beds " + str(i)
        catalog_photo_id = DB.savePhotoToBd(image_name, folder, photo_type, item_name, page_from, category)
        print("catalog_photo_id =" + str(catalog_photo_id))

        # Download and record the full-size photo.
        path_to_file = os.path.join(large_dir, big_image_name)
        save_img_to_drive(big_img_url, path_to_file)

        item_name = "Beds " + str(i)
        page_from = "null"
        folder = newpath + '/' + 'large_img'
        photo_type = 'large_image'
        print("folder =" + folder)

        catalog_big_photo_id = DB.savePhotoToBd(big_image_name, folder, photo_type, item_name, page_from, category)
        print("catalog_big_photo_id =" + str(catalog_big_photo_id))

        # Link the catalog row to its large-image row.
        DB.setCatalogLargePhoto(catalog_photo_id, catalog_big_photo_id)

        i = i + 1
        # Debug limit carried over from the original: stop after 10 items.
        # sys.exit() instead of the interactive-only exit() builtin.
        if i == 10:
            sys.exit()