from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from datetime import datetime
import urllib2, time
from BeautifulSoup import BeautifulSoup
from fdafeed.models import Company, Recall

# The FDA feed timestamps carry a fixed US-Eastern UTC offset that flips
# between EDT (-0400) and EST (-0500); try both formats in order.
_FDA_DATE_FORMATS = (
    "%a, %d %b %Y %H:%M:%S -0400",
    "%a, %d %b %Y %H:%M:%S -0500",
)


def _parse_recall_date(date_string):
    """Parse a feed date string into a naive datetime.

    Tries each known offset variant; returns None when no format
    matches (the caller skips the product in that case, matching the
    previous behavior).
    """
    for fmt in _FDA_DATE_FORMATS:
        try:
            return datetime(*time.strptime(date_string, fmt)[0:6])
        except ValueError:
            continue
    return None


def _optional_text(tag):
    """Return the stripped unicode text of a BeautifulSoup tag.

    Returns None when the tag is absent or its text is missing/empty,
    so callers can leave the corresponding model field untouched.
    """
    try:
        text = tag.string.strip()
    except AttributeError:
        # Tag missing from this <product>, or it has no .string.
        return None
    return unicode(text) if text else None


def scrape_fda(request):
    """Import products from the FDA all-recalls XML feed.

    Fetches settings.ALL_RECALLS_URL, creates a Company row per product
    company as needed, and stores each previously-unseen product as a
    Recall keyed on (release_link, company, when).  Optional fields
    (brand name, description, reason, photo link) are copied only when
    present and non-empty.  Products whose date cannot be parsed are
    skipped silently; company lookup/creation failures are counted as
    errors.  Returns a plain-text summary HttpResponse.
    """
    num_saved = 0
    num_errors = 0
    recalls_soup = BeautifulSoup(urllib2.urlopen(settings.ALL_RECALLS_URL))
    for product in recalls_soup.findAll('product'):
        try:
            # Atomic lookup-or-create replaces the old get/except/save
            # dance (which was race-prone and hid real DB errors behind
            # bare excepts).
            company, _ = Company.objects.get_or_create(
                name=unicode(product.company.string))
        except Exception:
            num_errors += 1
            continue

        when = _parse_recall_date(product.date.string)
        if when is None:
            # Unrecognized timestamp format: skip, as before.
            continue

        release_link = unicode(product.company_release_link.string)
        recall, created = Recall.objects.get_or_create(
            release_link=release_link, company=company, when=when)
        if not created:
            # Already imported on a previous run; leave it untouched.
            continue

        for field, tag in (('brand_name', product.brand_name),
                           ('description', product.product_description),
                           ('reason', product.reason),
                           ('photo_link', product.photos_link)):
            value = _optional_text(tag)
            if value is not None:
                setattr(recall, field, value)
        recall.save()
        num_saved += 1

    return HttpResponse("Number of products saved: " + str(num_saved)
                        + " / Num errors: " + str(num_errors))



def scrape_rss(request):
    """Refresh Recall summaries from the FDA RSS feed.

    Fetches settings.ALL_RECALLS_RSS and, for every <item> whose guid
    matches an existing Recall's release_link, copies the item's
    description into that recall's summary.  Items with no matching
    recall are ignored.  Returns a plain-text count HttpResponse.
    """
    num_updated = 0
    feed_soup = BeautifulSoup(urllib2.urlopen(settings.ALL_RECALLS_RSS))
    for item in feed_soup.findAll('item'):
        matches = Recall.objects.filter(
            release_link=unicode(item.guid.string))
        if not matches:
            # No recall imported for this guid yet; nothing to update.
            continue
        recall = matches[0]
        recall.summary = unicode(item.description.string)
        recall.save()
        num_updated += 1
    return HttpResponse("Number of products updated: " + str(num_updated))
     
