#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import os

import scrapy


class KittySpider(scrapy.Spider):
    """Scrape CryptoKitties metadata from the public API.

    Each kitty's raw JSON is appended to ``./data/kitty/list.txt``.
    The kitty's seller (when on auction) or owner record is appended as a
    CSV row to ``./data/kitty/user.csv``.
    """

    name = 'kitty'
    allowed_domains = ['api.cryptokitties.co']

    # Half-open id range [first_id, last_id) of kitties to crawl.
    # Defaults preserve the original behavior (kitty #1 only); override at
    # runtime with: scrapy crawl kitty -a first_id=1 -a last_id=100
    first_id = 1
    last_id = 2

    def start_requests(self):
        """Yield one API request per kitty id in [first_id, last_id)."""
        # int() so values passed as `-a` spider arguments (strings) work too.
        for kitty_id in range(int(self.first_id), int(self.last_id)):
            yield scrapy.Request(
                url='https://api.cryptokitties.co/kitties/' + str(kitty_id))

    def parse(self, response):
        """Store the raw kitty JSON and record its owner or seller.

        If the kitty is on auction, only the seller's address is embedded,
        so a follow-up request fetches the full profile via ``getUser``;
        otherwise the embedded ``owner`` record is written directly.
        """
        body = json.loads(response.body)
        filename = './data/kitty/list.txt'
        # Create the output directory on first use so the append does not
        # fail with FileNotFoundError on a fresh checkout.
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        with open(filename, 'a', encoding='utf-8') as f:
            f.write(response.text + '\n')
        if body.get('auction'):
            yield scrapy.Request(
                url='https://api.cryptokitties.co/user/'
                    + body['auction']['seller']['address'],
                callback=self.getUser)
        else:
            filename = './data/kitty/user.csv'
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            with open(filename, 'a', encoding='utf-8') as f:
                # NOTE(review): column order depends on the API's JSON key
                # order — presumably stable; verify against the API schema.
                f.write(','.join(map(str, body['owner'].values())) + '\n')

    def getUser(self, response):
        """Append a user profile (address, image, nickname) to user.csv."""
        body = json.loads(response.body)
        filename = './data/kitty/user.csv'
        os.makedirs(os.path.dirname(filename), exist_ok=True)
        with open(filename, 'a', encoding='utf-8') as f:
            # str() guards against null image/nickname in the payload
            # (the original `+` concatenation raised TypeError on None).
            # The original also emitted a stray trailing comma (an empty
            # fourth column) — removed for consistency with parse().
            fields = (body['address'], body['image'], body['nickname'])
            f.write(','.join(map(str, fields)) + '\n')
        