# import os
# import sys
# BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# sys.path.insert(0, BASE_DIR)
# import requests
#
#
#
# def upload_img(imgUrl, imgId):
# 	resp_img = requests.get(imgUrl)
# 	img = resp_img.content
# 	local_file = BASE_DIR + "\\images\\{}.jpg".format(imgId)
# 	f = open(local_file, 'wb')
# 	f.write(img)
# 	f.close()
# 	# cv2.imwrite(local_file, cv2.resize(cv2.imread(local_file), (510, 750)))  # resize the image
# 	upload_url = "https://sit.zooming-data.com/dist-admin/api/dist_uploadFile"
# 	headers = {
# 		"User-Agent": "Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36",
# 		"Authorization": "eyJhbGciOiJIUzUxMiJ9.eyJyYW5kb21LZXkiOiJneHFqc2UiLCJzdWIiOiJ7XCJiZWxvbmdUeXBlXCI6MSxcImJlbG9uZ1wiOjYsXCJlbnRlcnByaXNlSWRcIjo5MCxcInVzZXJJZFwiOjEsXCJhY2NvdW50XCI6XCJhZG1pblwifSIsImV4cCI6MTYwNzM5NzUzNiwiaWF0IjoxNjA2NzkyNzM2fQ.ApLsYUMPR_U4Xi-liMQs6KX6QPAwgrFwVoR5f2kDDXXJyaA-g29fQ7ID49eUR17yfZyXQr7pJpLg65fFOjT3LQ"
# 	}
# 	upload_resp = requests.post(upload_url, headers=headers, files={"file": open(local_file, 'rb')})
# 	print(upload_resp.content.decode())
#
#
# # imgUrl = "https://pic4.ajkimg.com/display/xinfang/a42e6b25f848ab3bc59321e14f271818/180x135m.jpg"
# # imgId = "271819"
# # upload_img(imgUrl, imgId)
#
#
# # url = "https://sit.zooming-data.com/dist-admin/api/dist_uploadFile"
# # local_file = 'C:\\Users\\Administrator\\Desktop\\demo\\mySpiders\\mySpiders\\images\\271818.bmp'
# # files = [('file', ('271818.bmp', open(local_file, 'rb'), 'image/jpeg'))]
# # # files = {"file": open(local_file, 'rb')}
# # headers = {
# #   'Authorization': 'eyJhbGciOiJIUzUxMiJ9.eyJyYW5kb21LZXkiOiJneHFqc2UiLCJzdWIiOiJ7XCJiZWxvbmdUeXBlXCI6MSxcImJlbG9uZ1wiOjYsXCJlbnRlcnByaXNlSWRcIjo5MCxcInVzZXJJZFwiOjEsXCJhY2NvdW50XCI6XCJhZG1pblwifSIsImV4cCI6MTYwNzM5NzUzNiwiaWF0IjoxNjA2NzkyNzM2fQ.ApLsYUMPR_U4Xi-liMQs6KX6QPAwgrFwVoR5f2kDDXXJyaA-g29fQ7ID49eUR17yfZyXQr7pJpLg65fFOjT3LQ',
# #
# # }
# #
# # response = requests.post(url, headers=headers, files=files)
# #
# # print(response.text)

import scrapy
import logging
# Module-level logger named after this module (standard `logging` convention).
logger = logging.getLogger(__name__)


class TmallSpider(scrapy.Spider):
    """Diagnostic spider that fetches the client's public IP details.

    Requests http://ip.filefab.com/index.php (an IP-echo page) and logs the
    detected IP address, country and city.
    """

    name = 'test'
    allowed_domains = ['ip.filefab.com']
    start_urls = ['http://ip.filefab.com/index.php']

    def parse(self, response):
        """Extract IP, country and city from the echo page and log them.

        :param response: the scrapy Response for the start URL.
        """
        ip = response.xpath('//*[@id="ipd"]/span/text()').extract_first()
        # Use the module logger (defined at top of file) instead of print,
        # so output goes through scrapy's logging configuration. Lazy %-args
        # avoid formatting when the level is disabled.
        logger.info("detected ip: %s", ip)
        # Fixed local-variable typo: "coutry" -> "country".
        country = response.xpath('//*[@id="cntdetected"]/span/text()').extract_first()
        logger.info("detected country: %s", country)
        city = response.xpath('//*[@id="city"]/span/text()').extract_first()
        logger.info("detected city: %s", city)





