# -*- coding:utf-8 -*-
from scrapy.cmdline import execute
import sys
import os

# Ensure the project root (this file's directory) is importable so Scrapy
# can find the project settings/spiders when launched from elsewhere.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

if __name__ == "__main__":
    # Guard the crawl behind a main check so merely importing this module
    # does not immediately start the spider.
    # Equivalent to running `scrapy crawl sfw` from the project directory.
    execute(["scrapy", "crawl", "sfw"])


# city_url = "https://sh.fang.com/"
# 构建新房的链接
# city_url_module = city_url.split("//")
# scheme = city_url_module[0]
# city_domain = city_url_module[1]
# url_module = city_domain.split(".")
# newhouse_url = scheme + "//" + url_module[0] + ".newhouse." + url_module[1] + "." + url_module[2]+ "house/s/"
# # 构建二手房的链接
# esf_url = scheme + "//" + url_module[0] + ".esf." + url_module[1] + "." + url_module[2]
# print("城市链接:", city_url)
# print("二手房链接:", esf_url)
# print("新房链接:", newhouse_url)

# 暂停/恢复爬虫（断点续爬）: scrapy crawl lagou -s JOBDIR=job_info/001
# scrapy startproject [爬虫项目名称]
# scrapy genspider -t crawl [爬虫名字] [域名]
