# --- Configuration ---------------------------------------------------------
# Make the project importable by Django/Scrapy. $(CURDIR) is Make's built-in
# absolute working directory — more reliable than inheriting PWD from the
# environment. `:=` (simple expansion) throughout: values are static.
export PYTHONPATH := $(CURDIR):$(CURDIR)/app

# Virtualenv location and its bin/ prefix (trailing slash intended, so
# recipes can write $(vpath)python). NOTE(review): `vpath` shadows the name
# of Make's search-path directive; it parses as a plain assignment here, but
# a different name would be clearer.
venv   := $(HOME)/.virtualenvs/django
vpath  := $(venv)/bin/

# Database settings. NOTE(review): credentials are committed — acceptable
# only for a local dev database.
db_dir := db/
dbname := django
dbuser := admin
dbpwd  := test123

# Every target in this file is a command, not a file it produces.
.PHONY: run install mkvenv req vreq import export init_mysql init_sqlite3 \
        dump_sqlite3 db clean crawl crawl_submotors_scraper update \
        crawl_car_scraper crawl_open_news

# Start the Django development server with the virtualenv's interpreter.
run:
	$(vpath)python manage.py runserver

install:
	[ -d $(venv) ] || (make mkvenv && make vreq)

# Install the virtualenv tool (into whatever `pip` is on PATH) and create
# the virtualenv at $(venv).
mkvenv:
	pip install virtualenv
	virtualenv $(venv)

# Install project requirements into the system/active Python (not the venv;
# for the virtualenv variant see `vreq`).
req:
	pip install -r requirements.txt

# Install requirements into the virtualenv. stdout is discarded to keep the
# output quiet; pip errors on stderr are still visible.
vreq:
	$(vpath)pip install -r venv-requires.txt > /dev/null

# Load the SQL dump from $(db_dir)$(dbname).sql into the MySQL database.
# Password passed inline as -p$(dbpwd) (no space), consistent with the
# `export`, `update` and `init_mysql` targets; the previous `-p $(dbname)`
# form prompted interactively instead of using $(dbpwd).
import:
	mysql -u$(dbuser) -p$(dbpwd) $(dbname) < $(db_dir)$(dbname).sql

# Dump the MySQL database to $(db_dir)$(dbname).sql (counterpart of `import`).
# NOTE(review): the target name shadows Make's `export` directive keyword;
# GNU make still parses `export:` as a rule, but renaming would be clearer.
export:
	mysqldump -u$(dbuser) -p$(dbpwd) $(dbname) > $(db_dir)$(dbname).sql

# (Re)create the MySQL user and database from scratch, run the Django schema
# sync (`db`), then load the SQL dump. Prompts once for the MySQL root
# password (`-uroot -p`). Uses $(MAKE) instead of literal `make` so the
# recursive call inherits flags and the jobserver.
init_mysql:
	echo "CREATE USER $(dbuser)@localhost IDENTIFIED BY \"$(dbpwd)\"; DROP DATABASE IF EXISTS $(dbname); CREATE DATABASE $(dbname) DEFAULT CHARSET utf8 COLLATE utf8_general_ci; GRANT ALL ON $(dbname).* TO $(dbuser)@localhost;" | mysql -uroot -p && \
	$(MAKE) db && \
	mysql -u$(dbuser) -p$(dbpwd) $(dbname) < $(db_dir)$(dbname).sql

# Build the Django schema first (via `db`), then open the sqlite3 test
# database, executing init.sql on startup.
init_sqlite3: db
	sqlite3 $(db_dir)test.db -init $(db_dir)init.sql

# Dump the sqlite3 test database to $(db_dir)test.sql.
# `.dump` writes to stdout, so the output file must be a shell redirect;
# passing the filename as a second argument makes sqlite3 try to execute
# it as SQL (the previous form was broken).
dump_sqlite3:
	sqlite3 $(db_dir)test.db .dump > $(db_dir)test.sql

# Create/update the Django database schema. Uses the virtualenv interpreter
# ($(vpath)python) — the `vreq` prerequisite installs the dependencies into
# that venv, and `run` already uses it; bare `python` would pick up whatever
# is on PATH.
db: vreq
	$(vpath)python manage.py syncdb

clean:
	@find ./ -name '*.pyc' -exec rm -f {} \;

# Umbrella target: run the crawlers (currently just the submotors scraper).
crawl: crawl_submotors_scraper

# Run the motorsubsite spider: id=1, actions enabled, capped at 30 items.
crawl_submotors_scraper:
	scrapy crawl motorsubsite_spider -a id=1 -a do_action=yes -a max_items_read=30

update:
	#echo 'update gpj_scraper_motorsubsite set scraper_id=3,scraper_runtime_id=3;' | sqlite3 $(db_dir)test.db
	echo 'update gpj_scraper_motorsubsite set scraper_id=3,scraper_runtime_id=3;' | mysql -u$(dbuser) -p$(dbpwd) $(dbname)

# Re-point the scraper configuration first (`update`), then run the car
# spider: id=1, actions enabled, capped at 30 items.
crawl_car_scraper: update
	scrapy crawl car_spider -a id=1 -a do_action=yes -a max_items_read=30

# Run the article spider (id=1, actions enabled; no max_items_read cap passed).
crawl_open_news:
	scrapy crawl article_spider -a id=1 -a do_action=yes
