import time
from urllib.parse import urljoin

import requests
from selenium import webdriver
from selenium.webdriver.common.by import By

# Target site for the demo login flow.
BASE_URL = 'https://login2.scrape.center/'
# Endpoint that accepts the username/password form.
LOGIN_URL = urljoin(BASE_URL, './login')
# A page that requires an authenticated session to view.
INDEX_URL = urljoin(BASE_URL, '/page/1')
# Demo credentials published by the scrape.center training site.
USERNAME = 'admin'
PASSWORD = 'admin'

# Log in with a real browser (Selenium), then hand the authenticated
# cookies over to a requests.Session for fast follow-up scraping.
browser = webdriver.Chrome()
try:
    browser.get(BASE_URL)
    # Selenium 4.3 removed the find_element_by_* helpers; the supported
    # API is find_element(By.<strategy>, selector).
    browser.find_element(By.CSS_SELECTOR, 'input[name="username"]').send_keys(USERNAME)
    browser.find_element(By.CSS_SELECTOR, 'input[name="password"]').send_keys(PASSWORD)
    browser.find_element(By.CSS_SELECTOR, 'input[type="submit"]').click()
    # Crude fixed wait for the post-login redirect to settle; a
    # WebDriverWait on a post-login element would be more robust.
    time.sleep(10)
    # Get cookies from Selenium once the session is authenticated.
    cookies = browser.get_cookies()
    print('Cookies', cookies)
finally:
    # quit() (not close()) tears down the whole WebDriver session and
    # the chromedriver process, even if the login steps raised.
    browser.quit()

# Set cookies on a requests.Session so plain HTTP requests reuse the
# authenticated browser session.
session = requests.Session()
for cookie in cookies:
    session.cookies.set(cookie['name'], cookie['value'])
response_index = session.get(INDEX_URL)
print('Response Status', response_index.status_code)
print('Response URL', response_index.url)
