import requests
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.common.proxy import ProxyType
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities

# Fetch a header-echo page through PhantomJS using custom HTTP headers and
# print the table showing which headers the server actually received.
# NOTE(review): PhantomJS support was removed from Selenium 3.8+; plan a
# migration to headless Chrome/Firefox.

headers = {
    'User-agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.12; rv:55.0) Gecko/20100101 Firefox/55.0',
    'Accept' : 'application/json, text/plain, */*',
    'Accept-Language' : 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
    'Connection' : 'keep-alive',
    }

url = "https://www.whatismybrowser.com/detect/what-http-headers-is-my-browser-sending"

# PhantomJS picks up per-request headers from capability keys of the form
# 'phantomjs.page.customHeaders.<Header-Name>', so fold each header into a
# copy of the default capability set.
desired_capabilities = DesiredCapabilities.PHANTOMJS.copy()
for key, value in headers.items():
    desired_capabilities['phantomjs.page.customHeaders.{}'.format(key)] = value

# Pass capabilities at construction time; the original created the driver
# first and then called driver.start_session(...), which opens a second
# browser session on top of the one the constructor already started.
driver = webdriver.PhantomJS(desired_capabilities=desired_capabilities)
try:
    driver.get(url)
    bsObj = BeautifulSoup(driver.page_source, "lxml")
    # BUG FIX: the original passed the *set* {"class", "table-striped"},
    # which filters on attribute presence, not class value. The attrs
    # argument must be a dict mapping attribute name -> value.
    print(bsObj.find_all("table", {"class": "table-striped"}))  # use .get_text() when fetching via requests instead
finally:
    # Always shut PhantomJS down so the headless browser process
    # does not linger after the script exits.
    driver.quit()