# NOTE: the lines below are web-page extraction artifacts (page chrome,
# file size, commit hash, line-number gutter), preserved as comments:
# Spaces: / Running / Running / File size: 2,987 Bytes / 01f823f
# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
# Shared session so cookies/headers persist across all requests below.
session = requests.Session()
# Purchase-request test
urlBase = 'https://jingling.bifangpu.com'
purchaseBase = urlBase+"/api/resource/purchase"
# Session cookie captured from a logged-in browser session.
# NOTE(review): these values expire; refresh them before running.
cookie = 'Hm_lvt_19198da8061c292cacd55088bd6e3eba=1718943605; _ga=GA1.2.1632565634.1718943606; _gid=GA1.2.1271774125.1718943606; jinglingpan:sessid=b3f56f88-841e-4b38-b1da-5d1f73c6f5fc; jinglingpan:sessid.sig=nCmvxKK90Ho6KRcluS0qWGIv1m0; Hm_lpvt_19198da8061c292cacd55088bd6e3eba=1718943640; _ga_Y024PNQKTV=GS1.2.1718943605.1.1.1718943640.0.0.0'
# Browser-identical headers (copied from DevTools) so requests look like a
# normal Chrome page load. Installed on the session in __main__.
headers = {
"authority": "jingling.bifangpu.com",
"method": "GET",
"path": "/resource/detail/b01940f0f34e6ca91f68b258362d50f1",
"scheme": "https",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
"Accept-Encoding": "gzip, deflate, br, zstd",
"Accept-Language": "zh-CN,zh;q=0.9",
"Cache-Control": "max-age=0",
"Cookie": cookie,
"Dnt": "1",
"If-None-Match": "\"5b56-ChT6C0U9s3iYKFMBV41XLcqXxRc\"",
"Priority": "u=0, i",
"Sec-Ch-Ua": "\"Not/A)Brand\";v=\"8\", \"Chromium\";v=\"126\", \"Google Chrome\";v=\"126\"",
"Sec-Ch-Ua-Mobile": "?0",
"Sec-Ch-Ua-Platform": "\"Windows\"",
"Sec-Fetch-Dest": "document",
"Sec-Fetch-Mode": "navigate",
"Sec-Fetch-Site": "same-origin",
"Sec-Fetch-User": "?1",
"Upgrade-Insecure-Requests": "1",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/126.0.0.0 Safari/537.36"
}
# A response of {"code": 10602} means the resource has already been purchased.
def try_puchase(fid:str,session)->dict:
    """Attempt to purchase the resource identified by *fid*.

    POSTs the fid to the purchase endpoint and returns the decoded
    JSON response as a dict.  (Name keeps the original "puchase"
    spelling for caller compatibility.)
    """
    response = session.post(purchaseBase, data={"fid": fid})
    return response.json()
# Pan-info helper: if the purchase call returns 10602 (already purchased),
# just reload the detail page to read the pan info; otherwise purchase first
# and then reload.  Either way, call try_puchase once, then refresh the page.
#->list / None
def get_pan_info(fid:str,session):
    """Return the extraction code and pan-link texts for *fid*.

    Re-fetches the resource detail page and scrapes every
    ``<span class="copy-content">`` element.

    Returns:
        list[str]: the text of each copy-content span (extraction code
        and pan link), or ``None`` when the page contains none
        (e.g. the resource has not been purchased yet).
    """
    detail_html = get_detail(fid=fid, session=session)
    soup = BeautifulSoup(detail_html, 'html.parser')
    copy_contents = soup.find_all('span', class_='copy-content')
    # Truthiness instead of ``__len__() > 0``; comprehension instead of
    # a manual append loop.  Behavior (list-or-None) is unchanged.
    if not copy_contents:
        return None
    return [span.text for span in copy_contents]
# Fetch the detail page for a fid and return its raw HTML.
def get_detail(fid:str,session)->str:
    """Fetch the resource detail page for *fid* and return its HTML text."""
    response = session.get(f"{urlBase}/resource/detail/{fid}")
    return response.text
if __name__ == '__main__':
    # Target resource id (same fid as in the captured headers above).
    fid = "b01940f0f34e6ca91f68b258362d50f1"
    # Install the browser-identical headers on the shared session.
    session.headers=headers
    jsonResp = try_puchase(fid=fid,session=session)
    print(jsonResp)
    panResult = get_pan_info(fid = fid,session=session)# always try to purchase first, then re-open the detail page to read the result
    print(panResult)