import json
from os import stat_result
from typing import AsyncIterator, Any
from urllib.parse import urlencode

import aiohttp
import scrapy


class PostGetSpider(scrapy.Spider):
    """Demo spider: send a GET request with query-string parameters to
    httpbin.org and print the JSON echo response.

    The commented-out requests in ``start`` show equivalent POST variants
    (form-encoded body, JSON body, and ``FormRequest``).
    """

    name = "post_get"

    async def start(self):
        # Build the query string in a local first: a multi-line expression
        # inside f-string braces is a SyntaxError before Python 3.12
        # (PEP 701), and hoisting it reads better regardless of version.
        # Resulting URL: https://httpbin.org/get?name=qiku&id=101&age=20
        query = urlencode({"id": 101, "age": 20})
        yield scrapy.Request(
            url=f"https://httpbin.org/get?name=qiku&{query}",
            method="GET",
            headers={
                # Spoof a desktop Chrome UA so the request looks like a browser.
                "User-Agent": (
                    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                    "AppleWebKit/537.36 (KHTML, like Gecko) "
                    "Chrome/141.0.0.0 Safari/537.36"
                ),
            },
        )

        # Default Content-Type: application/json
        # yield scrapy.Request(url="http://httpbin.org/post", method="POST",body=urlencode({"username":"admin","password":"123456"}))
        # yield scrapy.Request(url="http://httpbin.org/post", method="POST",body=urlencode({"username":"admin","password":"123456"}),headers={"Content-Type":"application/x-www-form-urlencoded"})
        # yield scrapy.Request(url="http://httpbin.org/post", method="POST",body=json.dumps({"username":"admin","password":"123456"}),headers={"Content-Type":"application/json"})
        # yield scrapy.FormRequest(url="http://httpbin.org/post",formdata={"name":"admin","password":"123456"})

    def parse(self, response):
        """Print the JSON body that httpbin echoes back."""
        print(response.json())
