import json
from typing import AsyncIterator, Any
from urllib.parse import urlencode

import scrapy

# from util.headers import get_header


class QinqiuSpider(scrapy.Spider):
    """Demo spider that POSTs form data to httpbin and logs the echoed JSON.

    httpbin.org/post echoes the request back to the caller, which makes it
    a convenient target for verifying how Scrapy serialises FormRequest
    bodies and headers.
    """

    name = "qinqiu"

    async def start(self):
        """Yield the initial request (Scrapy 2.13+ async start entry point)."""
        # Build the query string outside the f-string: a multi-line f-string
        # with a nested same-quote dict literal is only valid on Python 3.12+,
        # while this form runs on every supported version.
        # NOTE(review): 'nema' looks like a typo for 'name' — confirm with
        # whatever consumes the echoed query before renaming the key.
        query = urlencode({
            "nema": "qiku",
            "age": 17,
        })
        url = f"http://httpbin.org/post?{query}"

        # FormRequest defaults to method="POST" and sends `formdata` as an
        # application/x-www-form-urlencoded request body.
        yield scrapy.FormRequest(
            url,
            formdata={
                "ddd": "wwww",
                "ssss": "14",
            },
            headers={"user-agent": "55555"},
        )

    def parse(self, response):
        """Log the JSON body echoed back by httpbin."""
        # Use the spider's logger instead of bare print() so output respects
        # Scrapy's logging configuration (level, format, destination).
        self.logger.info("parse: got response from %s", response.url)
        self.logger.info("%s", response.json())
