import json
from urllib.parse import urlencode


import scrapy
from scrapy.http.request.json_request import JsonRequest

class RequestSpiderSpider(scrapy.Spider):
    """Demo spider that POSTs a JSON payload to httpbin and prints the echo.

    ``JsonRequest`` serializes the ``data`` dict into the request body,
    sets ``Content-Type: application/json``, and defaults the method to
    POST — no manual ``json.dumps`` / header wiring needed.  (The same
    effect can be achieved with a plain ``scrapy.Request`` and a
    hand-built body, or with ``FormRequest`` for form-encoded payloads.)
    """

    name = "request_spider"

    async def start(self):
        # BUG FIX: the payload was previously passed as
        # ``dumps_kwargs={'age': '10'}``.  ``dumps_kwargs`` holds keyword
        # arguments forwarded to ``json.dumps`` (e.g. ``indent``) and is
        # only consulted when ``data=`` is given — so the payload was
        # silently dropped and no JSON body was sent.  The payload belongs
        # in ``data=``.  Also dropped the ``f`` prefix: the URL contains
        # no placeholders.
        yield JsonRequest(
            "https://httpbin.org/post?name=wert",
            data={"age": "10"},
        )

    def parse_get(self, response):
        """Callback for the GET examples: print the URL and the JSON body."""
        print(response.url, response.json())

    def parse(self, response):
        """Default callback: print the final URL and the decoded JSON body."""
        print(response.url, response.json())
