package com.example.crawler.test;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;

/**
 * Minimal HttpClient demo: issues a single GET request to a fixed URL and,
 * on an HTTP 200 response, prints the response body to stdout.
 *
 * <p>No retries, redirect tuning, or error handling beyond the status check;
 * any network failure propagates as an {@link IOException}.
 */
public class CrawlerFirst {
    /**
     * Entry point: fetches {@code https://www.baidu.com} and prints the page.
     *
     * @param args unused
     * @throws IOException if the request fails or the body cannot be read
     */
    public static void main(String[] args) throws IOException {
        // 2. Build the GET request for the target URL.
        HttpGet httpGet = new HttpGet("https://www.baidu.com");

        // 1 & 3. Create the client and execute the request.
        // try-with-resources closes both the client and the response in all
        // paths — the original code leaked them (neither was ever closed).
        try (CloseableHttpClient httpClient = HttpClients.createDefault();
             CloseableHttpResponse response = httpClient.execute(httpGet)) {

            // 4. Parse the response: only print the body on HTTP 200.
            if (response.getStatusLine().getStatusCode() == 200) {
                HttpEntity httpEntity = response.getEntity();
                // Decode the body as UTF-8 explicitly; the Charset overload
                // avoids the magic string "utf8" and a Charset.forName lookup.
                String content = EntityUtils.toString(httpEntity, StandardCharsets.UTF_8);
                System.out.println("页面信息：");
                System.out.println(content);
            }
        }
    }
}
