package com.ruoyi.system.webcrawler;

import javax.validation.constraints.NotBlank;
import javax.validation.constraints.Pattern;

/**
 * Request payload for a web-crawl job.
 *
 * <p>Carries the seed URL plus crawl options (page limit, link following,
 * and the kind of data to extract). Bean Validation annotations on
 * {@link #url} are enforced by the framework when the request is bound.
 *
 * <p>NOTE(review): this is a mutable JavaBean-style DTO; it is not
 * thread-safe and is intended for single-request use.
 */
public class CrawlRequest {

    /** Seed URL to crawl; must be non-blank and start with http:// or https://. */
    @NotBlank(message = "URL不能为空")
    @Pattern(regexp = "^(https?://).*", message = "URL格式不正确")
    private String url;

    /**
     * Maximum number of pages to fetch in one crawl.
     * Default lowered to 2 (was previously 10) — presumably to bound
     * resource usage; confirm the intended production default.
     */
    private int maxPages = 2;

    /** Whether the crawler should follow links found on fetched pages. */
    private boolean followLinks = true;

    /** Kind of data to extract; expected values: html, text, links, images. */
    private String dataType;

    /** No-arg constructor required for framework request binding. */
    public CrawlRequest() {}

    /**
     * Creates a fully-populated crawl request.
     *
     * @param url         seed URL (must match the validation constraints)
     * @param maxPages    maximum number of pages to fetch
     * @param followLinks whether to follow discovered links
     * @param dataType    data kind to extract: html, text, links, or images
     */
    public CrawlRequest(String url, int maxPages, boolean followLinks, String dataType) {
        this.url = url;
        this.maxPages = maxPages;
        this.followLinks = followLinks;
        this.dataType = dataType;
    }

    /** @return the seed URL */
    public String getUrl() { return url; }

    /** @param url the seed URL to crawl */
    public void setUrl(String url) { this.url = url; }

    /** @return the maximum number of pages to fetch */
    public int getMaxPages() { return maxPages; }

    /** @param maxPages the maximum number of pages to fetch */
    public void setMaxPages(int maxPages) { this.maxPages = maxPages; }

    /** @return whether discovered links should be followed */
    public boolean isFollowLinks() { return followLinks; }

    /** @param followLinks whether discovered links should be followed */
    public void setFollowLinks(boolean followLinks) { this.followLinks = followLinks; }

    /** @return the data kind to extract (html, text, links, or images) */
    public String getDataType() { return dataType; }

    /** @param dataType the data kind to extract (html, text, links, or images) */
    public void setDataType(String dataType) { this.dataType = dataType; }
}
