use surf::Response;
use crabler_derive::WebScraper;

/// Scraper whose callbacks are wired up by the `WebScraper` derive macro:
/// - `#[on_response(...)]` registers `response_handler` to run once per fetched page;
/// - `#[on_html("a[href]", ...)]` registers `print_handler` to run for every
///   element matching the `a[href]` CSS selector.
/// The handler names in the attributes must match the method names in the
/// `impl Scraper` block below — the derive resolves them by identifier.
#[derive(WebScraper)]
#[on_response(response_handler)]
#[on_html("a[href]", print_handler)]
struct Scraper{}

impl Scraper {
    async fn response_handler(&self,  response: Response) -> Result<(), E>{
       println!("Status {}", response.status);
       Ok(())
    }

    async fn print_handler(&self, response: Response, a: Element) -> Result<(), E> {
       if let Some(href) = a.attr("href") {
           println!("Found link {} on {}", href, response.url);
       }
       Ok(())
   }
}

#[async_std::main]
async fn main(){
    // let sp =
    println!("test");
}