package eventos

import crawler.*

// Crawler instance that the config/begin calls below drive.
def cr = new CrawlerBuilder()

// Mutable script-level state shared by the rule closures:
def datas = []  // one finished record per crawled page
def data = []   // fields accumulated for the page currently being parsed

// Configure the crawler: download options, scraping rules, and seed URLs.
// NOTE(review): `props`, `rules`, `finish`, `onLink`, `onImg`, `onText` and
// `add` all resolve through the CrawlerBuilder DSL's delegate — their exact
// semantics live in the crawler package, not in this file.
cr.config {
    props {
        downloadDir = "/tmp"
        // maxLevel = 0: presumably fetch only the seed pages themselves,
        // following no links — TODO confirm against CrawlerBuilder.
        maxLevel = 0
//        verbose = true
        cache = false
//        onlyFollowThisSites = ["dandoporculo.com", "*.media.tumblr.com"]
    }

    rules {
        // Page finished: stash the fields collected for that page and
        // reset the per-page accumulator for the next one.
        finish { Doc doc, File file ->
//            println "Finish ${doc}"
            println data
            datas << data
            data = []
        }
        // Link/image handlers are intentionally disabled (leftover from an
        // earlier crawl target); kept as reference.
        onLink { LinkEvent e ->
//            println "LINK ${e.doc.url}"
//            if (e.doc.resource?.startsWith("/photo") || e.doc.resource?.startsWith("/page")) {
//                add e.doc
//            }
        }
        onImg { LinkEvent e ->
//            if (e.doc.resource?.startsWith("/avatar_")) {
//                return;
//            }
//            println "IMG  ${e.doc.url}"
//            add e.doc
        }
        // Collect the text of at most the first 6 <b> elements per page —
        // presumably the data fields on the dices.net beach pages; verify
        // against the live markup.
        onText { TextEvent e->
            if (e.tag == "b" && data.size() < 6) {
                data << e.text
            }
        }
    }

//    add url: "http://www.google.es/images?um=1&hl=es&safe=off&tbs=isch:1&sa=1&q=epic+boobs&aq=f&aqi=&aql=&oq=&gs_rfai="

    // Seed every beach map page, ids 0..3520 inclusive (3521 URLs).
    0.upto 3520, {
        add url: "http://www.dices.net/mapas/playas/mapa.php?playa=${it}"

    }

}

// Run the crawl over all seeded URLs, then dump what was scraped.
cr.begin()

println datas
println datas.size()

// One scraped record (a list of <b> texts) per output line.
// StringBuilder rather than StringBuffer: this script is single-threaded,
// so the synchronized buffer buys nothing and only adds lock overhead.
StringBuilder text = new StringBuilder()
datas.each { record ->
    text.append(record.toString()).append("\n")
}
// Pin the charset so the output file does not depend on the platform
// default encoding of whatever machine runs the script.
new File("/tmp/coord.txt").write(text.toString(), "UTF-8")



