-- metscrape - bulk-download weather forecasts.
-- Copyright (C) 2013  Jack Kelly <jack@jackkelly.name>
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program.  If not, see <http://www.gnu.org/licenses/>.

module Scrapers.BOM (scrapeInteractiveWeather,
                     scrapeMarineWind,
                     scrapeMSLPP,
                     scrapeSurfacePressureRainfall) where

import Control.Applicative
import Data.Char
import Data.List
import Network.HTTP
import Network.URI (parseURI)
import Text
import Text.Printf
import Scraper

-- From the BOM's Interactive Weather and Wave maps tool. productCode
-- is something like "IDY30223" and productName is something like
-- "sigWaveHgt".
scrapeInteractiveWeather :: String -> String -> String -> Scraper
scrapeInteractiveWeather code name pattern
    = WGet [ (printf urlPattern n,
              printf pattern n) | n <- [0, 3..72] :: [Int] ] where
      -- One chart per 3-hour step out to +72h; %03d is the step number.
      urlPattern = "http://www.bom.gov.au/charts_data/" ++ code ++
                   "/current/" ++ munge name ++ "/" ++ code ++ "." ++ name ++
                   ".%03d.png"
      -- '-' in the product name becomes '/' in the URL path
      -- (presumably hyphenated names encode subdirectories — unverified).
      munge = map (\c -> if c == '-' then '/' else c)

-- | Mean Sea Level Pressure Prognosis chart: a single fixed-URL GIF,
-- so there is exactly one (url, destination) pair to download.
scrapeMSLPP :: Scraper
scrapeMSLPP = WGet [("http://www.bom.gov.au/fwo/IDG00074.gif",
                     "Weather/BOM_MSLPP.gif")]

-- | Marine wind forecast images for one location. loc is the BOM
-- location query parameter; pattern is a printf pattern with one %d
-- hole, filled with 1, 2, ... to number the downloaded files.
scrapeMarineWind :: String -> String -> Scraper
scrapeMarineWind loc pattern = IOWGet $ do
    let url = ("http://www.bom.gov.au/marine/wind.shtml?"
               ++ "unit=p0&location=" ++ loc ++ "&tz=AEDT")
        -- The URL is built from literals, so a parse failure means a
        -- programming error; abort loudly rather than scrape nothing.
        uri = maybe (error $ "Invalid URI: " ++ url) id (parseURI url)

    -- Hacky, but it works. For now: pull image URLs out of the page's
    -- embedded JavaScript line-by-line instead of parsing the HTML.
    text <- simpleHTTP (defaultGETRequest_ uri) >>= getResponseBody
    let urls = [ "http://www.bom.gov.au" ++ firstQuoted line
               | line <- lines text, "url: \"" `isInfixOf` line ]
        -- Lazily number output files; zip truncates to match urls.
        files = map (printf pattern) ([1..] :: [Int]) :: [FilePath]
    return $ zip urls files
  where
    -- Contents of the first double-quoted string on the line. Total:
    -- drop 1 (unlike tail) is safe even if no '"' is present, though
    -- callers only pass lines already known to contain "url: \"".
    firstQuoted = takeWhile (/= '"') . drop 1 . dropWhile (/= '"')

-- | MSLP + precipitation charts. The +6..+72h and +78..+156h ranges
-- come from two different product codes but share the same URL shape,
-- so both are built by one parameterized helper.
scrapeSurfacePressureRainfall :: Scraper
scrapeSurfacePressureRainfall = WGet $ chart "IDY20301" [6, 12..72]
                                    ++ chart "IDY20002" [78, 84..156]
    where
      -- All (url, destination) pairs for one product code and one
      -- list of forecast-hour steps.
      chart :: String -> [Int] -> [(String, FilePath)]
      chart code ns = [ (printf (urlPattern code) n, printf path n) | n <- ns ]
      urlPattern code = "http://www.bom.gov.au/charts_data/" ++ code ++
                        "/current/mslp-precip/" ++ code ++
                        ".mslp-precip.%03d.png"
      path = "Weather/SurfacePressureRainfall%03d.png"
