{-# OPTIONS -Wall #-}

--------------------------------------------------------------------------------
-- |
-- Module      :  Lambola.Rewriting
-- Copyright   :  (c) Stephen Tetley 2014
-- License     :  BSD3
--
-- Maintainer  :  stephen.tetley@gmail.com
-- Stability   :  unstable
-- Portability :  GHC
--
-- Name (string) rewriting.
-- 
--------------------------------------------------------------------------------

module Lambola.Rewriting
   

  where


import Control.Applicative
import Control.Monad
import Data.Char
import Data.List.Split

-- "Obviously" this needs a string rewriting combinator 
-- library...
--
-- | Convert an underscore-separated name to CamelCase: uppercase
-- the first letter, uppercase each letter that follows an
-- underscore, lowercase all other letters, and drop the
-- underscores, e.g. @\"foo_bar\"@ becomes @\"FooBar\"@.
-- Non-letter characters other than underscore pass through
-- unchanged.
underscoreToCase :: String -> String
underscoreToCase = first
  where
    -- Start state: capitalise a leading letter, otherwise hand
    -- straight over to the steady state.
    first (x:xs) | isLetter x   = toUpper x : second xs
    first xs                    = second xs

    -- Steady state: lowercase letters, copy other characters,
    -- and switch state on '_' (the underscore itself is dropped).
    second ('_':xs)             = goUpper xs
    second (x:xs) | isLetter x  = toLower x : second xs
    second (x:xs)               = x : second xs
    second []                   = []

    -- Just after an underscore: capitalise the next letter, if any.
    goUpper (x:xs) | isLetter x = toUpper x : second xs
    goUpper xs                  = second xs


-- The simplest description of what to do is - split into words
-- taking "_" as punctuation. Rewrite each word as a proper 
-- name then concat dropping punctuation.
--
-- | Underscore-to-CamelCase via word splitting: split on @\"_\"@,
-- proper-case each fragment, and concatenate.  Empty fragments
-- (from doubled or trailing underscores) contribute nothing.
utc :: String -> String
utc = concatMap namecase . splitOn "_"
  where
    -- Proper-case one fragment: initial upper, rest lower.
    namecase (x:xs) = toUpper x : map toLower xs
    namecase xs     = map toLower xs

{-

-- In Robert Ennals's Jekyll, whitespace (punctuation for our 
-- needs) precedes its token. This implies that EOF is either a 
-- token or the final whitespace is stored specially.

data Token = Token 
    { white_prefix :: !String 
    , token_body   :: !String
    }

data TokenList = TokenList 
    { content_tokens :: ![Token] 
    , trailing_white :: !String 
    }

-}


-- Alternatively a formulation with two cases:
--
-- > T String | W String 
--
-- ... would allow a simple implementation. Heinous sequences, 
-- e.g. W, W ... shouldn't be so bad in practice.
--

-- | One lexed item: either a real token payload ('T') or a run of
-- uninterpreted \"whitespace\" characters ('W').
data Token a = T a | W String
  deriving (Eq,Show)


-- | Map over the payload of a 'T'; whitespace runs are untouched.
instance Functor Token where
  fmap _ (W ws)   = W ws
  fmap f (T body) = T (f body)

type ErrMsg = String

-- | Pending-whitespace accumulator, kept as a difference list
-- ('ShowS') so appending at the end ('snocW') is O(1).
type WPrefix = ShowS

-- | The empty accumulator.
emptyW :: WPrefix
emptyW = id

-- | Run the accumulator back to an ordinary 'String'.
unW :: ShowS -> String
unW f = f ""

-- | Append a single character at the end of the accumulator.
snocW :: WPrefix -> Char -> WPrefix
snocW acc c = acc . showChar c

-- | Tokenizer input is plain 'String'.
type Input = String

-- | Outcome of one tokenizing step.  Failure carries no message -
-- failure is expected and simply indicates whitespace.
data Result a = Okay !Input a
              | Fail

-- | Map over the answer of a successful result.
instance Functor Result where
  fmap g res = case res of
    Okay rest a -> Okay rest (g a)
    Fail        -> Fail


-- | A tokenizer: a function from the remaining input to a
-- 'Result' (leftover input plus a value, or 'Fail').
newtype Tokenize a = Tokenize { 
    getTokenize :: Input -> Result a }

-- | Map over the value a tokenizer produces.
instance Functor Tokenize where
  fmap f (Tokenize g) = Tokenize (fmap f . g)

-- | 'pure' consumes no input; '<*>' runs both tokenizers in
-- sequence, threading the leftover input, and fails if either
-- side fails.
instance Applicative Tokenize where
  pure a = Tokenize $ \inp -> Okay inp a
  Tokenize pf <*> Tokenize pa = Tokenize $ \inp ->
      case pf inp of
        Fail      -> Fail
        Okay s1 f ->
          case pa s1 of
            Fail      -> Fail
            Okay s2 a -> Okay s2 (f a)


-- | 'empty' always fails; '<|>' is left-biased choice (see 'alt').
instance Alternative Tokenize where
  empty = failure
  (<|>) = alt


-- | Sequence tokenizers, feeding the leftover input onwards;
-- failure short-circuits.
instance Monad Tokenize where
  return = pure
  Tokenize step >>= k = Tokenize $ \inp ->
      case step inp of
        Fail      -> Fail
        Okay s1 a -> getTokenize (k a) s1

-- | Mirrors the 'Alternative' instance.
instance MonadPlus Tokenize where
  mzero = failure
  mplus = alt


-- | Left-biased choice: run the first tokenizer; only on failure
-- run the second, on the original (unconsumed) input.
alt :: Tokenize a -> Tokenize a -> Tokenize a
alt p q = Tokenize $ \inp ->
    case getTokenize p inp of
      Fail -> getTokenize q inp
      ok   -> ok

-- | The tokenizer that always fails, consuming nothing.
failure :: Tokenize a
failure = Tokenize (const Fail)




-- | Consume and return the next character; fails at end of input.
anyChar :: Tokenize Char
anyChar = Tokenize step
  where
    step (c:cs) = Okay cs c
    step []     = Fail



-- | Consume the next character only if it satisfies the predicate.
literal :: (Char -> Bool) -> Tokenize Char
literal p = Tokenize step
  where
    step (c:cs) | p c = Okay cs c
    step _            = Fail

-- Do we represent two monads (parser and writer) as lhs and rhs
-- of a rewriting rule?
--
-- > many1 uppercase ==> {- tell -} map tolower

-- If tokenizer fails, do we collect a char as whitespace and move on?


-- | A whole input lexed into tokens interleaved with whitespace runs.
newtype TokenStream a = TokenStream { getTokenStream :: [Token a] }
  deriving (Eq,Show)


-- | Map over every token payload in the stream.
instance Functor TokenStream where
  fmap f (TokenStream toks) = TokenStream [ fmap f t | t <- toks ]

-- | Run a tokenizer repeatedly over the whole input.  Characters
-- the tokenizer cannot consume are accumulated and emitted as 'W'
-- whitespace runs between the 'T' tokens.
--
-- NOTE(review): a tokenizer that succeeds without consuming any
-- input would make this loop forever on nonempty input - confirm
-- callers never pass one (e.g. @pure x@ or 'eof').
tokenize :: Tokenize a -> String -> TokenStream a
tokenize p = TokenStream . go emptyW
  where
    -- Emit the pending whitespace (if any) in front of @rest@.
    flush w rest = let ws = unW w in if null ws then rest else W ws : rest

    go w []         = flush w []
    go w inp@(c:cs) = case getTokenize p inp of
        Okay inp' a -> flush w (T a : go emptyW inp')
        Fail        -> go (w `snocW` c) cs



-- | One or more repetitions (the classic parser-combinator name
-- for 'some').
many1 :: Tokenize a -> Tokenize [a]
many1 p = (:) <$> p <*> many p


-- | Succeed, consuming nothing, exactly at end of input.
eof :: Tokenize ()
eof = Tokenize $ \s -> if null s then Okay s () else Fail



-- | Consume the next character if it is one of the given set.
oneOf :: [Char] -> Tokenize Char
oneOf = literal . flip elem

-- | Consume the next character if it is /not/ in the given set.
noneOf :: [Char] -> Tokenize Char
noneOf cs = literal (`notElem` cs)



--------------------------------------------------------------------------------
-- Transform words

-- | Uppercase every character of a word.
uppercase :: String -> String
uppercase s = [ toUpper c | c <- s ]

-- | Lowercase every character of a word.
lowercase :: String -> String
lowercase s = [ toLower c | c <- s ]

-- | Proper-case a word: initial character uppercased, the rest
-- lowercased; the empty word stays empty.
properName :: String -> String
properName (c:cs) = toUpper c : map toLower cs
properName _      = []


-- | Apply a transformation to every token body in the stream
-- (whitespace untouched); a named synonym for 'fmap'.
everyWord :: (a -> b) -> TokenStream a -> TokenStream b
everyWord = fmap

--------------------------------------------------------------------------------
-- Unwind a TokenStream



-- | Flatten a stream of 'String' tokens back to text, discarding
-- every whitespace ('W') run.
deleteWhite :: TokenStream String -> String
deleteWhite ts = concat [ s | T s <- getTokenStream ts ]

