package tnet

// Minimal functions necessary to parse and generate "TNET B" subset of TNET.
// 
// Easier to implement with typed containers.
//
// The interface is asymmetric:
//
// - To deserialize a TNET dict, we parse it into an array of Pair of Chunk.
// - On the serialization side, we represent a TNET dict with map[string]string
//
// TODO:
//
// - Add error handling everywhere (change signatures)  -- MOSTLY DONE
// - More unit tests
//
// Interface?
//
// tnet.Reader(io.Reader)
// ParseDict
// is there a "sub" IO.reader for recursion?
// Nah, I think this is too high level... wouldn't work in PGI loop.  That has
// to do all sorts of checking.

import (
  "bytes"
  "errors"
  "fmt"
  "io"
  "os"
  "sort"
  "strconv"
  "strings"
)

// A Chunk is one parsed TNET element: the raw payload bytes plus the
// single type-tag byte that followed them (e.g. ',' for a string, '}'
// closing a dict).
type Chunk struct {
  Payload []byte  // shared memory with the original string/slice -- no copy is made
  Tag byte  // trailing type tag: ',' '}' etc.
}

// A Pair is one key-value entry of a parsed TNET dict, both sides kept as
// raw chunks (no type conversion).
type Pair struct {
  Key, Value Chunk
}

// A PairArray is just a slice of pairs.  It represents a parsed TNET dict
// while preserving the order entries appeared in the input; lookup is
// linear (see Get).
type PairArray []Pair


// Log writes a printf-style debug message to stderr, with a trailing
// newline appended.
func Log(format string, a ...interface{}) {
  fmt.Fprintf(os.Stderr, format+"\n", a...)
}

// Get looks up a value by key, sort of like the builtin map index, except it
// does a linear search over the pairs.  It returns the zero Chunk and false
// if the key is not present.
func (m PairArray) Get(key string) (value Chunk, ok bool) {
  for _, pair := range m {
    // string(Payload) == key is a form the compiler recognizes, so it
    // compares without allocating -- unlike converting key to []byte on
    // every iteration.
    if string(pair.Key.Payload) == key {
      return pair.Value, true
    }
  }
  return Chunk{}, false
}

// ParseChunkPrefix loads one length-prefixed chunk (e.g. "3:foo,") from the
// front of buf.  It returns the chunk, the unconsumed remainder of buf, and
// an error if the input is malformed or truncated.
//
// The returned Chunk.Payload aliases buf; no copy is made.
func ParseChunkPrefix(buf []byte) (chunk Chunk, rest []byte, err error) {
  left := bytes.IndexByte(buf, ':')
  if left == -1 {
    return Chunk{}, nil, errors.New("Expected :")
  }

  length, err := strconv.Atoi(string(buf[:left]))
  if err != nil {
    return Chunk{}, nil, err
  }
  if length < 0 {
    // Atoi accepts "-3"; a negative length would make the slice bounds
    // below panic.
    return Chunk{}, nil, errors.New("Negative chunk length")
  }

  // Index of the tag byte, just past the payload.
  right := left + 1 + length
  if right >= len(buf) {
    // Previously this indexed past the end of buf and panicked.
    return Chunk{}, nil, errors.New("Chunk payload is truncated")
  }

  return Chunk{buf[left+1 : right], buf[right]}, buf[right+1:], nil
}

// Version that throws an error if there's any extra data.
// NOTE: I don't think I'm using this anywhere!
/*
func ParseChunk(buf []byte) (chunk Chunk, err error) {
  chunk, rest, err := ParseChunkPrefix(buf)
  if err != nil {
    return chunk, err
  }
  if rest != nil {
    return chunk, errors.New(
      "Unexpected extra data at the end of chunk: " + string(rest))
  }
  return
}
*/


// ParseDictChunks parses a byte string into an array of Pairs of Chunks.
//
// This uses a different naming convention than the Python version because it
// has a different API: it doesn't recurse, and it doesn't give you typed
// data back -- just raw chunks, in input order.
func ParseDictChunks(buf []byte) (PairArray, error) {
  var pairs PairArray

  for len(buf) > 0 {
    var key, value Chunk
    var err error

    key, buf, err = ParseChunkPrefix(buf)
    if err != nil {
      return pairs, err
    }

    // A key with no value following it is malformed.
    if len(buf) == 0 {
      return pairs, errors.New("Got an odd number of dictionary items")
    }

    value, buf, err = ParseChunkPrefix(buf)
    if err != nil {
      return pairs, err
    }

    pairs = append(pairs, Pair{key, value})
  }
  return pairs, nil
}

// output a byte string as TNET
func DumpString(s string) (buf string) {
  len_str := strconv.Itoa(len(s))
  return strings.Join([]string{len_str, ":", s, ","}, "")
}

// DumpDict encodes m as a TNET dict chunk: "<len>:<pairs>}", where <pairs>
// is the concatenation of the encoded keys and values and <len> is its
// length in bytes.
//
// Keys are emitted in sorted order so the output is deterministic (map
// iteration order is random).
//
// NOTE: the previous version initialized the chunk list with
// make([]string, 10), so the "" placeholder it appended landed at index 10
// and was never filled in; chunks[0] = len_str overwrote one of the ten
// accidental empty strings instead.  The output was correct only because
// empty strings vanish under strings.Join.
func DumpDict(m map[string]string) (buf string) {
  keys := make([]string, 0, len(m))
  for k := range m {
    keys = append(keys, k)
  }
  sort.Strings(keys)

  // Encodes one string chunk, "<len>:<s>," -- same format as DumpString.
  enc := func(s string) string {
    return strconv.Itoa(len(s)) + ":" + s + ","
  }

  // Build the payload first so its byte length is known.
  var body strings.Builder
  for _, k := range keys {
    body.WriteString(enc(k))
    body.WriteString(enc(m[k]))
  }

  return strconv.Itoa(body.Len()) + ":" + body.String() + "}"
}

// reads the length and :
func readLength(in io.Reader) ([]byte, error) {

  max_size := 16;
  // Allocate
  len_buf := make([]byte, max_size)
  i := 0  // used outside the loop
  for ; i < max_size; i++ {
    byte_slice := len_buf[i:i+1]
    _, err := in.Read(byte_slice)
    if err != nil {
      Log("err reading");
      return nil, err
    }
    if byte_slice[0] == byte(':') {
      break
    }
  }
  Log("buf: %q", len_buf[:i])
  return len_buf[:i], nil
}

// Read reads one (payload, tag) chunk from a reader.
//
// TODO: should this be named ReadChunk?
func Read(in io.Reader) (c Chunk, err error) {
  lenBuf, err := readLength(in)
  if err != nil {
    return
  }

  // The old version ignored this error, so garbage or negative lengths
  // would panic in make() below.
  length, err := strconv.Atoi(string(lenBuf))
  if err != nil {
    return Chunk{}, err
  }
  if length < 0 {
    return Chunk{}, errors.New("Negative chunk length")
  }

  // Payload plus the single tag byte that follows it.  io.ReadFull, unlike
  // a bare Read, guarantees the whole buffer is filled or an error is
  // returned.
  payloadBuf := make([]byte, length+1)
  if _, err := io.ReadFull(in, payloadBuf); err != nil {
    return Chunk{}, err
  }

  return Chunk{payloadBuf[:length], payloadBuf[length]}, nil
}
