from tokenizer import Tokenizer 

class Library:
  """One build target parsed from a BUILD file, together with helpers that
  emit the corresponding Makefile rules.

  Object files and generated sources live under build-bin/, mirroring the
  source tree; target paths look like "//pkg" and labels like "//pkg:name".
  """

  # Target kinds.
  BINARY = 0
  LIBRARY = 1
  UNITTEST = 2
  PROTO = 3

  def __init__(self):
    self.name = ""                  # target name, e.g. "foo"
    self.deps = []                  # labels of targets this one depends on
    self.srcs = []                  # source files relative to self.path
    self.libtype = Library.LIBRARY  # one of the kind constants above
    self.path = ""                  # package path, e.g. "//pkg"

  def __str__(self):
    return "%s %s %s %d" % (self.name, self.srcs, self.deps, self.libtype)

  def write_makefile_str(self, f):
    """Write the compile rule for this target's object file to `f`.

    Returns the object path (relative, without the build-bin/ prefix) so
    callers can list it as a prerequisite of link rules.
    """
    rel = self.path[2:]  # drop the leading "//"
    opath = rel + "/" + self.name + ".o"
    srcs = " ".join(rel + "/" + src for src in self.srcs)
    parts = ["%s: %s" % (opath, srcs), "\tmkdir -p build-bin/" + rel]
    if self.libtype == Library.PROTO:
      # protoc writes <stem>.pb.cc under build-bin; compile that instead of
      # the .proto itself.
      stem = self.srcs[0].split(".")[0]
      parts.append("\tprotoc --cpp_out=build-bin %s/%s" % (rel, self.srcs[0]))
      parts.append("\t$(CC) $(CFLAGS) build-bin/%s/%s.pb.cc -o build-bin/%s"
                   % (rel, stem, opath))
    else:
      parts.append("\t$(CC) $(CFLAGS) %s -o build-bin/%s" % (srcs, opath))
    f.write("\n".join(parts) + "\n")
    return opath

  @staticmethod
  def write_binary_str(target, opaths, f, is_test):
    """Write the link rule for binary label `target` ("//pkg:name") to `f`.

    `opaths` are object paths from write_makefile_str; when `is_test`,
    gtest and pthread are linked in as well.
    """
    path = target[2:].replace(":", "/")
    objs = " ".join("build-bin/" + o for o in opaths)
    test_libs = " -lgtest -lpthread " if is_test else ""
    f.write("%s: %s\n\t$(CC) $(LFLAGS) %s%s -lprotobuf -lrt -o build-bin/%s\n"
            % (path, " ".join(opaths), objs, test_libs, path))

  @staticmethod
  def write_unittest(target, opaths, f):
    """Write a plain link rule for `target` without the default libraries."""
    path = target[2:].replace(":", "/")
    objs = " ".join("build-bin/" + o for o in opaths)
    f.write("%s: %s\n\t$(CC) $(LFLAGS) %s -o build-bin/%s\n"
            % (path, " ".join(opaths), objs, path))

  @staticmethod
  def write_ar_str(target, opaths, f):
    """Write the static-archive (.a) rule for `target` to `f`."""
    path = target[2:].replace(":", "/")
    objs = " ".join("build-bin/" + o for o in opaths)
    f.write("%s: %s\n\t$(AR) -r build-bin/%s.a %s\n"
            % (path, " ".join(opaths), path, objs))

class Parser:
  """Parses a BUILD-style file into Library targets.

  Grammar per target:
    kind ( name = "...", [srcs = ["...", ...] | src = "..."] [, deps = ["...", ...]] )
  where kind is cc_library, cc_binary, cc_test or proto_library.
  Malformed input raises AssertionError.
  """

  def __init__(self, filename):
    self.__tokenizer = Tokenizer(filename)

  def parse(self):
    """Parse every target in the file.

    Returns a dict mapping target name -> Library.
    Raises AssertionError on malformed input or a duplicate target name.
    """
    libraries = {}
    while self.__tokenizer.peek() is not None:
      library = self.__parse_library_or_binary()
      assert library.name not in libraries, "Duplicate name " + library.name
      libraries[library.name] = library
    return libraries

  def __expect(self, token):
    # Consume the next token, asserting it is exactly `token`.
    assert self.__tokenizer.advance() == token, "Expected " + token

  def __parse_quoted(self, what):
    # Consume a quoted string ("value") and return its contents; `what`
    # names the expected content in the failure message.
    self.__expect("\"")
    value = self.__tokenizer.advance()
    assert value is not None, "Expected " + what
    self.__expect("\"")
    return value

  def __parse_string_list(self, what):
    # Consume a ["a", "b", ...] list, INCLUDING the closing "]", and
    # return the string values.
    self.__expect("[")
    values = []
    while self.__tokenizer.peek() != "]":
      values.append(self.__parse_quoted(what))
      if self.__tokenizer.peek() == ",":
        self.__expect(",")
    self.__expect("]")
    return values

  def __parse_library_or_binary(self):
    """Parse one target clause and return the populated Library."""
    kinds = {
        "cc_library": Library.LIBRARY,
        "cc_binary": Library.BINARY,
        "cc_test": Library.UNITTEST,
        "proto_library": Library.PROTO,
    }
    keyword = self.__tokenizer.advance()
    # str() so a None/unknown keyword still raises AssertionError (not a
    # TypeError while building the message); proto_library was accepted
    # but missing from the original message.
    assert keyword in kinds, (
        "Must be cc_library or cc_binary or cc_test or proto_library "
        + str(keyword))
    library = Library()
    library.libtype = kinds[keyword]
    self.__expect("(")
    self.__expect("name")
    self.__expect("=")
    self.__expect("\"")
    library.name = self.__tokenizer.advance()
    assert library.name is not None, "Name is empty"
    self.__expect("\"")
    if self.__tokenizer.peek() == ")":
      self.__expect(")")
      return library
    self.__expect(",")
    if self.__tokenizer.peek() == "srcs":
      self.__expect("srcs")
      self.__expect("=")
      # BUG FIX: the original srcs loop never consumed the closing "]",
      # so any target with a srcs list tripped the later "Expected deps"
      # assert (the old "# ? wrong." comment marked this area).
      # __parse_string_list consumes it, matching the deps handling.
      library.srcs = self.__parse_string_list("srcs")
    elif self.__tokenizer.peek() == "src":
      self.__expect("src")
      self.__expect("=")
      library.srcs.append(self.__parse_quoted("srcs"))
    if self.__tokenizer.peek() == ")":
      self.__expect(")")
      return library
    if self.__tokenizer.peek() == ",":
      self.__expect(",")
    self.__expect("deps")
    self.__expect("=")
    library.deps = self.__parse_string_list("dependencies")
    self.__expect(")")
    return library
