#!/usr/bin/env python2

import sys, os, re 

# Make the bundled 'generator' package importable regardless of the
# directory this script is launched from.
path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'generator'))
if path not in sys.path:
  sys.path.insert(1, path)
del path

import scanner, bitPack, classes, check, matrix

try:
  inputFileName = sys.argv[1]
except:
  print "Usage: %s inputFile" % sys.argv[0]
  sys.exit(-1)

# Debug verbosity, selected by an optional second argument:
#   -d       enable both levels
#   --dhigh  high-level structures (rules, tokens, trees)
#   --dlow   low-level encodings (matrices, vectors, C dumps)
DEBUG_HIGH = False
DEBUG_LOW = False

try:
  debug = sys.argv[2]
except IndexError:
  # No debug argument given: keep both flags off.
  pass
else:
  # Narrowing the except to IndexError keeps real errors visible; the
  # comparisons live in the else branch so they can never be masked.
  if debug == "-d":
    DEBUG_HIGH = DEBUG_LOW = True
  if debug == "--dhigh":
    DEBUG_HIGH = True
  if debug == "--dlow":
    DEBUG_LOW = True

inputFile = open(inputFileName, "r")
inputLines = inputFile.readlines()

# Parse C preamble.
lineIndex = 0
cPreamble = ""
for inputLine in inputLines:
  if re.match("%nonterminal", inputLine):
    break
  if re.match("%%", inputLine):
    lineIndex += 1
    break
  cPreamble += inputLine
  lineIndex += 1

if DEBUG_LOW:
  print "C preamble"
  print cPreamble

# Parse axiom.
axiom, lineIndex = scanner.getAxiom(inputLines, lineIndex)

# Parse reduction rules and semantic functions.
inp = ''.join(inputLines[lineIndex:])
rules = scanner.getRules(inp)
if DEBUG_HIGH:
  print "Rules:"
  for index in range(0, len(rules)):
    rule = rules[index]
    sys.stdout.write("%2d:%s\n" % (index, rule.toString()))

# Infer (non)terminals.
nonterminals, terminals = check.inferTokens(rules, axiom)
if DEBUG_HIGH:
  print "Inferred nonterminals: %r" % nonterminals
  print "Inferred terminals: %r" % terminals

# Detect bad rules.
check.detectBadRules(nonterminals, terminals, axiom, rules)

# Detect repeated rhs (an error flag consulted before generation below).
repeatedError = check.detectRepeatedRhs(rules)

# Detect unused nonterminals (result feeds the next check).
unusedNTerm = check.detectUnusedNTerm(nonterminals, axiom, rules)

# Detect non-lhs nonterminals.
check.detectUndefNTerm(nonterminals, rules, unusedNTerm)

# Detect unused terminals.
check.detectUnusedTerm(terminals, rules)

# Detect lhs terminals.
check.detectDefinedTerm(terminals, rules)

# Compute precedence matrix (also reports precedence conflicts).
origMatrix, conflictError = matrix.buildAndCheckMatrix(nonterminals, terminals, rules)

# Interrupt generation in case of errors: repeated rhs or precedence
# conflicts violate the operator precedence form, but the user may still
# choose to continue.
if repeatedError or conflictError:
  if raw_input("Grammar is not in the required operator precedence form. Continue (y/n)?") != "y":
    sys.exit(-1)

# Build real matrix.
realMatrix = matrix.toRealMatrix(origMatrix, terminals)
if DEBUG_HIGH:
  # Dump the precedence matrix with terminals as row/column headers.
  out = sys.stdout.write
  out("Printing conceptual matrix.\n")
  out("%10s " % "i\\j")
  out("".join("%10s " % term for term in terminals))
  out("\n")
  for row in terminals:
    out("%10s " % row)
    out("".join("%10s " % realMatrix[row][col] for col in terminals))
    out("\n")

# Build integer matrix.
intMatrix, rowLen = matrix.toIntMatrix(realMatrix, terminals)
if DEBUG_LOW:
  out = sys.stdout.write
  # Raw packed bytes: one row of rowLen entries per terminal.
  out("Printing real matrix.\n")
  for row in range(len(terminals)):
    out("".join("0x%2x " % intMatrix[row*rowLen + col] for col in range(rowLen)))
    out("\n")
  # The same data decoded through the bit-packing helpers, with headers.
  out("Printing real conceptual matrix.\n")
  out("%10s " % "i\\j")
  out("".join("%10s " % term for term in terminals))
  out("\n")
  for row in range(len(terminals)):
    out("%10s " % terminals[row])
    for col in range(len(terminals)):
      out("%10s " % bitPack.getPrecedence(intMatrix, row, col, rowLen))
    out("\n")
  # The matrix rendered as a C initializer.
  out("Printing C matrix.\n")
  out("uint8_t __matrix[ROW_LEN*TERM_LEN] = {%s}\n" % ", ".join("%d" % cell for cell in intMatrix))

# Create reduction tree: each rule's rhs traces a path from the root,
# and the node at the end of the path records the rule's index.
root = classes.ReductionNode(len(rules))
for ruleIndex, rule in enumerate(rules):
  current = root
  for token in rule.rhs:
    son = current.hasSonWith(token)
    if not son:
      # No branch for this token yet: create it.
      son = classes.ReductionNode(len(rules))
      current.sons[token] = son
    current = son
  current.rule = ruleIndex

if DEBUG_HIGH:
  sys.stdout.write("Reduction tree\nroot:%s" % root.toString(0))

# Vectorize reduction tree.
treeSize = 1 + root.getSize()
vectorTree = [0]*treeSize
offset = 1
# NOTE: tuple assignment binds targets left to right, so vectorTree is
# rebound to the list returned by toVector() before slot 0 (the root's
# offset) is stored into that same list. Do not reorder these targets.
vectorTree, vectorTree[0], offset = root.toVector(vectorTree, offset, nonterminals, terminals)

# Print vector tree: walk the flattened tree depth-first, starting from
# the root offset stored in slot 0, indenting by nesting level.
if DEBUG_LOW:
  sys.stdout.write("Vectorized reduction tree\n")
  workList = [[vectorTree[0], 0, "root"]]
  while workList:
    offset, level, label = workList.pop(0)
    sys.stdout.write("  "*level)
    sys.stdout.write("%s:%d\n" % (label, vectorTree[offset]))
    # Each son occupies two slots: packed token, then its offset.
    sonsNumber = vectorTree[offset + 1]/2
    offset += 2
    for son in range(sonsNumber):
      sonLabel = bitPack.intToToken(vectorTree[offset + son*2], nonterminals, terminals)
      sonOffset = vectorTree[offset + son*2 + 1]
      # Inserting at the front (in order) keeps the walk depth-first.
      workList.insert(son, [sonOffset, level + 1, sonLabel])

# Print C vector tree as a C array initializer.
if DEBUG_LOW:
  sys.stdout.write("C vectorized reduction tree\n")
  entries = ", ".join("%d" % entry for entry in vectorTree)
  sys.stdout.write("uint16_t __reduction_tree = {%s};\n" % entries)

# Create rewrite rules: the transitive closure of unit productions
# "lhs -> nonterminal". Iterate until a full pass adds nothing.
rewrite = dict((nonterminal, []) for nonterminal in nonterminals)
changed = True
while changed:
  changed = False
  for rule in rules:
    left = rule.lhs
    first = rule.rhs[0]
    # Only unit rules whose single rhs token is a nonterminal contribute.
    if len(rule.rhs) != 1 or first in terminals:
      continue
    if first not in rewrite[left]:
      changed = True
      rewrite[left].append(first)
    else:
      # Already known: propagate what is reachable from the rhs token.
      for reachable in rewrite[first]:
        if reachable not in rewrite[left]:
          changed = True
          rewrite[left].append(reachable)

# Create inverse rewrite rules.
invRewrite = dict()
for nonterminal in nonterminals:
  invRewrite[nonterminal] = [nonterminal]
for nonterminal in nonterminals:
  for token in rewrite[nonterminal]:
    invRewrite[token].append(nonterminal)

# Print inverse rewrite rules.
if DEBUG_HIGH:
  print "Rewrite rules"
  for nonterminal in nonterminals:
    sys.stdout.write("Rewrite(%s) = {" % nonterminal)
    for token in invRewrite[nonterminal]:
      sys.stdout.write(" %s" % token)
    sys.stdout.write(" }\n")

# Create array rewrite rules, packed into one flat array:
#   [0 .. N-1]      per-nonterminal offset into the payload area
#   [offset]        entry count for that nonterminal
#   [offset+1 ...]  the packed tokens themselves
rewriteSize = len(nonterminals)
for nonterminal in nonterminals:
  rewriteSize += 1 + len(invRewrite[nonterminal])
realRewrite = [0]*rewriteSize
topOfArray = len(nonterminals)
# enumerate() replaces the original O(n^2) nonterminals.index() lookup
# performed on every iteration; the produced array is identical.
for index, nonterminal in enumerate(nonterminals):
  realRewrite[index] = topOfArray
  realRewrite[topOfArray] = len(invRewrite[nonterminal])
  topOfArray += 1
  for token in invRewrite[nonterminal]:
    realRewrite[topOfArray] = bitPack.tokenToPackedInt(token, nonterminals, terminals)
    topOfArray += 1

# Print array rewrite rules.
if DEBUG_LOW:
  print "Array rewrite rules"
  for i in range(0, len(nonterminals)):
    sys.stdout.write("Rewrite(%s) = {" % nonterminals[i])
    offset = realRewrite[i]
    end = offset + realRewrite[offset] + 1
    offset += 1
    while offset != end:
      sys.stdout.write(" <%d:%s>" % (realRewrite[offset], bitPack.packedIntToToken(realRewrite[offset], nonterminals, terminals)))
      offset += 1
    sys.stdout.write(" }\n")

# Print C rewrite rules.
if DEBUG_LOW:
  print "C rewrite rules"
  sys.stdout.write("uint32_t rewrite[] = {%d" % realRewrite[0])
  for i in range(1, len(realRewrite)):
    sys.stdout.write(", %d" % realRewrite[i])
  sys.stdout.write("};\n")

# Create rhs mapping for $x substitutions and headerName.
for rule in rules:
  rule.tokenMap["lhs"] = "p_" + rule.lhs
  rule.headerName += "r_%s_" % rule.lhs
  for i in range(0, len(rule.rhs)):
    rule.tokenMap[i + 1] = "p_" + rule.rhs[i] + "%d" % (i + 1)
    rule.headerName += rule.rhs[i]

# Execute $x substitutions.
for rule in rules:
  rule.text = rule.text.replace("$$", rule.tokenMap["lhs"] + "->value")
  for i in range(0, len(rule.rhs)):
    rule.text = rule.text.replace("$%d" % (i + 1), rule.tokenMap[i + 1] + "->value")

if DEBUG_LOW:
  print "Rules tokenmaps."
  for rule in rules:
    print rule.tokenMap

# Generate output files.

# Generate grammar_tokens.h
print "Generating include/grammar_tokens.h"
grammar_tokens_h = open("include/grammar_tokens.h", "w")
grammar_tokens_h.write("#ifndef FLEX_GRAMMAR_H_\n#define FLEX_GRAMMAR_H_\n\n")
grammar_tokens_h.write("#define TOKEN_NUM %d\n" % (len(nonterminals) + len(terminals)))
grammar_tokens_h.write("#define NTERM_LEN %d\n" % len(nonterminals))
grammar_tokens_h.write("#define TERM_LEN (TOKEN_NUM - NTERM_LEN)\n")
grammar_tokens_h.write("#define S %s\n\n" % axiom)
grammar_tokens_h.write("#define is_terminal(token) ((uint8_t)((token & 0x80000000) >> 31))\n")
grammar_tokens_h.write("#define token_value(token) ((uint32_t)(token & 0x7FFFFFFF))\n")
grammar_tokens_h.write("#define gr_term_key(token) (token_value(token) - NTERM_LEN)\n")
grammar_tokens_h.write("#define gr_nterm_key(token) (token_value(token))\n")
grammar_tokens_h.write("#define gr_term_token(key) ((gr_token)(0x80000000 | (key + NTERM_LEN)))\n")
grammar_tokens_h.write("#define gr_nterm_token(key) ((gr_token)key)\n\n")
grammar_tokens_h.write("typedef enum gr_token {\n")
grammar_tokens_h.write("  %s = 0," % axiom)
for nonterminal in nonterminals:
  if not nonterminal == axiom:
    grammar_tokens_h.write(" %s," % nonterminal)
terminalHead = True
for terminal in terminals:
  if terminalHead:
    grammar_tokens_h.write("\n  %s = %s" % (terminal, hex(len(nonterminals) | 0x80000000)))
    terminalHead = False
  else:
    grammar_tokens_h.write(", %s" % terminal)
grammar_tokens_h.write("\n} gr_token;\n\n#endif\n")
grammar_tokens_h.close()

# Generate grammar_semantics.h
print "Generating include/grammar_semantics.h"
grammar_semantics_h = open("include/grammar_semantics.h", "w")
grammar_semantics_h.write("#ifndef GRAMMAR_SEMANTICS_H_\n")
grammar_semantics_h.write("#define GRAMMAR_SEMANTICS_H_\n\n")
grammar_semantics_h.write('#include "token_node.h"\n')
grammar_semantics_h.write('#include "token_node_stack.h"\n')
grammar_semantics_h.write('#include "parsing_context.h"\n\n')
for rule in rules:
  grammar_semantics_h.write("void %s(token_node *p, token_node_stack *stack, parsing_ctx *ctx);\n" % rule.headerName)
grammar_semantics_h.write("\n#endif\n")
grammar_semantics_h.close()

# Generate grammar_semantics.c
print "Generating lib/grammar_semantics.c"
grammar_semantics_c = open("lib/grammar_semantics.c", "w")
grammar_semantics_c.write('#include "grammar_semantics.h"\n\n')
grammar_semantics_c.write('/* Preamble from grammar definition. */\n%s/* End of the preamble. */\n\n' % cPreamble)
for rule in rules:
  grammar_semantics_c.write("void %s(token_node *p, token_node_stack *stack, parsing_ctx *ctx)\n{\n" % rule.headerName)
  grammar_semantics_c.write("  token_node *%s" % rule.tokenMap["lhs"])
  for rhsIndex in range(0, len(rule.rhs)):
    grammar_semantics_c.write(", *%s" % rule.tokenMap[rhsIndex + 1])
  grammar_semantics_c.write(";\n\n")
  grammar_semantics_c.write("  %s = push_token_node_on_stack(stack, %s, NULL, ctx->NODE_REALLOC_SIZE);\n" % (rule.tokenMap["lhs"], rule.lhs))
  grammar_semantics_c.write("  if (p->token == TERM) {\n")
  grammar_semantics_c.write("    %s = ctx->token_list;\n" % rule.tokenMap[1])
  grammar_semantics_c.write("    ctx->token_list = %s;\n" % rule.tokenMap["lhs"])
  grammar_semantics_c.write("  } else {\n")
  grammar_semantics_c.write("    %s = p->next;\n" % rule.tokenMap[1])
  grammar_semantics_c.write("  }\n")
  grammar_semantics_c.write("  p->next = %s;\n" % rule.tokenMap["lhs"])
  for rhsIndex in range(1, len(rule.rhs)):
    grammar_semantics_c.write("  %s = %s->next;\n" % (rule.tokenMap[rhsIndex + 1], rule.tokenMap[rhsIndex]))
  for rhsIndex in range(0, len(rule.rhs)):
    grammar_semantics_c.write("  %s->parent = %s;\n" % (rule.tokenMap[rhsIndex + 1], rule.tokenMap["lhs"]))
  grammar_semantics_c.write("  %s->next = %s->next;\n" % (rule.tokenMap["lhs"], rule.tokenMap[len(rule.rhs)]))
  grammar_semantics_c.write("  %s->child = %s;\n" % (rule.tokenMap["lhs"], rule.tokenMap[1]))
  grammar_semantics_c.write("/* Semantic action follows. */\n%s\n/* End of semantic action. */\n}\n\n" % rule.text)
grammar_semantics_c.close()

# Generate grammar.h
print "Generating include/grammar.h"
grammar_h = open("include/grammar.h", "w")
grammar_h.write('#ifndef GRAMMAR_H_\n')
grammar_h.write('#define GRAMMAR_H_\n\n')
grammar_h.write('#include "config.h"\n\n')
grammar_h.write('#include "parsing_context.h"\n')
grammar_h.write('#include "grammar_tokens.h"\n')
grammar_h.write('#include "grammar_semantics.h"\n')
grammar_h.write('#include "token_node.h"\n')
grammar_h.write('#include "token_node_stack.h"\n\n')
grammar_h.write("#define GRAMMAR_SIZE %d\n\n" % len(rules))
grammar_h.write('typedef void (*gr_function) (token_node *, token_node_stack *, parsing_ctx *);\n\n')
grammar_h.write('typedef struct gr_rule {\n')
grammar_h.write('  gr_token lhs;\n')
grammar_h.write('  gr_function semantics;\n')
grammar_h.write('  uint8_t rhs_length;\n')
grammar_h.write('  gr_token *rhs;\n')
grammar_h.write('} gr_rule;\n\n')
grammar_h.write('void init_grammar(parsing_ctx *ctx);\n\n')
grammar_h.write('#endif\n')
grammar_h.close()

# Generate grammar.c
print "Generating lib/grammar.c"
grammar_c = open("lib/grammar.c", "w")
grammar_c.write('#include "grammar.h"\n\n')
grammar_c.write('const gr_token __gr_token_alloc[] = {%s' % axiom)
for nonterminal in nonterminals:
  if nonterminal == axiom:
    continue
  grammar_c.write(', %s' % nonterminal)
for terminal in terminals:
  grammar_c.write(', %s' % terminal)
grammar_c.write('};\n\n')
grammar_c.write('const char * const __gr_token_name[] = {"%s"' % axiom)
for nonterminal in nonterminals:
  if nonterminal == axiom:
    continue
  grammar_c.write(', "%s"' % nonterminal)
for terminal in terminals:
  grammar_c.write(', "%s"' % terminal)
grammar_c.write('};\n\n')
grammar_c.write('const gr_rule __grammar[] = {')
rule = rules[0]
grammar_c.write('\n  {%s, &%s, %d, (gr_token []){%s' % (rule.lhs, rule.headerName, len(rule.rhs), rule.rhs[0]))
for rhsIndex in range(1, len(rule.rhs)):
  grammar_c.write(', %s' % rule.rhs[rhsIndex])
grammar_c.write('}}')
for rule in rules[1:]:
  grammar_c.write(',\n  {%s, &%s, %d, (gr_token []){%s' % (rule.lhs, rule.headerName, len(rule.rhs), rule.rhs[0]))
  for rhsIndex in range(1, len(rule.rhs)):
    grammar_c.write(', %s' % rule.rhs[rhsIndex])
  grammar_c.write('}}')
grammar_c.write('\n};\n\n')
grammar_c.write('void init_grammar(parsing_ctx *ctx)\n')
grammar_c.write('{\n')
grammar_c.write('  ctx->gr_token_alloc = __gr_token_alloc;\n')
grammar_c.write('  ctx->gr_token_name = __gr_token_name;\n')
grammar_c.write('  ctx->grammar = __grammar;\n')
grammar_c.write('}\n')
grammar_c.close()

# Generate matrix.h
print "Generating include/matrix.h"
matrix_h = open("include/matrix.h", "w")
matrix_h.write('#include "parsing_context.h"\n\n')
matrix_h.write('const uint8_t __matrix[ROW_LEN*TERM_LEN] = {%d' % intMatrix[0])
for i in range(1, len(intMatrix)):
  matrix_h.write(", %d" % intMatrix[i])
matrix_h.write("};\n\n")
matrix_h.write('void generate_precedence_matrix(parsing_ctx *ctx)\n')
matrix_h.write('{\n')
matrix_h.write("  ctx->matrix = __matrix;\n")
matrix_h.write("}\n")
matrix_h.close()

# Generate reduction_tree.c
print "Generating lib/reduction_tree.c"
vectorized_tree_c = open("lib/reduction_tree.c", "w")
vectorized_tree_c.write('#include "reduction_tree.h"\n\n')
vectorized_tree_c.write('const uint16_t __vectorized_tree[] = {%d' % vectorTree[0])
for i in range(1, len(vectorTree)):
  vectorized_tree_c.write(", %d" % vectorTree[i])
vectorized_tree_c.write("};\n\n")
vectorized_tree_c.write('void generate_reduction_tree(parsing_ctx *ctx)\n')
vectorized_tree_c.write('{\n')
vectorized_tree_c.write("  ctx->reduction_tree = __vectorized_tree;\n")
vectorized_tree_c.write("}\n")
vectorized_tree_c.close()

# Generate rewrite_rules.c
print "Generating lib/rewrite_rules.c"
rewrite_rules_c = open("lib/rewrite_rules.c", "w")
rewrite_rules_c.write('#include "rewrite_rules.h"\n\n')
rewrite_rules_c.write('const uint32_t __rewrite[] = {%d' % realRewrite[0])
for i in range(1, len(realRewrite)):
  rewrite_rules_c.write(", %d" % realRewrite[i])
rewrite_rules_c.write("};\n\n")
rewrite_rules_c.write('void generate_rewrite_rules(parsing_ctx *ctx)\n')
rewrite_rules_c.write('{\n')
rewrite_rules_c.write("  ctx->rewrite = __rewrite;\n")
rewrite_rules_c.write("}\n")
rewrite_rules_c.close()

# TODO: automatically generate files for Bison (grammar.h and parser.y)
