// Code for tokenizing and parsing simple whitespace-separated expressions.

/**
 * Splits a raw source string into whitespace-separated tokens.
 *
 * Splitting on the /\s+/ regex (instead of a single space) collapses runs of
 * whitespace, and the filter drops the empty string produced for blank or
 * padded input — so no empty tokens ever reach the parser.
 *
 * @param str raw source text, e.g. 'add 6 with 4'
 * @returns array of non-empty tokens, e.g. ['add', '6', 'with', '4']
 */
const lexer = (str: string): string[] =>
  str.split(/\s+/).filter(token => token.length > 0)

// Example: tokenize a simple expression into its whitespace-separated tokens.
lexer('add 6 with 4')// ["add", "6", "with", "4"]

/**
 * Builds an AST from a flat token list produced by `lexer`.
 *
 * Grammar (greedy): a token made entirely of digits is a number literal;
 * any other token is an operator that consumes ALL remaining tokens as its
 * operand expressions. So ['add','6','with','4'] parses to an 'add' node
 * whose operands are the number 6 and a nested 'with' node holding 4.
 *
 * @param tokens non-empty list of tokens
 * @returns root AST node ({ value, type } or { value, type, expression })
 * @throws Error when `tokens` is empty
 */
function parser(tokens: string[]) {
  // AST node shapes produced by this parser.
  type NumberNode = { value: number; type: 'number' }
  type OperatorNode = { value: string; type: 'operator'; expression: AstNode[] }
  type AstNode = NumberNode | OperatorNode

  // Cursor into `tokens`; every parse* call consumes the tokens it reads,
  // which guarantees the loop in parseOperator terminates.
  let currentIndex = 0

  // A token is a number literal only if it is entirely digits
  // (a bare /\d/ would also match operators that merely contain a digit).
  const isNumberToken = (token: string | undefined): boolean =>
    token !== undefined && /^\d+$/.test(token)

  function parseNumber(): NumberNode {
    // Consume one digit-only token and convert it to a numeric value.
    return { value: parseInt(tokens[currentIndex++], 10), type: 'number' }
  }

  function parseOperator(): OperatorNode {
    // Consume the operator token, then greedily parse every remaining
    // token into its operand list.
    const node: OperatorNode = {
      value: tokens[currentIndex++],
      type: 'operator',
      expression: [],
    }
    while (currentIndex < tokens.length) {
      node.expression.push(parseExpression())
    }
    return node
  }

  function parseExpression(): AstNode {
    return isNumberToken(tokens[currentIndex]) ? parseNumber() : parseOperator()
  }

  if (tokens.length === 0) {
    throw new Error('parser: expected at least one token')
  }
  return parseExpression()
}

// Example: parse the token list produced for 'add 6 with 4' into an AST.
parser(['add', '6', 'with', '4'])
