package parser

import (
	"fmt"
	"go/scanner"
	"go/token"
	"io/ioutil"
	"os"
)

// Lexical wraps a go/scanner.Scanner and adds single-token look-behind:
// Next delivers tokens and Back pushes the most recent token back so the
// following Next call re-delivers it.
type Lexical struct {

	/* the last Token; re-delivered by Next while shadow is set */
	shadow bool        // true after Back(): Next re-serves pos/tok/sss once
	pos    token.Pos   // position of the last token
	tok    token.Token // kind of the last token
	sss    string      // literal text of the last token

	/* underlying scanner and the file set its positions resolve against */
	sc   scanner.Scanner
	fset *token.FileSet
}

// eprint reports a scan error; its signature matches scanner.ErrorHandler,
// so it is passed to Scanner.Init in NewLexical.
func eprint(pos token.Position, msg string) {
	fmt.Printf("err: %s, %s\n", pos, msg)
}

// NewLexical opens and reads the Go source file fname and returns a
// Lexical ready for token scanning. The returned error is non-nil if the
// file cannot be read.
func NewLexical(fname string) (*Lexical, error) {
	// The scanner works on an in-memory byte slice, so read the
	// whole file up front.
	src, err := ioutil.ReadFile(fname)
	if err != nil {
		return nil, err
	}

	// Initialize the scanner; positions are relative to fset.
	fset := token.NewFileSet()
	file := fset.AddFile(fname, fset.Base(), len(src))

	var sc scanner.Scanner
	sc.Init(file, src, eprint, 0)

	// token.NoPos / token.ILLEGAL mark "no token scanned yet".
	return &Lexical{
		shadow: false,
		pos:    token.NoPos,
		tok:    token.ILLEGAL,
		sss:    "",
		sc:     sc,
		fset:   fset,
	}, nil
}

// Skip selects what Next should silently consume in addition to
// newline-inserted semicolons (which are always skipped).
type Skip uint

const (
	skipNone Skip = iota // consume nothing extra
	skipSMC              /* also consume one explicit SemiColon (";") */
)

// cvtScan pulls the next token from the underlying scanner and folds all
// Go keywords into token.IDENT so callers can treat keywords as ordinary
// names. The keyword's literal text is preserved in the returned string.
func (lex *Lexical) cvtScan() (token.Pos, token.Token, string) {
	pos, tok, sss := lex.sc.Scan()

	// NOTE: the previous test `tok >= token.BREAK` was open-ended and
	// also matched tokens declared after the keyword range in go/token
	// (e.g. TILDE, added in Go 1.18), silently turning them into
	// identifiers. IsKeyword matches exactly the keyword range.
	if tok.IsKeyword() {
		tok = token.IDENT
		// fmt.Printf("test1: %q\n", sss)
	}

	return pos, tok, sss
}

// Next returns the next token (position, kind, literal text). A token
// pushed back with Back is re-delivered first. Semicolons inserted for
// newlines are always skipped; with sk == skipSMC one explicit ";" is
// skipped as well.
func (lex *Lexical) Next(sk Skip) (token.Pos, token.Token, string) {

	/* re-deliver a pushed-back token, or scan a fresh one */
	if lex.shadow {
		lex.shadow = false
	} else {
		lex.pos, lex.tok, lex.sss = lex.cvtScan()
	}

	/* drop semicolons that stand for line ends */
	for lex.tok == token.SEMICOLON && lex.sss == "\n" {
		// fmt.Printf("semicolon, %v %q\n", lex.Position(lex.pos), lex.sss)
		lex.pos, lex.tok, lex.sss = lex.cvtScan()
	}

	/* optionally drop one explicit semicolon */
	if sk == skipSMC && lex.tok == token.SEMICOLON && lex.sss == ";" {
		lex.pos, lex.tok, lex.sss = lex.cvtScan()
	}

	return lex.pos, lex.tok, lex.sss
}

// Back pushes the most recently returned token back so the next call to
// Next re-delivers it. Only one token of look-behind is supported;
// calling Back twice without an intervening Next is a programmer error
// and panics.
func (lex *Lexical) Back() {
	if lex.shadow {
		panic("lexical: Back called twice without an intervening Next")
	}
	lex.shadow = true
}

// Position resolves a compact token.Pos into a full token.Position
// (filename, line, column) using the lexer's file set.
func (lex *Lexical) Position(pos token.Pos) token.Position {
	return lex.fset.Position(pos)
}
