SuperStrict

Rem
bbdoc: Lexical analysis
End Rem
Module Otus.Lexer

ModuleInfo "Version: 0.10"
ModuleInfo "Author: Jan Varho"
ModuleInfo "License: Public domain"

ModuleInfo "History: 0.10 Beta"
ModuleInfo "History: Initial beta"

Import BRL.Stream

' Select one depending on which RegEx engine to build against
Import "re2.bmx"	'Otus.RE2	(Google RE2)
'Import "re.bmx"	'BaH.RegEx	(PCRE)

Rem
bbdoc: Lexical analyser
about:
#Create a lexer from a syntax array and use it to open TLexerStreams
End Rem
Type TLexer
	
	' Compiled patterns, one per token type; the array index is the token id
	Field _syntax:TRegEx[]
	
	Rem
	bbdoc: Create a lexer
	returns: A lexer to be used with TLexerStreams
	about:
	The syntax array should contain regular expression patterns for valid token types.
	The index of a pattern in the array is later returned as a token id.
	
	The valid regex patterns depend on the regex engine used.
	Line start and end patterns (^, $ etc.) are NOT supported.
	If a pattern is not valid, it is thrown as an exception.
	
	Passing a protocol name registers an OpenStream protocol.
	That can be used to open streams to be read a token at a time.
	E.g. if you create a lexer with the protocol "lex",
	you can use ReadStream("lex::file.ext") to read it.
	
	WARNING: Using a protocol creates a "Stream Factory",
	which keeps the lexer object alive and cannot be deleted.
	You should probably not use this for lexers that are dynamically created,
	unless the number of them that can be created is bounded and low.
	End Rem
	Function Create:TLexer(syntax:String[], protocol:String = Null)
		Local lexer:TLexer = New TLexer
		lexer._syntax = New TRegEx[syntax.length]
		
		Local i:Int = 0
		While i < syntax.length
			' \A anchors the pattern to the start of the string
			Local pattern:TRegEx = RegEx_Create("\A"+syntax[i])
			' An invalid pattern is reported by throwing the offending string
			If pattern = Null Then Throw syntax[i]
			lexer._syntax[i] = pattern
			i :+ 1
		Wend
		
		If protocol Then TLexerStreamFactory.Create lexer, protocol
		
		Return lexer
	End Function
	
End Type

Rem
bbdoc: A token
about:
Tokens are returned by a call to ReadObject on a TLexerStream
End Rem
Type TToken
	
	Rem
	bbdoc: The token id means End Of File
	about:
	Thrown on Reads after EOF
	End Rem
	Const EXCEPTION_EOF:Int = -1
	
	Rem
	bbdoc: The token id means out of buffer
	about:
	Thrown when buffer is full and no pattern matches.
	End Rem
	Const EXCEPTION_BUF:Int = -2
	
	Rem
	bbdoc: The id of the pattern matching the token
	End Rem
	Field id:Int
	
	Rem
	bbdoc: The actual string matched
	End Rem
	Field str:String
	
	Rem
	bbdoc: The position of the token in the source stream
	about:
	The number of rows is calculated in newline characters.
	Any other characters (including carriage return) increment the column.
	End Rem
	Field row:Int, col:Int
	
	' Returns a new token with identical id, string and position
	Method Copy:TToken()
		Local clone:TToken = New TToken
		clone.id = id
		clone.str = str
		clone.row = row
		clone.col = col
		Return clone
	End Method
	
	' Formats as "id:str"; negative (error) ids get a "Lexer exception: " prefix
	Method ToString:String()
		Local text:String = id + ":" + str
		If id < 0 Then text = "Lexer exception: " + text
		Return text
	End Method
	
End Type

Rem
bbdoc: A lexer stream
about:
A lexer stream wraps another stream to be read a token at a time.

They can be #Created with a lexer object and a stream,
or with ReadStream/OpenStream if using a lexer protocol.

Lexer streams only support some read methods and no writing.
End Rem
Type TLexerStream Extends TStream
	
	Field _in:TStream, _l:TLexer
	
	' Maximal buffer size ~= max token length
	Const BUF_MAX_SIZE:Int = 4095
	
	' Token buffer and the number of valid bytes in it.
	' Invariant: _buf[_len] = 0 after every read and shift, so the buffer is
	' always a valid NUL-terminated string for regex matching and
	' String.FromUTF8String. (BlitzMax arrays start zero-filled, but reads
	' and the shift below can leave stale bytes past _len otherwise.)
	Field _buf:Byte[16], _len:Int
	
	' Current position in the source stream (see TToken row/col semantics)
	Field _row:Int, _col:Int
	
	' Scan the next token from the buffer, refilling/growing it as needed.
	' Throws a TToken with a negative id on EOF or buffer overflow.
	Method _Next:TToken()
		' If buffer is empty, refill.
		' BUG FIX: never request more than BUF_MAX_SIZE bytes - the buffer
		' itself can have grown larger than BUF_MAX_SIZE+1, and _len must
		' stay <= BUF_MAX_SIZE for the overflow check and Min() below.
		If Not _len
			_len = _in.Read( Byte Ptr(_buf), Min(_buf.length-1, BUF_MAX_SIZE) )
			_buf[_len] = 0	' reads can be partial; terminate explicitly
		End If
		
		Local this:TToken = New TToken
		this.row = _row
		this.col = _col
		
		' Search for longest match
		Local iMax:Int = -1, lMax:Int = 0
		Repeat
			' Compare against each pattern; first pattern wins on equal length
			For Local i:Int = 0 Until _l._syntax.length
				Local s:Int, l:Int
				If RegEx_Match(_l._syntax[i], _buf, s, l) And l > lMax
					lMax = l
					iMax = i
					If l = _len Exit	' cannot match longer than the buffer
				End If
			Next
			
			' Found a valid match?
			' (A match spanning the whole buffer might extend further, so only
			' accept it once no more input can arrive.)
			If lMax And (lMax < _len Or _in.Eof()) Exit
			
			' EOF or out of buffer?
			If _in.Eof() Or _len = BUF_MAX_SIZE
				this.str = String.FromUTF8String(_buf)
				If _len = BUF_MAX_SIZE
					this.id = TToken.EXCEPTION_BUF
				Else
					this.id = TToken.EXCEPTION_EOF
				End If
				Throw this
			End If
			
			' Increase buffer length
			' NOTE(review): a stream that returns 0 bytes without reporting
			' Eof (e.g. non-blocking) would spin here - confirm all sources
			' are blocking streams.
			Local ol:Int = _len
			_len = Min(_len*2, BUF_MAX_SIZE)
			If _buf.length < _len+1	' +1 for the NUL terminator
				' Slicing past the end zero-fills the new bytes
				_buf = _buf[.._buf.length*2]
			End If
			
			_len = ol + _in.Read( Byte Ptr(_buf)+ol, _len-ol )
			_buf[_len] = 0	' reads can be partial; terminate explicitly
		Forever
		
		' Extract the token that was found: temporarily NUL-terminate at the
		' match end so only the token is converted to a string
		this.id = iMax
		Local tmp:Byte = _buf[lMax]
		_buf[lMax] = 0
		this.str = String.FromUTF8String(_buf)
		_buf[lMax] = tmp
		
		' Count the position: newlines advance the row, anything else the column
		Const NL:Int = 10
		For Local i:Int = 0 Until this.str.length
			If this.str[i] = NL
				_row :+ 1
				_col = 0
			Else
				_col :+ 1
			End If
		Next
		
		' Move the unconsumed data to the front of the buffer
		_len :- lMax
		For Local i:Int = 0 Until _len
			_buf[i] = _buf[i+lMax]
		Next
		_buf[_len] = 0	' restore the terminator invariant
		
		Return this
	End Method
	
	Rem
	bbdoc: Check for End Of File
	returns: True if the stream has been fully read
	End Rem
	Method Eof:Int()
		' Buffered bytes still count as readable even after the source ends
		Return _len=0 And _in.Eof()
	End Method
	
	Rem
	bbdoc: Read the id of the next token
	returns: The id of the next token
	about:
	ReadInt skips the token.
	If you also need to know the string,
	use #ReadObject instead.
	
	On an error, ReadInt throws a TToken object as an exception.
	This contains one bufferful of data from the stream.
	This also happens on EOF, so you should check for that.
	End Rem
	Method ReadInt:Int()
		Return _Next().id
	End Method
	
	Rem
	bbdoc: Read the contents of the next token
	returns: The contents of the next token
	about:
	ReadLine skips the token.
	If you also need to know the id,
	use #ReadObject instead.
	
	On an error, ReadLine throws a TToken object as an exception.
	This contains one bufferful of data from the stream.
	This also happens on EOF, so you should check for that.
	End Rem
	Method ReadLine:String()
		Return _Next().str
	End Method
	
	Rem
	bbdoc: Read the next token
	returns: The next TToken
	about:
	On an error, ReadObject throws a TToken object as an exception.
	This contains one bufferful of data from the stream.
	This also happens on EOF, so you should check for that.
	End Rem
	Method ReadObject:TToken()
		Return _Next()
	End Method
	
	Rem
	bbdoc: Create a lexer stream
	returns: A lexer stream
	End Rem
	Function Create:TLexerStream(in:TStream, l:TLexer)
		Local s:TLexerStream = New TLexerStream
		s._in = in
		s._l = l
		Return s
	End Function
	
End Type


Type TLexerStreamFactory Extends TStreamFactory
	
	' The lexer to wrap streams with, and the protocol name it answers to
	Field _l:TLexer, _p:String
	
	' Answers ReadStream/OpenStream requests for the registered protocol
	' by wrapping the opened stream in a TLexerStream.
	Method CreateStream:TStream(url:Object, proto:String, path:String, readable:Int,writeable:Int)
		If proto = _p
			Local source:TStream = OpenStream(path, readable, writeable)
			Return TLexerStream.Create( source, _l )
		End If
		Return Null
	End Method
	
	' Creates a factory for the given lexer/protocol pair.
	' NOTE(review): the instance is not returned or stored here; registration
	' presumably happens as a side effect of New on TStreamFactory - confirm
	' against BRL.Stream.
	Function Create(l:TLexer, protocol:String)
		Local factory:TLexerStreamFactory = New TLexerStreamFactory
		factory._l = l
		factory._p = protocol
	End Function
	
End Type
