Lazy lexing
parent 7d6c833e58
commit fd6bf1346e
1 changed file with 6 additions and 6 deletions

lex.ml
@@ -38,24 +38,24 @@ let rec partition_while f seq : 'a Seq.t * 'a Seq.t =
 let tokenize (str : string) : tokens =
   let seq = String.to_seq str in
-  let rec aux seq =
+  let rec aux seq () =
     let open Token in
     match seq () with
-    | Seq.Nil -> Seq.empty
+    | Seq.Nil -> Seq.Nil
     | Seq.Cons (x, s) ->
       if is_whitespace x then
-        aux s (* skip whitespace *)
+        aux s () (* skip whitespace *)
       else if is_digit x then
         let n, s = partition_while is_num seq in
         let n = int_of_string @@ String.of_seq n in
-        Seq.cons (Int n) (aux s)
+        Seq.Cons (Int n, aux s)
       else if is_ident_start x then
         let id, s = partition_while is_ident seq in
         let id = String.of_seq id in
-        Seq.cons (Ident id) (aux s)
+        Seq.Cons (Ident id, aux s)
       else
         match find_token seq with
         | None -> raise Token_not_found
-        | Some (t, s) -> Seq.cons t (aux s)
+        | Some (t, s) -> Seq.Cons (t, aux s)
   in
   aux seq
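The change makes tokenization lazy: aux now takes a trailing unit argument, so a partial application like aux s is itself a Seq.t (a thunk of type unit -> Token.t Seq.node) rather than a fully built sequence, and each branch returns a Seq.node directly instead of forcing the tail through Seq.cons / Seq.empty. Below is a minimal sketch of the same eager-versus-lazy pattern on the standard Seq module, not taken from this repository; countdown_eager and countdown_lazy are hypothetical names, and Seq.take assumes OCaml 4.14 or newer.

(* Eager: the recursive call is evaluated before Seq.cons can build the node,
   so the entire sequence is constructed up front. *)
let rec countdown_eager n : int Seq.t =
  if n = 0 then Seq.empty
  else Seq.cons n (countdown_eager (n - 1))

(* Lazy: with the trailing unit argument, [countdown_lazy (n - 1)] is a partial
   application of type [int Seq.t]; the next node is computed only on demand. *)
let rec countdown_lazy n () : int Seq.node =
  if n = 0 then Seq.Nil
  else Seq.Cons (n, countdown_lazy (n - 1))

let () =
  (* Forces only the first three nodes; the rest of the sequence is never built. *)
  Seq.take 3 (countdown_lazy 1_000_000) |> Seq.iter (Printf.printf "%d ")

Applied to tokenize, this means a caller that consumes only a prefix of the token stream pays only for the characters actually scanned.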