From f77c1db22a1ed7d186574f6771be4c5751f1eb05 Mon Sep 17 00:00:00 2001
From: Hyeonung Baek
Date: Sat, 22 Jan 2022 03:04:00 +0900
Subject: [PATCH] Revert Lex.tokenize to be non-lazy

---
 lex.ml | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/lex.ml b/lex.ml
index 93afc86..6827430 100644
--- a/lex.ml
+++ b/lex.ml
@@ -57,13 +57,13 @@ let rec partition_while f seq : 'a Seq.t * 'a Seq.t =
 let tokenize (str : string) : tokens =
   let seq = String.to_seq str in
-  let rec aux seq () =
+  let rec aux seq =
     let open Token in
     match seq () with
-    | Seq.Nil -> Seq.Nil
+    | Seq.Nil -> Seq.empty
     | Seq.Cons (x, seq) ->
       if is_whitespace x then
-        aux seq () (* skip whitespace *)
+        aux seq (* skip whitespace *)
       else if x = '"' then
         let str, seq = partition_while ((<>) '"') seq in
@@ -71,7 +71,7 @@ let tokenize (str : string) : tokens =
         begin match seq () with
         | Seq.Nil -> raise Unclosed_quote
         | Seq.Cons (x, seq) ->
-          if x = '"' then Seq.Cons (Value str, aux seq)
+          if x = '"' then Seq.cons (Value str) (aux seq)
           else raise Unclosed_quote
         end
@@ -83,16 +83,16 @@ let tokenize (str : string) : tokens =
         then Float (float_of_string n)
         else Int (int_of_string n)
       in
-      Seq.Cons (Value n, aux seq)
+      Seq.cons (Value n) (aux seq)
     else if is_ident_start x then
       let id, seq = partition_while is_ident seq in
       let id = String.of_seq @@ Seq.cons x id in
-      Seq.Cons (Ident id, aux seq)
+      Seq.cons (Ident id) (aux seq)
     else
       match find_token @@ Seq.cons x seq with
       | None -> raise Token_not_found
-      | Some (t, s) -> Seq.Cons (t, aux s)
+      | Some (t, seq) -> Seq.cons t (aux seq)
   in
   aux seq
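
Note (not part of the patch): a minimal sketch of the two Seq shapes the
diff swaps, using a hypothetical countdown function in place of the real
tokenizer. With the extra () parameter the recursive call sits inside a
thunk and only runs when the consumer forces the next node; dropping the
parameter and building nodes with Seq.cons evaluates the recursive call
immediately, which is the non-lazy behaviour this patch restores.

(* Lazy shape: [countdown_lazy n] has type [int Seq.t] because of the
   trailing [()]; the recursive call is a thunk, forced one node at a
   time by the consumer. *)
let rec countdown_lazy n () =
  if n = 0 then Seq.Nil
  else Seq.Cons (n, countdown_lazy (n - 1))

(* Eager shape: without the [()] parameter, [countdown_eager (n - 1)]
   runs right away, so the whole sequence is built before it is
   returned. *)
let rec countdown_eager n =
  if n = 0 then Seq.empty
  else Seq.cons n (countdown_eager (n - 1))

let () =
  (* Both produce the same elements; only the evaluation order differs. *)
  countdown_lazy 3 |> List.of_seq |> List.iter (Printf.printf "%d ");
  print_newline ();
  countdown_eager 3 |> List.of_seq |> List.iter (Printf.printf "%d ");
  print_newline ()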