diff --git a/src/lenient_parse.gleam b/src/lenient_parse.gleam
index b056a77..642b357 100644
--- a/src/lenient_parse.gleam
+++ b/src/lenient_parse.gleam
@@ -1,19 +1,21 @@
 import gleam/bool
 import lenient_parse/internal/parser
+import lenient_parse/internal/tokenizer
 import parse_error.{type ParseError, InvalidBaseValue}
 
 /// Converts a string to a float using a more lenient parsing method than
 /// gleam's `float.parse()`. It behaves similarly to Python's `float()` built-in
 /// function.
 pub fn to_float(text text: String) -> Result(Float, ParseError) {
-  text |> parser.parse_float
+  let tokens = text |> tokenizer.tokenize_float
+  tokens |> parser.parse_float
 }
 
 /// Converts a string to an integer using a more lenient parsing method than
 /// gleam's `int.parse()`. It behaves similarly to Python's `int()` built-in
 /// function, using a default base of 10.
 pub fn to_int(text text: String) -> Result(Int, ParseError) {
-  text |> parser.parse_int(base: 10)
+  text |> to_int_with_base(base: 10)
 }
 
 /// Converts a string to an integer using a more lenient parsing method than
@@ -25,5 +27,6 @@ pub fn to_int_with_base(
 ) -> Result(Int, ParseError) {
   let is_valid_base = base >= 2 && base <= 36
   use <- bool.guard(!is_valid_base, Error(InvalidBaseValue(base)))
-  text |> parser.parse_int(base: base)
+  let tokens = text |> tokenizer.tokenize_int(base: base)
+  tokens |> parser.parse_int(base: base)
 }
diff --git a/src/lenient_parse/internal/parser.gleam b/src/lenient_parse/internal/parser.gleam
index 3765d6f..c04ee5b 100644
--- a/src/lenient_parse/internal/parser.gleam
+++ b/src/lenient_parse/internal/parser.gleam
@@ -12,15 +12,13 @@ import lenient_parse/internal/token.{
 }
 
 import lenient_parse/internal/parse_data.{type ParseData, ParseData}
-import lenient_parse/internal/tokenizer
 import parse_error.{
   type ParseError, EmptyString, InvalidDecimalPosition,
   InvalidExponentSymbolPosition, InvalidUnderscorePosition, OutOfBaseRange,
   UnknownCharacter, WhitespaceOnlyString,
 }
 
-pub fn parse_float(text text: String) -> Result(Float, ParseError) {
-  let tokens = text |> tokenizer.tokenize_float
+pub fn parse_float(tokens tokens: List(Token)) -> Result(Float, ParseError) {
   let index = 0
 
   let parse_data = parse_whitespace(tokens, index)
@@ -103,8 +101,10 @@ pub fn parse_float(text text: String) -> Result(Float, ParseError) {
 }
 
-pub fn parse_int(text text: String, base base: Int) -> Result(Int, ParseError) {
-  let tokens = tokenizer.tokenize_int(text: text, base: base)
+pub fn parse_int(
+  tokens tokens: List(Token),
+  base base: Int,
+) -> Result(Int, ParseError) {
   let index = 0
 
   let parse_data = parse_whitespace(tokens, index)
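
For reference, a minimal sketch of how the public API is exercised after this refactor, assuming the module paths shown in the diff; the sample inputs and expected values follow the Python-like semantics described in the doc comments, not output taken from this patch:

```gleam
import gleam/io
import lenient_parse

pub fn main() {
  // to_float now tokenizes the input first, then parses the token list.
  // Python-like float semantics: "1.5e2" should yield Ok(150.0).
  let assert Ok(f) = lenient_parse.to_float("1.5e2")
  io.debug(f)

  // to_int delegates to to_int_with_base with a default base of 10.
  let assert Ok(i) = lenient_parse.to_int("42")
  io.debug(i)

  // Bases outside 2..36 are rejected up front with InvalidBaseValue,
  // before any tokenizing happens.
  let assert Error(_) = lenient_parse.to_int_with_base("ff", base: 40)
}
```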