diff --git a/src/plotter/parser.py b/src/plotter/parser.py
index 3b35d79..1a0ef1d 100644
--- a/src/plotter/parser.py
+++ b/src/plotter/parser.py
@@ -27,6 +27,7 @@ class TokenType(Enum):
     SEPARATOR = 2
     FLOAT = 3
     FUNCTION = 4
+    CONSTANT = 5
 
 
 # The token can optionally capture a string value.
@@ -43,6 +44,7 @@ operators: set[str] = {"+", "-", "*", "/", "^"}
 functions: set[str] = {"abs", "cos", "sin", "tan", "atan", "exp", "ln", "log"}
 separators: set[str] = {"(", ")"}
 variables: set[str] = {"x"}
+constants: set[str] = {"e", "pi"}
 
 
 # The lexer is a generator function that yields token as it scans the input string
@@ -75,13 +77,18 @@ def lex(input: str) -> Iterator[Token]:
             i += 1
             continue
 
-        # functions
+        # functions and constants
        if char.isalpha():
             j = i + 1
             while j < len(input) and input[j].isalpha():
                 j += 1
             name = input[i:j]
 
+            if name in constants:
+                yield Token(type=TokenType.CONSTANT, value=name)
+                i = j
+                continue
+
             if name not in functions:
                 yield Token(
                     type=TokenType.ERROR, value=f"unknown function name '{name}'"
@@ -134,12 +141,18 @@ class Expression(ABC):
 class Atom(Expression):
     token: Token
 
+    _constants = {
+        "pi": math.pi,
+        "e": math.e,
+    }
+
     def eval(self, x: float) -> float:
         if self.token.type == TokenType.VARIABLE:
             return x
         if self.token.type == TokenType.FLOAT:
             return float(self.token.value or 0)
-
+        if self.token.type == TokenType.CONSTANT:
+            return self._constants[self.token.value]
         return 0.0
 
 
diff --git a/tests/test_parser.py b/tests/test_parser.py
index f641239..8913053 100644
--- a/tests/test_parser.py
+++ b/tests/test_parser.py
@@ -1,3 +1,4 @@
+import math
 import pytest
 
 from plotter.parser import (
@@ -98,6 +99,21 @@
             Token(type=TokenType.FLOAT, value="0.5"),
         ],
     ),
+    (
+        "pi * cos(2.4 * x) ^ e",
+        [
+            Token(type=TokenType.CONSTANT, value="pi"),
+            Token(type=TokenType.OPERATOR, value="*"),
+            Token(type=TokenType.FUNCTION, value="cos"),
+            Token(type=TokenType.SEPARATOR, value="("),
+            Token(type=TokenType.FLOAT, value="2.4"),
+            Token(type=TokenType.OPERATOR, value="*"),
+            Token(type=TokenType.VARIABLE, value="x"),
+            Token(type=TokenType.SEPARATOR, value=")"),
+            Token(type=TokenType.OPERATOR, value="^"),
+            Token(type=TokenType.CONSTANT, value="e"),
+        ],
+    ),
 ],
 )
 def test_lexer(expression, expected):
@@ -108,6 +124,13 @@
     "expression, expected",
     argvalues=[
         ("3.14", Atom(Token(type=TokenType.FLOAT, value="3.14"))),
+        ("pi", Atom(Token(type=TokenType.CONSTANT, value="pi"))),
+        (
+            "exp(e)",
+            FunctionExpression(
+                function="exp", argument=Atom(Token(type=TokenType.CONSTANT, value="e"))
+            ),
+        ),
         (
             "2 + 2",
             InfixExpression(
@@ -246,6 +269,8 @@ def test_unary_minus(expression, expected):
         ("(-2)^3", 0, -8),  # (-2)^3 = -8
         ("-2^2", 0, -4),  # -(2^2) = -4
         ("(-2)^2", 0, 4),  # (-2)^2 = 4
+        ("pi", 0, math.pi),
+        ("e", 0, math.e),
     ],
 )
 def test_unary_minus_eval(expression, x, expected):