Commit

feat(requirements): update to latest mathy_core/mathy_envs versions (#57)
justindujardin authored Apr 8, 2021
1 parent d8a4cc0 commit 7983d88
Showing 3 changed files with 14 additions and 28 deletions.
4 changes: 2 additions & 2 deletions libraries/mathy_python/requirements.txt
@@ -5,8 +5,8 @@ srsly
 pydantic>=1.0.0
 typer<0.4.0,>=0.3.2
 wasabi
-mathy_core>=0.8.0,<0.9.0
-mathy_envs[gym]>=0.9.2,<0.10.0
+mathy_core>=0.8.4,<0.9.0
+mathy_envs[gym]>=0.10.0,<0.11.0
 fragile==0.0.47
 tqdm>=4.43.0
 # new python feature backports
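
A quick way to confirm an existing environment already satisfies the new pins — a minimal sketch using only the standard library, assuming the installed distribution names match "mathy_core" and "mathy_envs" as written in requirements.txt:

from importlib.metadata import version  # Python 3.8+

# Print installed versions so they can be checked against the new ranges:
# mathy_core>=0.8.4,<0.9.0 and mathy_envs>=0.10.0,<0.11.0
for dist in ("mathy_core", "mathy_envs"):
    print(dist, version(dist))
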
19 changes: 6 additions & 13 deletions libraries/website/docs/snippets/cas/tokenizer_manual.ipynb
@@ -12,21 +12,14 @@
 "!pip install mathy --upgrade\n",
 "from typing import List\n",
 "\n",
-"from mathy_core import (\n",
-"    Token,\n",
-"    TokenConstant,\n",
-"    TokenEOF,\n",
-"    Tokenizer,\n",
-"    TokenPlus,\n",
-"    TokenVariable,\n",
-")\n",
+"from mathy_core import Token, TOKEN_TYPES, Tokenizer\n",
 "\n",
 "manual_tokens: List[Token] = [\n",
-"    Token(\"4\", TokenConstant),\n",
-"    Token(\"x\", TokenVariable),\n",
-"    Token(\"+\", TokenPlus),\n",
-"    Token(\"2\", TokenConstant),\n",
-"    Token(\"\", TokenEOF),\n",
+"    Token(\"4\", TOKEN_TYPES.Constant),\n",
+"    Token(\"x\", TOKEN_TYPES.Variable),\n",
+"    Token(\"+\", TOKEN_TYPES.Plus),\n",
+"    Token(\"2\", TOKEN_TYPES.Constant),\n",
+"    Token(\"\", TOKEN_TYPES.EOF),\n",
 "]\n",
 "auto_tokens: List[Token] = Tokenizer().tokenize(\"4x + 2\")\n",
 "\n",
19 changes: 6 additions & 13 deletions libraries/website/docs/snippets/cas/tokenizer_manual.py
@@ -1,20 +1,13 @@
 from typing import List
 
-from mathy_core import (
-    Token,
-    TokenConstant,
-    TokenEOF,
-    Tokenizer,
-    TokenPlus,
-    TokenVariable,
-)
+from mathy_core import Token, TOKEN_TYPES, Tokenizer
 
 manual_tokens: List[Token] = [
-    Token("4", TokenConstant),
-    Token("x", TokenVariable),
-    Token("+", TokenPlus),
-    Token("2", TokenConstant),
-    Token("", TokenEOF),
+    Token("4", TOKEN_TYPES.Constant),
+    Token("x", TOKEN_TYPES.Variable),
+    Token("+", TOKEN_TYPES.Plus),
+    Token("2", TOKEN_TYPES.Constant),
+    Token("", TOKEN_TYPES.EOF),
 ]
 auto_tokens: List[Token] = Tokenizer().tokenize("4x + 2")
 
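
Put together, the updated snippet (shared by both the notebook and the .py file above) runs as below — a sketch assuming mathy_core>=0.8.4, where Token exposes the value and type attributes implied by its constructor arguments:

from typing import List

from mathy_core import Token, TOKEN_TYPES, Tokenizer

# Build the token stream by hand using the TOKEN_TYPES constants...
manual_tokens: List[Token] = [
    Token("4", TOKEN_TYPES.Constant),
    Token("x", TOKEN_TYPES.Variable),
    Token("+", TOKEN_TYPES.Plus),
    Token("2", TOKEN_TYPES.Constant),
    Token("", TOKEN_TYPES.EOF),
]
# ...and let the Tokenizer produce the equivalent stream from raw text.
auto_tokens: List[Token] = Tokenizer().tokenize("4x + 2")

# Compare the two streams token by token (the value/type attribute names are
# an assumption based on the Token constructor shown in the diff above).
assert len(manual_tokens) == len(auto_tokens)
for manual, auto in zip(manual_tokens, auto_tokens):
    assert manual.value == auto.value
    assert manual.type == auto.type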
