use shlex builtin to tokenize simple elisp expressions

riscy 2021-08-29 12:51:44 -07:00
parent 9e6b032b57
commit 2e67981344

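The change replaces an Emacs round-trip (writing the expression to a temp file and reading it back through a build script) with Python's standard-library shlex module, configured so that double quotes are the only quote character, semicolons start comments, and ':' and '-' count as word characters. A minimal standalone sketch of the same idea, using the doctest example from the diff below (the wrapper name tokenize is illustrative, not part of the commit):

    import shlex
    from typing import List

    def tokenize(expression: str) -> List[str]:
        """Tokenize a simple elisp expression, keeping quoted strings whole."""
        lexer = shlex.shlex(expression)
        lexer.quotes = '"'                        # only double quotes delimit strings
        lexer.commenters = ';'                    # elisp comments run from ';' to end of line
        lexer.wordchars = lexer.wordchars + ':-'  # keep :keywords and hyphenated symbols intact
        return list(lexer)

    print(tokenize('(shx :repo "riscy/xyz" :fetcher github) ; comment'))
    # ['(', 'shx', ':repo', '"riscy/xyz"', ':fetcher', 'github', ')']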

@@ -17,6 +17,7 @@ import glob
 import operator
 import os
 import re
+import shlex
 import shutil
 import subprocess
 import sys
@@ -186,8 +187,8 @@ def _files_in_default_recipe(recipe: str, elisp_dir: str) -> List[str]:
 def _default_recipe(recipe: str) -> str:
     """Simplify the given recipe, usually to the default.
-    >>> _default_recipe('(recipe :repo a/b :fetcher hg :branch na :files ("*.el"))')
-    '(recipe :repo a/b :fetcher hg :branch na)'
+    >>> _default_recipe('(recipe :repo "a/b" :fetcher hg :branch na :files ("*.el"))')
+    '(recipe :repo "a/b" :fetcher hg :branch na)'
     >>> _default_recipe('(recipe :fetcher hg :url "a/b")')
     '(recipe :url "a/b" :fetcher hg)'
     """
@@ -202,26 +203,16 @@ def _default_recipe(recipe: str) -> str:
     return '(' + ' '.join(operator.itemgetter(*indices)(tokens)) + ')'
 
 
-@functools.lru_cache()
 def _tokenize_expression(expression: str) -> List[str]:
     """Turn an elisp expression into a list of tokens.
     >>> _tokenize_expression('(shx :repo "riscy/xyz" :fetcher github) ; comment')
     ['(', 'shx', ':repo', '"riscy/xyz"', ':fetcher', 'github', ')']
     """
-    with tempfile.TemporaryDirectory() as tmpdir:
-        with open(os.path.join(tmpdir, 'scratch'), 'w') as scratch:
-            scratch.write(expression)
-        parsed_expression = run_build_script(
-            f"""
-            (send-string-to-terminal
-              (format "%S" (with-temp-buffer (insert-file-contents "{scratch.name}")
-                             (read (current-buffer)))))
-            """
-        )
-    parsed_expression = parsed_expression.replace('(', ' ( ')
-    parsed_expression = parsed_expression.replace(')', ' ) ')
-    tokenized_expression = parsed_expression.split()
-    return tokenized_expression
+    lexer = shlex.shlex(expression)
+    lexer.quotes = '"'
+    lexer.commenters = ';'
+    lexer.wordchars = lexer.wordchars + ':-'
+    return list(lexer)
 
 
 def package_name(recipe: str) -> str:
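Two side effects of the switch are worth noting; both are a reading of the diff rather than anything stated in the commit message. First, since '/' is not a shlex word character, an unquoted :repo value such as a/b now splits into three tokens, which is presumably why the _default_recipe doctest changes to the quoted form "a/b". Second, the @functools.lru_cache() decorator is dropped, presumably because the tokenizer no longer shells out to Emacs and is cheap to call repeatedly. A quick interactive check of the first point:

    import shlex

    lexer = shlex.shlex('a/b')
    lexer.wordchars = lexer.wordchars + ':-'
    print(list(lexer))  # ['a', '/', 'b'] -- an unquoted repo slug no longer survives as one token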