_parser.py

  1. """Handwritten parser of dependency specifiers.
  2. The docstring for each __parse_* function contains EBNF-inspired grammar representing
  3. the implementation.
  4. """
  5. from __future__ import annotations
  6. import ast
  7. from typing import List, Literal, NamedTuple, Sequence, Tuple, Union
  8. from ._tokenizer import DEFAULT_RULES, Tokenizer
  9. class Node:
  10. __slots__ = ("value",)
  11. def __init__(self, value: str) -> None:
  12. self.value = value
  13. def __str__(self) -> str:
  14. return self.value
  15. def __repr__(self) -> str:
  16. return f"<{self.__class__.__name__}({self.value!r})>"
  17. def serialize(self) -> str:
  18. raise NotImplementedError
  19. class Variable(Node):
  20. __slots__ = ()
  21. def serialize(self) -> str:
  22. return str(self)
  23. class Value(Node):
  24. __slots__ = ()
  25. def serialize(self) -> str:
  26. return f'"{self}"'
  27. class Op(Node):
  28. __slots__ = ()
  29. def serialize(self) -> str:
  30. return str(self)
  31. MarkerLogical = Literal["and", "or"]
  32. MarkerVar = Union[Variable, Value]
  33. MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
  34. MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
  35. MarkerList = List[Union["MarkerList", MarkerAtom, MarkerLogical]]
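
# Illustrative note (added; not part of the original module): for a marker
# such as
#     python_version < "3.8" and sys_platform == "linux"
# _parse_marker below produces a flat MarkerList of items joined by the
# logical operators:
#     [
#         (Variable("python_version"), Op("<"), Value("3.8")),
#         "and",
#         (Variable("sys_platform"), Op("=="), Value("linux")),
#     ]
# Parenthesized groups appear as nested lists (see _parse_marker_atom).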


class ParsedRequirement(NamedTuple):
    name: str
    url: str
    extras: list[str]
    specifier: str
    marker: MarkerList | None


# --------------------------------------------------------------------------------------
# Recursive descent parser for dependency specifier
# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
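
# Usage sketch (added; illustrative rather than authoritative):
#     parse_requirement('name[extra]>=1.0; python_version < "3.8"')
# returns a ParsedRequirement with
#     name='name', url='', extras=['extra'], specifier='>=1.0',
#     marker=[(Variable('python_version'), Op('<'), Value('3.8'))]
# Malformed input raises the tokenizer's syntax error instead of returning.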


def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)


def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> tuple[str, str, MarkerList | None]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?
    """
    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=url_start,
            expected="semicolon (after URL and whitespace)",
        )
    else:
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            expected=(
                "comma (within version specifier), semicolon (after version specifier)"
                if specifier
                else "semicolon (after name with no version specifier)"
            ),
        )

    return (url, specifier, marker)


def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, expected: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?
    """
    if not tokenizer.check("SEMICOLON"):
        tokenizer.raise_syntax_error(
            f"Expected {expected} or end",
            span_start=span_start,
            span_end=None,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker


def _parse_extras(tokenizer: Tokenizer) -> list[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
    """
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    with tokenizer.enclosing_tokens(
        "LEFT_BRACKET",
        "RIGHT_BRACKET",
        around="extras",
    ):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras


def _parse_extras_list(tokenizer: Tokenizer) -> list[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: list[str] = []

    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
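
# Example (added; illustrative): for "pkg[security, socks]" the extras parse
# to ["security", "socks"], while two names with no comma between them, as in
# "pkg[security socks]", hit the "Expected comma between extra names" branch.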


def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers


def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
    """
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        span_start = tokenizer.position
        parsed_specifiers += tokenizer.read().text
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
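
# Example (added; illustrative): ">=1.0,!=1.5" is returned verbatim; the loop
# appends each SPECIFIER and COMMA token's text while the surrounding
# whitespace is consumed rather than copied into the result string.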


# --------------------------------------------------------------------------------------
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
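
# Usage sketch (added; illustrative rather than authoritative):
#     parse_marker('os_name == "nt"')
# returns [(Variable('os_name'), Op('=='), Value('nt'))]; any trailing text
# fails the END expectation in _parse_full_marker below.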


def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    retval = _parse_marker(tokenizer)
    tokenizer.expect("END", expected="end of marker expression")
    return retval


def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)*
    """
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        token = tokenizer.read()
        expr_right = _parse_marker_atom(tokenizer)
        expression.extend((token.text, expr_right))
    return expression


def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """
    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
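
# Example (added; illustrative): in
#     (os_name == "nt" or sys_platform == "win32") and python_version < "3.8"
# the parenthesized group comes back from _parse_marker as a nested list, so
# the outer MarkerList holds [inner_list, "and", item] instead of being
# flattened.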


def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    marker_var_left = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    marker_op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    marker_var_right = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    return (marker_var_left, marker_op, marker_var_right)


def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:  # noqa: RET503
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )


def process_env_var(env_var: str) -> Variable:
    if env_var in ("platform_python_implementation", "python_implementation"):
        return Variable("platform_python_implementation")
    else:
        return Variable(env_var)
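
# Note (added): _parse_marker_var has already rewritten dotted spellings such
# as "os.name" to "os_name", so only underscore forms reach process_env_var;
# the alias "python_implementation" is folded into
# "platform_python_implementation".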


def process_python_str(python_str: str) -> Value:
    value = ast.literal_eval(python_str)
    return Value(str(value))
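
# Example (added; illustrative): process_python_str("'3.8'") evaluates the
# quoted literal with ast.literal_eval and wraps it as Value('3.8').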


def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of <=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )