pax_global_header00006660000000000000000000000064146216370350014521gustar00rootroot0000000000000052 comment=67a8fd1660140104ae9e35783690d58b080fcd79 ast_decompiler-0.8.0/000077500000000000000000000000001462163703500145205ustar00rootroot00000000000000ast_decompiler-0.8.0/.editorconfig000066400000000000000000000003051462163703500171730ustar00rootroot00000000000000root = true [*.{py,rst,md,yml,yaml,toml,json}] trim_trailing_whitespace = true insert_final_newline = true indent_style = space [*.{py,toml,json}] indent_size = 4 [*.{yml,yaml}] indent_size = 2 ast_decompiler-0.8.0/.github/000077500000000000000000000000001462163703500160605ustar00rootroot00000000000000ast_decompiler-0.8.0/.github/workflows/000077500000000000000000000000001462163703500201155ustar00rootroot00000000000000ast_decompiler-0.8.0/.github/workflows/fuzz.yml000066400000000000000000000017121462163703500216370ustar00rootroot00000000000000name: Fuzz on: [push, pull_request] jobs: build: # We want to run on external PRs, but not on our own internal PRs as they'll be run # by the push to the branch. Without this if check, checks are duplicated since # internal PRs match both the push and pull_request events. if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip python -m pip install --upgrade tox - name: Run fuzz tests run: | tox -e fuzz ast_decompiler-0.8.0/.github/workflows/publish.yml000066400000000000000000000034411462163703500223100ustar00rootroot00000000000000# Based on # https://packaging.python.org/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ name: Test builds and publish Python distribution to PyPI on: release: types: [published] push: branches: [master] pull_request: permissions: contents: read concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true jobs: build: name: Build distribution runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install pypa/build run: | # Be wary of running `pip install` here, since it becomes easy for us to # accidentally pick up typing_extensions as installed by a dependency python -m pip install --upgrade build python -m pip list - name: Build a binary wheel and a source tarball run: python -m build - name: Store the distribution packages uses: actions/upload-artifact@v4 with: name: python-package-distributions path: dist/ publish-to-pypi: name: >- Publish Python distribution to PyPI if: github.event_name == 'release' # only publish to PyPI on releases needs: - build runs-on: ubuntu-latest environment: name: publish url: https://pypi.org/p/ast-decompiler permissions: id-token: write # IMPORTANT: mandatory for trusted publishing steps: - name: Download all the dists uses: actions/download-artifact@v4 with: name: python-package-distributions path: dist/ - name: Publish distribution to PyPI uses: pypa/gh-action-pypi-publish@release/v1 ast_decompiler-0.8.0/.github/workflows/test.yml000066400000000000000000000010641462163703500216200ustar00rootroot00000000000000name: ast_decompiler on: - push - pull_request jobs: build: 
runs-on: ubuntu-latest strategy: matrix: python-version: [3.8, 3.9, "3.10", "3.11", "3.12", "3.13-dev"] steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip install tox tox-gh-actions - name: Test with tox run: tox ast_decompiler-0.8.0/.gitignore000066400000000000000000000001311462163703500165030ustar00rootroot00000000000000.cache/ *.pyc build/ dist/ ast_decompiler.egg-info/ .tox/ .hypothesis/ env/ venv/ .venv/ ast_decompiler-0.8.0/CHANGELOG.md000066400000000000000000000026371462163703500163410ustar00rootroot00000000000000version 0.8.0 (May 17, 2024) - Support Python 3.12 and 3.13; stop testing Python 3.6 and 3.7 - Fix all DeprecationWarnings in the codebase version 0.7.0 (October 3, 2022) - Stop adding redundant parentheses to `complex` numbers with no real part and a negative imaginary part (thanks to Alex Waygood) version 0.6.0 (June 6, 2022) - Support Python 3.11 - Fix bug where annotations on `*args` and `**kwargs` were dropped - Stop adding redundant parentheses to tuple subscripts on Python 3.8 and lower (thanks to Alex Waygood) version 0.5.0 (May 10, 2022) - Add `py.typed` - Fix decompilation of f-strings containing escaped braces - Preserve literal newlines in docstrings - Fix decompilation of complex infinity - Add support for Python 3.10 pattern matching - Fix incorrect decompilation of lambdas in comprehension guards on Python 3.9 and higher - Fix decompilation for dict `**` unpacking - Modernize CI and packaging setup - Fix tests under Python 3.9 - Add explicit LICENSE file version 0.4.0 (May 7, 2020) - Support Python 3.7 and 3.8 (thanks to Luke Plant) - Allow keyword-only arguments without default values (thanks to Shantanu Jain) version 0.3.2 (August 22, 2017) - More f-string fixes (thanks to Shantanu Jain) version 0.3.1 (August 11, 2017) - Fix handling of f-strings version 0.3 (January 7, 2017) - Support Python 3.6 version 0.2 (July 14, 2016) - Support Python 3 version 0.1 (May 7, 2016) - Initial version ast_decompiler-0.8.0/LICENSE000066400000000000000000000236751462163703500155420ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. 
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS ast_decompiler-0.8.0/README.rst000066400000000000000000000010151462163703500162040ustar00rootroot00000000000000************** ast_decompiler ************** ast_decompiler is a module for generating Python code given an AST. A usage example:: >> import ast >> from ast_decompiler import decompile >> decompile(ast.parse('(a + b) * c')) (a + b) * c This module supports Python 3.8 through 3.13. ==================== Tests and formatting ==================== To run the tests, install ``pytest`` in a virtual environment. Then, either use ``tox``, or simply run ``pytest tests/``. The code is formatted with Black. ast_decompiler-0.8.0/ast_decompiler/000077500000000000000000000000001462163703500175125ustar00rootroot00000000000000ast_decompiler-0.8.0/ast_decompiler/__init__.py000066400000000000000000000001641462163703500216240ustar00rootroot00000000000000""" Generate Python code given an AST. """ __version__ = "0.7.0" from .decompiler import decompile as decompile ast_decompiler-0.8.0/ast_decompiler/check.py000066400000000000000000000016031462163703500211410ustar00rootroot00000000000000import ast from ast_decompiler import decompile import difflib def check(code: str) -> None: """Checks that the code remains the same when decompiled and re-parsed.""" tree = ast.parse(code) new_code = decompile(tree) try: new_tree = ast.parse(new_code) except SyntaxError as e: if e.lineno is None: raise print(">>> syntax error:") lineno = e.lineno - 1 min_lineno = max(0, lineno - 3) max_lineno = lineno + 3 for line in new_code.splitlines()[min_lineno:max_lineno]: print(line) raise dumped = ast.dump(ast.parse(code)) new_dumped = ast.dump(new_tree) if dumped != new_dumped: print(code) print(new_code) for line in difflib.unified_diff(dumped.split(), new_dumped.split()): print(line) assert False, f"{dumped} != {new_dumped}" ast_decompiler-0.8.0/ast_decompiler/decompiler.py000066400000000000000000001157601462163703500222210ustar00rootroot00000000000000""" Implementation of the decompiler class. 
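A minimal usage sketch (decompile is the module's public entry point; the keyword
arguments shown mirror its signature below, and the regenerated source is
equivalent to, but not necessarily byte-identical with, the input):

    import ast
    from ast_decompiler import decompile

    tree = ast.parse("(a + b) * c")
    print(decompile(tree))                                  # (a + b) * c
    print(decompile(tree, indentation=2, line_length=79))   # same code, custom formatting options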
""" import ast import cmath from contextlib import contextmanager import math import sys from typing import Any, Dict, Generator, Iterable, Optional, Sequence, Type, Union _OP_TO_STR = { ast.Add: "+", ast.Sub: "-", ast.Mult: "*", ast.Div: "/", ast.Mod: "%", ast.Pow: "**", ast.LShift: "<<", ast.RShift: ">>", ast.BitOr: "|", ast.BitXor: "^", ast.BitAnd: "&", ast.FloorDiv: "//", ast.MatMult: "@", ast.Invert: "~", ast.Not: "not ", ast.UAdd: "+", ast.USub: "-", ast.Eq: "==", ast.NotEq: "!=", ast.Lt: "<", ast.LtE: "<=", ast.Gt: ">", ast.GtE: ">=", ast.Is: "is", ast.IsNot: "is not", ast.In: "in", ast.NotIn: "not in", ast.And: "and", ast.Or: "or", } class _CallArgs(ast.AST): """Used as an entry in the precedence table. Needed to convey the high precedence of the callee but low precedence of the arguments. """ def __init__(self, args: Sequence[ast.AST]) -> None: self.args = args _PRECEDENCE: Dict[Type[ast.AST], int] = { _CallArgs: -1, ast.Or: 0, ast.And: 1, ast.Not: 2, ast.Compare: 3, ast.BitOr: 4, ast.BitXor: 5, ast.BitAnd: 6, ast.LShift: 7, ast.RShift: 7, ast.Add: 8, ast.Sub: 8, ast.Mult: 9, ast.Div: 9, ast.FloorDiv: 9, ast.Mod: 9, ast.MatMult: 9, ast.UAdd: 10, ast.USub: 10, ast.Invert: 10, ast.Pow: 11, ast.Subscript: 12, ast.Call: 12, ast.Attribute: 12, } def decompile( ast: ast.AST, indentation: int = 4, line_length: int = 100, starting_indentation: int = 0, ) -> str: """Decompiles an AST into Python code. Arguments: - ast: code to decompile, using AST objects as generated by the standard library ast module - indentation: indentation level of lines - line_length: if lines become longer than this length, ast_decompiler will try to break them up (but it will not necessarily succeed in all cases) - starting_indentation: indentation level at which to start producing code """ decompiler = Decompiler( indentation=indentation, line_length=line_length, starting_indentation=starting_indentation, ) return decompiler.run(ast) # helper ast nodes to make decompilation easier class KeyValuePair(ast.AST): """A key-value pair as used in a dictionary display.""" _fields = ("key", "value") def __init__(self, key: Optional[ast.AST], value: ast.AST) -> None: self.key = key self.value = value class StarArg(ast.AST): """A * argument.""" _fields = ("arg",) def __init__(self, arg: ast.arg) -> None: self.arg = arg class DoubleStarArg(ast.AST): """A ** argument.""" _fields = ("arg",) def __init__(self, arg: ast.arg) -> None: self.arg = arg class KeywordArg(ast.AST): """A x=3 keyword argument in a function definition.""" _fields = ("arg", "value") def __init__(self, arg: ast.arg, value: Optional[ast.AST]) -> None: self.arg = arg self.value = value class Decompiler(ast.NodeVisitor): def __init__( self, indentation: int, line_length: int, starting_indentation: int ) -> None: self.lines = [] self.current_line = [] self.current_indentation = starting_indentation self.node_stack = [] self.indentation = indentation self.max_line_length = line_length def run(self, ast: ast.AST) -> str: self.visit(ast) if self.current_line: self.lines.append("".join(self.current_line)) self.current_line = [] return "".join(self.lines) def visit(self, node: ast.AST) -> None: self.node_stack.append(node) try: super().visit(node) finally: self.node_stack.pop() def precedence_of_node(self, node: Optional[ast.AST]) -> int: if node is None: return -1 if isinstance(node, (ast.BinOp, ast.UnaryOp, ast.BoolOp)): return _PRECEDENCE[type(node.op)] return _PRECEDENCE.get(type(node), -1) def get_parent_node(self) -> Optional[ast.AST]: try: return 
self.node_stack[-2] except IndexError: return None def has_parent_of_type(self, node_type: Type[ast.AST]) -> bool: return any(isinstance(parent, node_type) for parent in self.node_stack) def write(self, code: str) -> None: assert isinstance(code, str), f"invalid code {code!r}" self.current_line.append(code) def write_indentation(self) -> None: self.write(" " * self.current_indentation) def write_newline(self) -> None: line = "".join(self.current_line) + "\n" self.lines.append(line) self.current_line = [] def current_line_length(self) -> int: return sum(map(len, self.current_line)) def write_expression_list( self, nodes: Sequence[ast.AST], *, separator: str = ", ", allow_newlines: bool = True, need_parens: bool = True, final_separator_if_multiline: bool = True, ) -> None: """Writes a list of nodes, separated by separator. If allow_newlines, will write the expression over multiple lines if necessary to say within max_line_length. If need_parens, will surround the expression with parentheses in this case. If final_separator_if_multiline, will write a separator at the end of the list if it is divided over multiple lines. """ first = True last_line = len(self.lines) current_line = list(self.current_line) for node in nodes: if first: first = False else: self.write(separator) self.visit(node) if allow_newlines and ( self.current_line_length() > self.max_line_length or last_line != len(self.lines) ): break else: return # stayed within the limit # reset state del self.lines[last_line:] self.current_line = current_line separator = separator.rstrip() if need_parens: self.write("(") self.write_newline() with self.add_indentation(): num_nodes = len(nodes) for i, node in enumerate(nodes): self.write_indentation() self.visit(node) if final_separator_if_multiline or i < num_nodes - 1: self.write(separator) self.write_newline() self.write_indentation() if need_parens: self.write(")") def write_suite(self, nodes: Iterable[ast.AST]) -> None: with self.add_indentation(): for line in nodes: self.visit(line) @contextmanager def add_indentation(self) -> Generator[None, None, None]: self.current_indentation += self.indentation try: yield finally: self.current_indentation -= self.indentation @contextmanager def parenthesize_if(self, condition: bool) -> Generator[None, None, None]: if condition: self.write("(") yield self.write(")") else: yield @contextmanager def f_literalise_if(self, condition: bool) -> Generator[None, None, None]: if condition: self.write("f'") yield self.write("'") else: yield def generic_visit(self, node: ast.AST) -> None: raise NotImplementedError(f"missing visit method for {node!r}") def visit_Module(self, node: Union[ast.Module, ast.Interactive]) -> None: for line in node.body: self.visit(line) visit_Interactive = visit_Module def visit_Expression(self, node: ast.Expression) -> None: self.visit(node.body) # Multi-line statements def visit_FunctionDef( self, node: Union[ast.FunctionDef, ast.AsyncFunctionDef] ) -> None: self.write_newline() for decorator in node.decorator_list: self.write_indentation() self.write("@") self.visit(decorator) self.write_newline() self.write_indentation() if isinstance(node, ast.AsyncFunctionDef): self.write("async ") self.write(f"def {node.name}") if sys.version_info >= (3, 12) and node.type_params: self.write("[") self.write_expression_list(node.type_params) self.write("]") self.write("(") self.visit(node.args) self.write(")") if node.returns is not None: self.write(" -> ") self.visit(node.returns) self.write(":") self.write_newline() 
self.write_suite(node.body) visit_AsyncFunctionDef = visit_FunctionDef def visit_ClassDef(self, node: ast.ClassDef) -> None: self.write_newline() self.write_newline() for decorator in node.decorator_list: self.write_indentation() self.write("@") self.visit(decorator) self.write_newline() self.write_indentation() self.write(f"class {node.name}") if sys.version_info >= (3, 12) and node.type_params: self.write("[") self.write_expression_list(node.type_params) self.write("]") self.write("(") exprs = node.bases + getattr(node, "keywords", []) self.write_expression_list(exprs, need_parens=False) self.write("):") self.write_newline() self.write_suite(node.body) def visit_For(self, node: Union[ast.For, ast.AsyncFor]) -> None: self.write_indentation() if isinstance(node, ast.AsyncFor): self.write("async ") self.write("for ") self.visit(node.target) self.write(" in ") self.visit(node.iter) self.write(":") self.write_newline() self.write_suite(node.body) self.write_else(node.orelse) visit_AsyncFor = visit_For def visit_While(self, node: ast.While) -> None: self.write_indentation() self.write("while ") self.visit(node.test) self.write(":") self.write_newline() self.write_suite(node.body) self.write_else(node.orelse) def visit_If(self, node: ast.If) -> None: self.write_indentation() self.write("if ") self.visit(node.test) self.write(":") self.write_newline() self.write_suite(node.body) while ( node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], ast.If) ): node = node.orelse[0] self.write_indentation() self.write("elif ") self.visit(node.test) self.write(":") self.write_newline() self.write_suite(node.body) self.write_else(node.orelse) def write_else(self, orelse: Sequence[ast.AST]) -> None: if orelse: self.write_indentation() self.write("else:") self.write_newline() self.write_suite(orelse) def visit_With(self, node: Union[ast.With, ast.AsyncWith]) -> None: self.write_indentation() if isinstance(node, ast.AsyncWith): self.write("async ") self.write("with ") self.write_expression_list(node.items, allow_newlines=False) self.write(":") self.write_newline() self.write_suite(node.body) visit_AsyncWith = visit_With def visit_withitem(self, node: ast.withitem) -> None: self.visit(node.context_expr) if node.optional_vars: self.write(" as ") self.visit(node.optional_vars) def visit_Try(self, node: Union[ast.Try, "ast.TryStar"]) -> None: self.write_indentation() self.write("try:") self.write_newline() self.write_suite(node.body) is_trystar = sys.version_info >= (3, 11) and isinstance(node, ast.TryStar) for handler in node.handlers: self.visit_ExceptHandler(handler, is_trystar=is_trystar) self.write_else(node.orelse) if node.finalbody: self.write_finalbody(node.finalbody) visit_TryStar = visit_Try def write_finalbody(self, body: Sequence[ast.AST]) -> None: self.write_indentation() self.write("finally:") self.write_newline() self.write_suite(body) # One-line statements def visit_Return(self, node: ast.Return) -> None: self.write_indentation() self.write("return") if node.value: self.write(" ") self.visit(node.value) self.write_newline() def visit_Delete(self, node: ast.Delete) -> None: self.write_indentation() self.write("del ") self.write_expression_list(node.targets, allow_newlines=False) self.write_newline() def visit_Assign(self, node: ast.Assign) -> None: self.write_indentation() self.write_expression_list(node.targets, separator=" = ", allow_newlines=False) self.write(" = ") self.visit(node.value) self.write_newline() def visit_AugAssign(self, node: ast.AugAssign) -> None: 
self.write_indentation() self.visit(node.target) self.write(" ") self.visit(node.op) self.write("= ") self.visit(node.value) self.write_newline() if sys.version_info >= (3, 12): def visit_TypeAlias(self, node: ast.TypeAlias) -> None: self.write_indentation() self.write("type ") self.visit(node.name) if node.type_params: self.write("[") self.write_expression_list(node.type_params) self.write("]") self.write(" = ") self.visit(node.value) self.write_newline() def visit_TypeVar(self, node: ast.TypeVar) -> None: self.write(node.name) if node.bound: self.write(": ") self.visit(node.bound) if sys.version_info >= (3, 13) and node.default_value: self.write(" = ") self.visit(node.default_value) def visit_TypeVarTuple(self, node: ast.TypeVarTuple) -> None: self.write("*") self.write(node.name) if sys.version_info >= (3, 13) and node.default_value: self.write(" = ") self.visit(node.default_value) def visit_ParamSpec(self, node: ast.ParamSpec) -> None: self.write("**") self.write(node.name) if sys.version_info >= (3, 13) and node.default_value: self.write(" = ") self.visit(node.default_value) def visit_AnnAssign(self, node: ast.AnnAssign) -> None: self.write_indentation() if not node.simple: self.write("(") self.visit(node.target) if not node.simple: self.write(")") self.write(": ") self.visit(node.annotation) if node.value is not None: self.write(" = ") self.visit(node.value) self.write_newline() def visit_Raise(self, node: ast.Raise) -> None: self.write_indentation() self.write("raise") if node.exc is not None: self.write(" ") self.visit(node.exc) if node.cause is not None: self.write(" from ") self.visit(node.cause) self.write_newline() def visit_Assert(self, node: ast.Assert) -> None: self.write_indentation() self.write("assert ") self.visit(node.test) if node.msg: self.write(", ") self.visit(node.msg) self.write_newline() def visit_Import(self, node: ast.Import) -> None: self.write_indentation() self.write("import ") self.write_expression_list(node.names, allow_newlines=False) self.write_newline() def visit_ImportFrom(self, node: ast.ImportFrom) -> None: self.write_indentation() dots = "." 
* (node.level or 0) self.write(f"from {dots}") if node.module: self.write(node.module) self.write(" import ") self.write_expression_list(node.names) self.write_newline() def visit_Global(self, node: ast.Global) -> None: self.write_indentation() self.write(f"global {', '.join(node.names)}") self.write_newline() def visit_Nonlocal(self, node: ast.Nonlocal) -> None: self.write_indentation() self.write(f"nonlocal {', '.join(node.names)}") self.write_newline() def visit_Expr(self, node: ast.Expr) -> None: self.write_indentation() self.visit(node.value) self.write_newline() def visit_Pass(self, node: ast.Pass) -> None: self.write_indentation() self.write("pass") self.write_newline() def visit_Break(self, node: ast.Break) -> None: self.write_indentation() self.write("break") self.write_newline() def visit_Continue(self, node: ast.Continue) -> None: self.write_indentation() self.write("continue") self.write_newline() # Expressions def visit_BoolOp(self, node: ast.BoolOp) -> None: my_prec = self.precedence_of_node(node) parent_prec = self.precedence_of_node(self.get_parent_node()) with self.parenthesize_if(my_prec <= parent_prec): op = "and" if isinstance(node.op, ast.And) else "or" self.write_expression_list( node.values, separator=f" {op} ", final_separator_if_multiline=False ) def visit_BinOp(self, node: ast.BinOp) -> None: parent_node = self.get_parent_node() my_prec = self.precedence_of_node(node) parent_prec = self.precedence_of_node(parent_node) if my_prec < parent_prec: should_parenthesize = True elif my_prec == parent_prec and isinstance(parent_node, ast.BinOp): if isinstance(node.op, ast.Pow): should_parenthesize = node == parent_node.left else: should_parenthesize = node == parent_node.right else: should_parenthesize = False with self.parenthesize_if(should_parenthesize): self.visit(node.left) self.write(" ") self.visit(node.op) self.write(" ") self.visit(node.right) def visit_UnaryOp(self, node: ast.UnaryOp) -> None: my_prec = self.precedence_of_node(node) parent_prec = self.precedence_of_node(self.get_parent_node()) with self.parenthesize_if(my_prec < parent_prec): self.visit(node.op) self.visit(node.operand) def visit_Lambda(self, node: ast.Lambda) -> None: parent_node = self.get_parent_node() should_parenthesize = isinstance( parent_node, ( ast.BinOp, ast.UnaryOp, ast.Compare, ast.IfExp, ast.Attribute, ast.Subscript, ast.Call, ast.BoolOp, ), ) or ( # Parens are required in 3.9+, but let's just always add them. 
isinstance(parent_node, ast.comprehension) and node in parent_node.ifs ) with self.parenthesize_if(should_parenthesize): self.write("lambda") if node.args.args or node.args.vararg or node.args.kwarg: self.write(" ") self.visit(node.args) self.write(": ") self.visit(node.body) def visit_NamedExpr(self, node: "ast.NamedExpr") -> None: self.write("(") self.visit(node.target) self.write(" := ") # := has the lowest precedence, so we should never need to parenthesize this self.visit(node.value) self.write(")") def visit_IfExp(self, node: ast.IfExp) -> None: parent_node = self.get_parent_node() if isinstance( parent_node, ( ast.BinOp, ast.UnaryOp, ast.Compare, ast.Attribute, ast.Subscript, ast.Call, ast.BoolOp, ast.comprehension, ), ): should_parenthesize = True elif isinstance(parent_node, ast.IfExp) and ( node is parent_node.test or node is parent_node.body ): should_parenthesize = True else: should_parenthesize = False with self.parenthesize_if(should_parenthesize): self.visit(node.body) self.write(" if ") self.visit(node.test) self.write(" else ") self.visit(node.orelse) def visit_Dict(self, node: ast.Dict) -> None: self.write("{") items = [KeyValuePair(key, value) for key, value in zip(node.keys, node.values)] self.write_expression_list(items, need_parens=False) self.write("}") def visit_KeyValuePair(self, node: KeyValuePair) -> None: if node.key is None: self.write("**") else: self.visit(node.key) self.write(": ") self.visit(node.value) def visit_Set(self, node: ast.Set) -> None: self.write("{") self.write_expression_list(node.elts, need_parens=False) self.write("}") def visit_ListComp(self, node: ast.ListComp) -> None: self.visit_comp(node, "[", "]") def visit_SetComp(self, node: ast.SetComp) -> None: self.visit_comp(node, "{", "}") def visit_DictComp(self, node: ast.DictComp) -> None: self.write("{") elts = [KeyValuePair(node.key, node.value)] + node.generators self.write_expression_list(elts, separator=" ", need_parens=False) self.write("}") def visit_GeneratorExp(self, node: ast.GeneratorExp) -> None: parent_node = self.get_parent_node() # if this is the only argument to a function, omit the extra parentheses if ( isinstance(parent_node, _CallArgs) and len(parent_node.args) == 1 and node == parent_node.args[0] ): start = end = "" else: start = "(" end = ")" self.visit_comp(node, start, end) def visit_comp( self, node: Union[ast.GeneratorExp, ast.ListComp, ast.SetComp], start: str, end: str, ) -> None: self.write(start) self.write_expression_list( [node.elt] + node.generators, separator=" ", need_parens=False ) self.write(end) def visit_Await(self, node: ast.Await) -> None: with self.parenthesize_if( not isinstance( self.get_parent_node(), (ast.Expr, ast.Assign, ast.AugAssign) ) ): self.write("await ") self.visit(node.value) def visit_Yield(self, node: ast.Yield) -> None: with self.parenthesize_if( not isinstance( self.get_parent_node(), (ast.Expr, ast.Assign, ast.AugAssign) ) ): self.write("yield") if node.value: self.write(" ") self.visit(node.value) def visit_YieldFrom(self, node: ast.YieldFrom) -> None: with self.parenthesize_if( not isinstance( self.get_parent_node(), (ast.Expr, ast.Assign, ast.AugAssign) ) ): self.write("yield from ") self.visit(node.value) def visit_Compare(self, node: ast.Compare) -> None: my_prec = self.precedence_of_node(node) parent_prec = self.precedence_of_node(self.get_parent_node()) with self.parenthesize_if(my_prec <= parent_prec): self.visit(node.left) for op, expr in zip(node.ops, node.comparators): self.write(" ") self.visit(op) self.write(" ") 
self.visit(expr) def visit_Call(self, node: ast.Call) -> None: self.visit(node.func) self.write("(") args = node.args + node.keywords self.node_stack.append(_CallArgs(args)) try: if args: self.write_expression_list( args, need_parens=False, final_separator_if_multiline=False, # it's illegal after *args and **kwargs ) self.write(")") finally: self.node_stack.pop() def visit_StarArg(self, node: StarArg) -> None: self.write("*") self.visit(node.arg) def visit_DoubleStarArg(self, node: DoubleStarArg) -> None: self.write("**") self.visit(node.arg) def visit_KeywordArg(self, node: KeywordArg) -> None: self.visit(node.arg) if node.value is not None: self.write("=") self.visit(node.value) def write_number(self, number: Union[int, float, complex]) -> None: should_parenthesize = ( isinstance(number, int) and number >= 0 and isinstance(self.get_parent_node(), ast.Attribute) ) if not should_parenthesize: should_parenthesize = ( isinstance(number, complex) and number.real == 0.0 and (number.imag < 0 or number.imag == -0.0) ) with self.parenthesize_if(should_parenthesize): if isinstance(number, float) and math.isinf(number): # otherwise we write inf, which won't be parsed back right # I don't know of any way to write nan with a literal self.write("1e1000" if number > 0 else "-1e1000") elif isinstance(number, complex) and cmath.isinf(number): self.write("1e1000j" if number.imag > 0 else "-1e1000j") elif isinstance(number, (int, float)) and number < 0: # needed for precedence to work correctly me = self.node_stack.pop() if isinstance(number, int): val = str(-number) else: val = repr(type(number)(-number)) # - of long may be int self.visit( ast.UnaryOp(op=ast.USub(), operand=ast.Name(id=val, ctx=ast.Load())) ) self.node_stack.append(me) else: self.write(repr(number)) def write_string(self, string_value: str, kind: Optional[str] = None) -> None: if kind is not None: self.write(kind) if isinstance(self.get_parent_node(), ast.Expr) and '"""' not in string_value: self.write('"""') s = string_value.encode("unicode-escape").decode("ascii") s = s.replace("\\n", "\n") self.write(s) self.write('"""') return if self.has_parent_of_type(ast.FormattedValue): delimiter = '"' else: delimiter = "'" self.write(delimiter) s = string_value.encode("unicode-escape").decode("ascii") s = s.replace(delimiter, "\\" + delimiter) self.write(s) self.write(delimiter) def visit_FormattedValue(self, node: ast.FormattedValue) -> None: has_parent = isinstance(self.get_parent_node(), ast.JoinedStr) with self.f_literalise_if(not has_parent): self.write("{") if isinstance(node.value, ast.JoinedStr): raise NotImplementedError( "ast_decompiler does not support nested f-strings yet" ) add_space = isinstance( node.value, (ast.Set, ast.Dict, ast.SetComp, ast.DictComp) ) if add_space: self.write(" ") self.visit(node.value) if node.conversion != -1: self.write(f"!{chr(node.conversion)}") if node.format_spec is not None: self.write(":") if isinstance(node.format_spec, ast.JoinedStr): self.visit(node.format_spec) elif isinstance(node.format_spec, ast.Constant) and isinstance( node.format_spec.value, str ): self.write(node.format_spec.value) else: raise TypeError( f"format spec must be a string, not {node.format_spec}" ) if add_space: self.write(" ") self.write("}") def visit_JoinedStr(self, node: ast.JoinedStr) -> None: has_parent = isinstance(self.get_parent_node(), ast.FormattedValue) with self.f_literalise_if(not has_parent): for value in node.values: if isinstance(value, ast.Constant) and isinstance(value.value, str): # always escape ' 
self.write( value.value.encode("unicode-escape") .decode("ascii") .replace("'", r"\'") .replace("{", "{{") .replace("}", "}}") ) else: self.visit(value) def visit_Constant(self, node: ast.Constant) -> None: if isinstance(node.value, str): kind = node.kind else: kind = None self.write_constant(node.value, kind) def write_constant(self, value: object, kind: Optional[str] = None) -> None: if value is Ellipsis: self.write("...") elif isinstance(value, str): self.write_string(value, kind) elif isinstance(value, bytes): self.write(repr(value)) elif isinstance(value, (int, float, complex)): self.write_number(value) elif isinstance(value, (bool, type(None))): self.write(repr(value)) else: raise NotImplementedError(repr(value)) def visit_Attribute(self, node: ast.Attribute) -> None: self.visit(node.value) self.write(f".{node.attr}") def visit_Subscript(self, node: ast.Subscript) -> None: self.visit(node.value) self.write("[") self.visit(node.slice) self.write("]") def visit_Starred(self, node: ast.Starred) -> None: # TODO precedence self.write("*") self.visit(node.value) def visit_Name(self, node: ast.Name) -> None: self.write(node.id) def visit_List(self, node: ast.List) -> None: self.write("[") self.write_expression_list(node.elts, need_parens=False) self.write("]") def visit_Tuple(self, node: ast.Tuple) -> None: if not node.elts: self.write("()") else: parent_node = self.get_parent_node() allow_parens = True should_parenthesize = not isinstance( parent_node, (ast.Expr, ast.Assign, ast.AugAssign, ast.Return, ast.Yield, ast.Index), ) if ( isinstance(parent_node, ast.comprehension) and node is parent_node.target ): should_parenthesize = False # Only relevant on 3.9+, where the ExtSlice class no longer exists. if isinstance(parent_node, ast.Subscript) and node is parent_node.slice: should_parenthesize = False allow_parens = False # https://bugs.python.org/issue32117 if ( isinstance(parent_node, (ast.Return, ast.Yield)) and any(isinstance(elt, ast.Starred) for elt in node.elts) and sys.version_info < (3, 8) ): should_parenthesize = True with self.parenthesize_if(should_parenthesize): if len(node.elts) == 1: self.visit(node.elts[0]) self.write(",") else: self.write_expression_list( node.elts, need_parens=allow_parens and not should_parenthesize ) # slice def visit_Slice(self, node: ast.Slice) -> None: if node.lower: self.visit(node.lower) self.write(":") if node.upper: self.visit(node.upper) if node.step: self.write(":") self.visit(node.step) if sys.version_info < (3, 9): # Any to avoid version-dependent errors from pyanalyze. 
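        # ast.parse only produces ast.Index and ast.ExtSlice nodes on Python 3.8 and
        # earlier; on 3.9+ the value or tuple appears directly as Subscript.slice.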
def visit_ExtSlice(self, node: Any) -> None: if len(node.dims) == 1: self.visit(node.dims[0]) self.write(",") else: self.write_expression_list(node.dims, need_parens=False) def visit_Index(self, node: Any) -> None: self.visit(node.value) # operators for op, string in _OP_TO_STR.items(): exec(f"def visit_{op.__name__}(self, node): self.write({string!r})") # Other types visit_Load = visit_Store = visit_Del = visit_AugLoad = visit_AugStore = ( visit_Param ) = lambda self, node: None def visit_comprehension(self, node: ast.comprehension) -> None: if node.is_async: self.write("async ") self.write("for ") self.visit(node.target) self.write(" in ") self.visit(node.iter) for expr in node.ifs: self.write(" if ") self.visit(expr) def visit_ExceptHandler( self, node: ast.ExceptHandler, *, is_trystar: bool = False ) -> None: self.write_indentation() self.write("except") if is_trystar: self.write("*") if node.type: self.write(" ") self.visit(node.type) if node.name: self.write(" as ") self.write(node.name) self.write(":") self.write_newline() self.write_suite(node.body) def visit_arguments(self, node: ast.arguments) -> None: args = [] if node.posonlyargs: args += node.posonlyargs args.append(ast.Name(id="/", ctx=ast.Load())) num_defaults = len(node.defaults) if num_defaults: args += node.args[:-num_defaults] default_args = zip(node.args[-num_defaults:], node.defaults) else: args += list(node.args) default_args = [] for name, value in default_args: args.append(KeywordArg(name, value)) if node.vararg: args.append(StarArg(node.vararg)) # TODO write a * if there are kwonly args but no vararg if node.kw_defaults: if node.kwonlyargs and not node.vararg: args.append(StarArg(ast.arg(arg="", annotation=None))) num_kwarg_defaults = len(node.kw_defaults) if num_kwarg_defaults: args += node.kwonlyargs[:-num_kwarg_defaults] default_kwargs = zip( node.kwonlyargs[-num_kwarg_defaults:], node.kw_defaults ) else: args += node.kwonlyargs default_kwargs = [] for name, value in default_kwargs: args.append(KeywordArg(name, value)) if node.kwarg: args.append(DoubleStarArg(node.kwarg)) if args: # lambdas can't have a multiline arglist allow_newlines = not isinstance(self.get_parent_node(), ast.Lambda) self.write_expression_list( args, allow_newlines=allow_newlines, need_parens=False, final_separator_if_multiline=False, # illegal after **kwargs ) def visit_arg(self, node: ast.arg) -> None: self.write(node.arg) if node.annotation: self.write(": ") # TODO precedence self.visit(node.annotation) def visit_keyword(self, node: ast.keyword) -> None: if node.arg is None: # in py3, **kwargs is a keyword whose arg is None self.write("**") else: self.write(node.arg + "=") self.visit(node.value) def visit_alias(self, node: ast.alias) -> None: self.write(node.name) if node.asname is not None: self.write(f" as {node.asname}") def visit_Match(self, node: "ast.Match") -> None: self.write_indentation() self.write("match ") self.visit(node.subject) self.write(":") self.write_newline() self.write_suite(node.cases) def visit_match_case(self, node: "ast.match_case") -> None: self.write_indentation() self.write("case ") self.visit(node.pattern) if node.guard is not None: self.write(" if ") self.visit(node.guard) self.write(":") self.write_newline() self.write_suite(node.body) def visit_MatchValue(self, node: "ast.MatchValue") -> None: self.visit(node.value) def visit_MatchSingleton(self, node: "ast.MatchSingleton") -> None: self.write_constant(node.value) def visit_MatchSequence(self, node: "ast.MatchSequence") -> None: self.write("[") 
self.write_expression_list(node.patterns, need_parens=False) self.write("]") def visit_MatchMapping(self, node: "ast.MatchMapping") -> None: self.write("{") items = [ KeyValuePair(key, value) for key, value in zip(node.keys, node.patterns) ] self.write_expression_list(items, need_parens=False) if node.rest is not None: if node.keys: self.write(", ") self.write(f"**{node.rest}") self.write("}") def visit_MatchClass(self, node: "ast.MatchClass") -> None: self.visit(node.cls) self.write("(") self.write_expression_list(node.patterns, need_parens=False) for i, (attr, pattern) in enumerate(zip(node.kwd_attrs, node.kwd_patterns)): if i > 0 or node.patterns: self.write(", ") self.write(attr) self.write("=") self.visit(pattern) self.write(")") def visit_MatchAs(self, node: "ast.MatchAs") -> None: if node.pattern is None: if node.name is None: self.write("_") else: self.write(node.name) else: parent_node = self.get_parent_node() with self.parenthesize_if( isinstance(parent_node, (ast.MatchOr, ast.MatchAs)) ): self.visit(node.pattern) self.write(" as ") self.write(node.name) def visit_MatchOr(self, node: "ast.MatchOr") -> None: parent_node = self.get_parent_node() with self.parenthesize_if(isinstance(parent_node, ast.MatchOr)): self.write_expression_list( node.patterns, need_parens=False, separator=" | " ) def visit_MatchStar(self, node: "ast.MatchStar") -> None: self.write("*") if node.name is None: self.write("_") else: self.write(node.name) ast_decompiler-0.8.0/ast_decompiler/py.typed000066400000000000000000000000001462163703500211770ustar00rootroot00000000000000ast_decompiler-0.8.0/fuzz.py000066400000000000000000000037371462163703500161020ustar00rootroot00000000000000"""Property-based tests for ast_decompiler, based on those for Black. By Zac Hatfield-Dodds, based on my Hypothesmith tool for source code generation. You can run this file with `python`, `pytest`, or (soon) a coverage-guided fuzzer I'm working on. """ import hypothesmith from hypothesis import HealthCheck, given, settings try: import atheris except ImportError: from ast_decompiler.check import check else: with atheris.instrument_imports(): from ast_decompiler.check import check # This test uses the Hypothesis and Hypothesmith libraries to generate random # syntatically-valid Python source code and run Black in odd modes. @settings( max_examples=1000, # roughly 1k tests/minute, or half that under coverage deadline=None, # ignore Hypothesis' health checks; we already know that suppress_health_check=HealthCheck.all(), # this is slow and filter-heavy. ) @given( # Note that while Hypothesmith might generate code unlike that written by # humans, it's a general test that should pass for any *valid* source code. # (so e.g. running it against code scraped of the internet might also help) src_contents=hypothesmith.from_grammar() | hypothesmith.from_node() ) def test_idempotent_any_syntatically_valid_python(src_contents: str) -> None: # Before starting, let's confirm that the input string is valid Python: compile(src_contents, "", "exec") # else the bug is in hypothesmith check(src_contents) if __name__ == "__main__": # Run tests, including shrinking and reporting any known failures. test_idempotent_any_syntatically_valid_python() # If Atheris is available, run coverage-guided fuzzing. 
# (if you want only bounded fuzzing, just use `pytest fuzz.py`) try: import sys import atheris except ImportError: pass else: test = test_idempotent_any_syntatically_valid_python atheris.Setup(sys.argv, test.hypothesis.fuzz_one_input) atheris.Fuzz() ast_decompiler-0.8.0/pyproject.toml000066400000000000000000000034301462163703500174340ustar00rootroot00000000000000# Build system requirements. [build-system] requires = ["flit_core >=3.4,<4"] build-backend = "flit_core.buildapi" # Project metadata [project] name = "ast_decompiler" version = "0.8.0" description = "Python module to decompile AST to Python code" readme = "README.rst" requires-python = ">=3.8" urls.Home = "https://github.com/JelleZijlstra/ast_decompiler" license.file = "LICENSE" keywords = ["ast", "decompiler"] # Classifiers list: https://pypi.org/classifiers/ classifiers = [ "Environment :: Console", "Intended Audience :: Developers", "License :: OSI Approved :: Python Software Foundation License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Topic :: Software Development", ] # Project metadata -- authors. Flit stores this as a list of dicts, so it can't # be inline above. [[project.authors]] name = "Jelle Zijlstra" email = "jelle.zijlstra@gmail.com" [tool.flit.sdist] include = ["CHANGELOG", "README.rst", "*/test*.py"] exclude = [] [tool.pyanalyze] paths = ["ast_decompiler", "tests"] import_paths = ["."] possibly_undefined_name = true use_fstrings = true missing_return_annotation = true missing_parameter_annotation = true unused_variable = true value_always_true = true suggested_parameter_type = true suggested_return_type = true incompatible_override = true [tool.black] target_version = ['py36'] skip-magic-trailing-comma = true preview = true [tool.pytest.ini_options] filterwarnings = [ "error", ] ast_decompiler-0.8.0/test_requirements.txt000066400000000000000000000000071462163703500210400ustar00rootroot00000000000000pytest ast_decompiler-0.8.0/tests/000077500000000000000000000000001462163703500156625ustar00rootroot00000000000000ast_decompiler-0.8.0/tests/__init__.py000066400000000000000000000000001462163703500177610ustar00rootroot00000000000000ast_decompiler-0.8.0/tests/test_basic.py000066400000000000000000000156761462163703500203730ustar00rootroot00000000000000import ast from ast_decompiler import decompile from .tests import assert_decompiles, check def test_non_module() -> None: assert "3" == decompile(ast.Constant(value=3)) assert "1 + 1" == decompile( ast.BinOp(op=ast.Add(), left=ast.Constant(value=1), right=ast.Constant(value=1)) ) def test_FunctionDef() -> None: check( """ @foo def bar(x): pass """ ) check("def foo(): pass") check("def foo(a): pass") check("def foo(a, b): pass") check("def foo(a, b=3): pass") check("def foo(a, b, *args): pass") check("def foo(a, b, *args, **kwargs): pass") check("def foo(a, b=3, **kwargs): pass") def test_ClassDef() -> None: check( """ @foo class Bar(object): pass """ ) check("class Bar: pass") check("class Bar(object): pass") check("class Bar(int, str): pass") def test_Return() -> None: check("def foo(): return") check("def foo(): return 3") def test_Delete() -> None: check("del a") check("del a, b") check("del a, b[c]") def test_Assign() -> None: check("x = 3") 
check("x = y = 3") def test_AugAssign() -> None: check("x += 3") check("y *= 5") def test_For() -> None: check("for x in y: pass") check( """ for x in y: pass else: z = 3 """ ) def test_While() -> None: check("while foo: pass") check( """ while foo: break else: 3 """ ) def test_If() -> None: check("if x: pass") check( """ if x: pass else: pass """ ) check( """ if x: pass elif y: pass else: pass """ ) def test_With() -> None: check("with x: pass") check("with x as y: pass") check("with x as y, a as b: pass") check("with x as y, a: pass") check( """ with x as y: with a as b: with c as d: pass """ ) def test_Raise() -> None: check("raise") check("raise e") def test_TryExcept() -> None: check( """ try: 1/0 except: pass else: pass """ ) check( """ try: 1/0 except: pass """ ) check( """ try: 1/0 except Exception: pass """ ) check( """ try: 1/0 except Exception as e: pass """ ) check( """ try: 1/0 except (Exception, KeyboardInterrupt): pass """ ) def test_TryFinally() -> None: check( """ try: 1/0 finally: leave() """ ) def test_Assert() -> None: check("assert False") check('assert False, "hello"') def test_Import() -> None: check("import x") check("import x as y") check("import x, y") check("import x as y, z") def test_ImportFrom() -> None: check("from . import foo") check("from .foo import bar") check("from foo import bar") check("from ....... import bar as foo") def test_Global() -> None: check("global a") check("global a, b") def test_Expr() -> None: check("call()") def test_Pass() -> None: check("pass") def test_Break() -> None: check("while True: break") def test_Continue() -> None: check("while True: continue") def test_BoolOp() -> None: check("x and y") check("x and y and z") check("x or y") check("x or y or z") check("x and (y or z)") check("(x and y) or z") def test_Binop() -> None: check("x + y") check("x / y") check("x in y") def test_UnaryOp() -> None: check("not x") check("+x") check("-1") check("-(-1)") check("-(1+1j)") assert "-1\n" == decompile(ast.parse("-1")) def test_Lambda() -> None: check("lambda: None") check("lambda x: None") check("lambda x: x ** x") check("[x for x in y if (lambda: x)]") def test_IfExp() -> None: check("x if y else z") def test_Dict() -> None: check("{}") check("{1: 2}") check("{1: 2, 3: 4}") check("{**x, **y, 1: 2}") def test_Set() -> None: check("{1}") check("{1, 2}") def test_ListComp() -> None: check("[x for x in y]") check("[x for x in y if z]") check("[x for x in y for z in a]") assert "[a for a, b in x]\n" == decompile(ast.parse("[a for a, b in x]")) def test_SetComp() -> None: check("{x for x in y}") check("{x for x in y if z}") check("{x for x in y for z in a}") def test_DictComp() -> None: check("{x: y for x in y}") check("{x: z for x in y if z}") check("{x: a for x in y for z in a}") def test_GeneratorExp() -> None: check("(x for x in y)") check("(x for x in y if z)") check("(x for x in y for z in a)") check("f(x for x in y)") assert "f(x for x in y)\n" == decompile(ast.parse("f(x for x in y)")) def test_Yield() -> None: check("def f(): yield") check("def f(): yield 3") check("def f(): x = yield 3") def test_Compare() -> None: check("x < y") check("x > y < z") check("x == y > z") def test_Call() -> None: check("f()") check("f(1)") check("f(1, x=2)") check("f(*args, **kwargs)") check("f(foo, *args, **kwargs)") def test_Num() -> None: check("1") check("1.0") check("1.0e10") check("1+2j") check("-2147483648") # previously had a bug that made us add L check("2147483648") check("1e1000") # check that we don't turn it info inf check("-1e1000") 
check("1E+12_7_3J") check("-1E+12_7_3J") check("-(1)") assert_decompiles("-42", "-42\n") assert_decompiles("-42.35", "-42.35\n") assert_decompiles("3j", "3j\n") assert_decompiles("-3j", "-3j\n") assert_decompiles("1 + 3j", "1 + 3j\n") assert_decompiles("-1-42j", "-1 - 42j\n") assert_decompiles("-(1-42j)", "-(1 - 42j)\n") def test_Str() -> None: check('"foo"') check('u"foo"') check('"foo\\"bar"') check( """from __future__ import unicode_literals b'foo' """ ) check('"a\\nb"') assert_decompiles( '''def f(): """Doc. String. """ ''', ''' def f(): """Doc. String. """ ''', ) check('''def f(): "a\\rb"''') def test_Attribute() -> None: check("a.b") check("(1).b") check("(-0j).b") def test_Subscript() -> None: check("x[y]") check("(-0j)[y]") check("x[y]") check("Callable[[P, Iterator], T]") assert_decompiles("Union[str, int]", "Union[str, int]\n") def test_Name() -> None: check("x") def test_List() -> None: check("[]") check("[a]") check("[a, b]") def test_Tuple() -> None: check("()") check("(a,)") check("(a, b)") def test_Slice() -> None: check("x[:]") check("x[1:]") check("x[:1]") check("x[1::-1]") def test_ExtSlice() -> None: check("x[:, :]") check("x[1:, :1]") check("x[1:,]") def test_Ellipsis() -> None: # one of these generates an Index ast node and the other one doesn't check("self[...]") check("self[Ellipsis]") check("self[..., a]") check("self[Ellipsis, a]") def test_files() -> None: with open("ast_decompiler/decompiler.py") as f: code = f.read() check(code) ast_decompiler-0.8.0/tests/test_indentation.py000066400000000000000000000005361462163703500216130ustar00rootroot00000000000000from .tests import assert_decompiles def test_indentation() -> None: assert_decompiles( """ if x: pass """, """if x: pass """, indentation=1, ) def test_starting_indentation() -> None: assert_decompiles( """3""", """ 3 """, starting_indentation=4, do_check=False, ) ast_decompiler-0.8.0/tests/test_line_length.py000066400000000000000000000054761462163703500215770ustar00rootroot00000000000000from .tests import assert_decompiles def check_split(original: str, multiline: str, length_reduction: int = 2) -> None: assert_decompiles(original, original, line_length=len(original)) assert_decompiles( original, multiline, line_length=len(original.strip()) - length_reduction ) def test_with_prefix() -> None: prefixes = ["from x import"] for prefix in prefixes: check_split( f"{prefix} a, b, c\n", f"""{prefix} ( a, b, c, ) """, ) def test_del() -> None: original = "del a, b, c\n" check_split(original, original, length_reduction=10) def test_import() -> None: original = "import a, b, c, d\n" check_split(original, original, length_reduction=10) def test_global() -> None: original = "global a, b, c, d\n" check_split(original, original, length_reduction=10) def test_boolop() -> None: check_split( "if a and b and c:\n pass\n", """if ( a and b and c ): pass """, length_reduction=12, ) def test_display() -> None: delimiters = [("{", "}"), ("[", "]"), ("\n\nclass Foo(", "):\n pass")] for start, end in delimiters: original = f"{start}a, b, c{end}\n" assert_decompiles(original, original, line_length=len(original)) assert_decompiles( original, f"""{start} a, b, c, {end} """, line_length=len(start.lstrip()) + 5, ) def test_assign() -> None: check_split( "a, b, c = lst\n", """( a, b, c, ) = lst """, length_reduction=7, ) original = "a = b = c = 3\n" check_split(original, original, length_reduction=3) def test_tuple() -> None: check_split( "a, b, c\n", """( a, b, c, ) """, ) def test_extslice() -> None: check_split( "d[a:, b, c]\n", """d[ a:, 
b, c, ] """, ) def test_comprehension() -> None: check_split( "[x for y in lst for x in y]\n", """[ x for y in lst for x in y ] """, ) def test_dict() -> None: check_split( "{a: b, c: d}\n", """{ a: b, c: d, } """, ) def test_dictcomp() -> None: check_split( "{a: b for a, b in c}\n", """{ a: b for a, b in c } """, ) def test_function_def() -> None: check_split( "\ndef f(a, b, *args, **kwargs):\n pass\n", """ def f( a, b, *args, **kwargs ): pass """, length_reduction=12, ) def test_call() -> None: check_split( "f(a, b, **c)\n", """f( a, b, **c ) """, ) def test_nesting() -> None: check_split( "f(f(a, b, c), g(d, e, f))\n", """f( f(a, b, c), g(d, e, f) ) """, length_reduction=9, ) ast_decompiler-0.8.0/tests/test_literal.py000066400000000000000000000017401462163703500207310ustar00rootroot00000000000000from .tests import assert_decompiles def test_With() -> None: assert_decompiles( "with x as y, a as b: pass", """with x as y, a as b: pass """, ) def test_TryFinally() -> None: assert_decompiles( """ try: 1 / 0 except Exception as e: pass else: z = 3 finally: z = 4 """, """try: 1 / 0 except Exception as e: pass else: z = 3 finally: z = 4 """, ) def test_If() -> None: assert_decompiles( """ if x: pass else: if y: pass else: pass """, """if x: pass elif y: pass else: pass """, ) assert_decompiles( """ if x: pass elif a: if y: pass else: if z: pass else: pass else: pass """, """if x: pass elif a: if y: pass elif z: pass else: pass else: pass """, ) def test_BinOp() -> None: assert_decompiles( """ f(a * b) """, """f(a * b) """, ) ast_decompiler-0.8.0/tests/test_patma.py000066400000000000000000000014211462163703500203730ustar00rootroot00000000000000from .tests import check, skip_before @skip_before((3, 10)) def test_patma() -> None: check( """ match x: case y | z: pass case a(b, c, z=3) if whatever: pass case [1, 2, 3] if y if x else z: pass case [1, 2, 3, *_]: pass case [1, 2, 3, *rest]: pass case {"x": y, "z": 3}: pass case y: pass case 3: pass case -1: pass case _: pass """ ) @skip_before((3, 10)) def test_precedence() -> None: check( """ match x: case (y | z) as a: pass case y | (z as a): pass case (y as z) as a: pass case y | z as a: pass case y | (z | a): pass case (y | z) | a: pass """ ) ast_decompiler-0.8.0/tests/test_pep695.py000066400000000000000000000007101462163703500203210ustar00rootroot00000000000000from .tests import check, skip_before @skip_before((3, 12)) def test_generic_class() -> None: check( """ class C[T: int, *Ts, **P]: pass """ ) @skip_before((3, 12)) def test_generic_function() -> None: check( """ def f[T: int, *Ts, **P](a: T, b: Ts, c: P) -> None: pass """ ) @skip_before((3, 12)) def test_type_alias() -> None: check( """ type X = int type Y[T: (int, str), *Ts, *P] = T """ ) ast_decompiler-0.8.0/tests/test_precedence.py000066400000000000000000000034371462163703500213770ustar00rootroot00000000000000from .tests import check def test_Yield() -> None: check("def f(): yield") check("def f(): x = yield 3") check("def f(): return (yield 3)") check("def f(): (yield 3)[f] += 4") check("def f(): (yield 3)[(yield 4):(yield 5):] += yield 6") check("lambda x: (yield x)") check("def f(): (yield a), b") def test_Tuple() -> None: check("def f(): return x, y") check("def f(): yield x, y") check("[(1, 2)]") check("{(1, 2): (3, 4)}") check("[(a, b) for f in (c, d)]") check("(a, b) + 3") check("lambda x: (a, b)") check("x[(1, 2):(3, 4):(5, 6), (7, 8):]") check("()") check("x,") def test_Lambda() -> None: check("lambda x: lambda y: x + y") check("lambda x: y if z else x") check("(lambda x: y) if 
z else x") check("x or (lambda x: x)") check("1 + (lambda x: x)") def test_IfExp() -> None: check("y if x else a, b") check("(yield y) if (yield x) else (yield a), b") check("y if x else z if a else b") check("y if x else (z if a else b)") check("(y if x else z) if a else b") check("y if (x if z else a) else b") check("(a and b) if c else d") check("a and b if c else d") check("a and (b if c else d)") check("[x for x in (y if z else x)]") def test_BinOp() -> None: check("(a ** b) ** c") check("a ** b ** c") check("a ** (b ** c)") check("(a + b) * c") check("a + b + c") check("(a + b) + c") check("a + (b + c)") check("x * (a or b)") def test_UnaryOp() -> None: check("not not x") check("-(not x)") check("not (-x)") check("(-1) ** x") check("-((-1)**x)") def test_Call() -> None: check("f(a, b)") check("f((a, b))") check("(a, b)(a, b)") check("a.b(c, d)") check("f((yield a), b)") check("f(a, (yield b))") ast_decompiler-0.8.0/tests/test_py3_syntax.py000066400000000000000000000116471462163703500214250ustar00rootroot00000000000000from .tests import check def test_MatMult() -> None: check("a @ b") check("(a * b) @ c") check("a * (b @ c)") check("a + (b @ c)") def test_AsyncFunctionDef() -> None: check( """ async def f(a, b): pass """ ) def test_async_gen() -> None: check( """ async def f(): yield """ ) def test_async_comprehensions() -> None: check( """ async def f(lst): return [await x for x in lst] """ ) check( """ async def f(lst): a = [x async for x in lst] b = {x async for x in lst} c = {x: x async for x in lst} d = (x async for x in lst) """ ) def test_function_annotations() -> None: check( """ def f(a: int, b: str) -> float: pass """ ) def test_class_keywords() -> None: check( """ class Foo(a=3): pass """ ) check( """ class WithMeta(metaclass=type): pass """ ) def test_annotations() -> None: check("def f(a: int, b: int = 0, *args: int, c: int, **kwargs: int): pass") def test_AsyncFor() -> None: check( """ async def f(y): async for x in y: pass """ ) def test_py3_with() -> None: check( """ with a as b: pass """ ) check( """ with a as b, c as d: pass """ ) check( """ with a as b: with c as d: pass """ ) def test_async_with() -> None: check( """ async def f(a): async with a as b: pass """ ) def test_raise_with_cause() -> None: check( """ raise e from ee """ ) def test_Nonlocal() -> None: check( """ def f(x): nonlocal y """ ) check( """ def f(x): nonlocal y, z """ ) def test_Await() -> None: check( """ async def f(x): await x """ ) check( """ async def f(x): 1 + await x """ ) check( """ async def f(x): x = await x """ ) check( """ async def f(x): x += await x """ ) check( """ async def f(x): return 3, (await x) """ ) def test_YieldFrom() -> None: check("yield from x") check("1 + (yield from x)") check("x = yield from x") check("x += yield from x") check("return 3, (yield from x)") def test_FormattedValue() -> None: check('f"a"') check('f"{b}"') check('f"{b}a"') check('f"{b!r}"') check('f"{b:a}"') check('f"{b!r:a}"') check("f\"{'b'!r:'a'}\"") check('f"{a}b{c!a}s"') check('f"{a.b}c{d()}"') check("f'{1/3:.1f}'") check(r"f'{a}\''") check("f'{1/3:{5}.1}'") check("f'{ {a, b, c} }'") check("f'{ {a: b} }'") check("f'{ {a for a in b} }'") check("f'{ {a: b for a, b in c} }'") check(r"f'{a}\n'") check(r"f'{a}\t'") check("f'{a}é'") check('f"{{"') check('f"}}"') check('f"{{{a}"') def test_Bytes() -> None: check('b"a"') def test_NameConstant() -> None: check("True") def test_Starred() -> None: check("a, *b = 3") check("[a, *b]") def test_kwonlyargs() -> None: check("def f(a, *, b): pass") check("def f(a, 
*args, b): pass") check("def f(a, *, b=3): pass") check("def f(a, *args, b=3): pass") check("def f(a, *args, b=3, **kwargs): pass") def test_annassign() -> None: check("a: int") check("a: int = 3") check("(a): int") check( """ class A: b: int """ ) check( """ def f(): a: int """ ) def test_future_annotations() -> None: # This doesn't really affect ast_decompiler because the __future__ # import doesn't change the AST. check( """ from __future__ import annotations def f(x: int) -> str: pass y: float """ ) def test_async_await_in_fstring() -> None: check("f'{await x}'") check("f'{[x async for x in y]}'") def test_too_many_args() -> None: args = ", ".join(f"x{i}" for i in range(300)) check( """ def f({}): pass f({}) """.format( args, args ) ) def test_finally_continue() -> None: check( """ def f(): for x in y: try: whatever finally: continue """ ) def test_unpacking() -> None: check( """ def parse(family): lastname, *members = family.split() return (lastname.upper(), *members) """ ) def test_unparenthesized_unpacking() -> None: check( """ def parse(family): lastname, *members = family.split() return lastname.upper(), *members """ ) def test_assignment_expression() -> None: # Some of these can be used unparenthesized in 3.10+ but we don't bother. check( """ if (x := 3): pass {(y := 4)} {(z := 5) for a in b} lst[(alpha := 3)] lst[(beta := 4):(gamma := 5)] """ ) def test_positional_only() -> None: check( """ def f(x, /): pass """ ) def test_fstring_debug_specifier() -> None: check("f'{user=} {member_since=}'") check("f'{user=!s} {delta.days=:,d}'") ast_decompiler-0.8.0/tests/tests.py000066400000000000000000000045231462163703500174020ustar00rootroot00000000000000""" Helpers for tests. """ import ast from typing import Any, Tuple, Callable, TypeVar from ast_decompiler import decompile from ast_decompiler.check import check as check import difflib import sys VERSION = sys.version_info.major _CallableT = TypeVar("_CallableT", bound=Callable[..., None]) def assert_decompiles( code: str, result: str, do_check: bool = True, indentation: int = 4, line_length: int = 100, starting_indentation: int = 0, ) -> None: """Asserts that code, when parsed, decompiles into result.""" decompile_result = decompile( ast.parse(code), indentation=indentation, line_length=line_length, starting_indentation=starting_indentation, ) if do_check: check(decompile_result) if result != decompile_result: print(">>> expected") print(result) print(">>> actual") print(decompile_result) print(">>> diff") for line in difflib.unified_diff( result.splitlines(), decompile_result.splitlines() ): print(line) assert False, f"failed to decompile {code}" def only_on_version(py_version: int) -> Callable[[_CallableT], _CallableT]: """Decorator that runs a test only if the Python version matches.""" if py_version != VERSION: def decorator(fn: Callable[..., Any]) -> Callable[..., None]: return lambda *args: None else: def decorator(fn: _CallableT) -> _CallableT: return fn return decorator def skip_before(py_version: Tuple[int, int]) -> Callable[[_CallableT], _CallableT]: """Decorator that skips a test on Python versions before py_version.""" if sys.version_info < py_version: def decorator(fn: Callable[..., Any]) -> Callable[..., None]: return lambda *args: None else: def decorator(fn: _CallableT) -> _CallableT: return fn return decorator def skip_after(py_version: Tuple[int, int]) -> Callable[[_CallableT], _CallableT]: """Decorator that skips a test on Python versions after py_version.""" if sys.version_info > py_version: def decorator(fn: 
Callable[..., Any]) -> Callable[..., None]: return lambda *args, **kwargs: None else: def decorator(fn: _CallableT) -> _CallableT: return fn return decorator ast_decompiler-0.8.0/tox.ini000066400000000000000000000016211462163703500160330ustar00rootroot00000000000000[tox] minversion=2.3.1 envlist = py38,py39,py310,py311,py312,py313,black isolated_build = True [testenv] deps = black == 24.4.2 pytest commands = pytest tests/ [testenv:black] commands = black --check . [testenv:pyanalyze] deps = pyanalyze == 0.12.0 commands = # Need recent pip for PEP 660-based editable installs. pip install --upgrade pip # Need an editable install so pyanalyze imports the right version # of ast_decompiler. pip install -e . pyanalyze --config-file=pyproject.toml ast_decompiler/ tests/ [testenv:fuzz] skip_install = True deps = # Skip in CI, it runs forever. # atheris coverage hypothesmith commands = pip install -e .[d] coverage erase coverage run fuzz.py coverage report [gh-actions] python = 3.8: py38 3.9: py39 3.10: py310 3.11: py311 3.12: py312, black, pyanalyze 3.13-dev: py313
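
For reference, a minimal round-trip sketch (not one of the packaged files) showing how the helpers in tests/tests.py drive the library. It assumes only the names already imported there — ast_decompiler.decompile and ast_decompiler.check.check — and the same keyword arguments that assert_decompiles forwards; the sample source string and variable names are illustrative only.

# Hedged usage sketch, assuming only the public names used in tests/tests.py.
import ast

from ast_decompiler import decompile
from ast_decompiler.check import check

source = "x = [a for a in range(10) if a % 2]\n"
tree = ast.parse(source)

# decompile() accepts the keyword arguments that assert_decompiles() passes
# through: indentation, line_length, and starting_indentation.
regenerated = decompile(
    tree, indentation=4, line_length=100, starting_indentation=0
)

# check() round-trips a source string through parse and decompile and raises
# if the regenerated code no longer parses, mirroring its use in the tests.
check(regenerated)
print(regenerated)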