Coverage for core\test_leoAst.py: 99%
939 statements
« prev ^ index » next    coverage.py v6.4, created at 2022-05-24 10:21 -0500
1# -*- coding: utf-8 -*-
2#@+leo-ver=5-thin
3#@+node:ekr.20210902073413.1: * @file ../unittests/core/test_leoAst.py
4#@@first
5"""Tests of leoAst.py"""
6#@+<< test_leoAst imports >>
7#@+node:ekr.20210902074548.1: ** << test_leoAst imports >>
8import ast
9import os
10import sys
11import textwrap
12import time
13import token as token_module
14from typing import Any, Callable, Dict, List, Tuple
15import unittest
16import warnings
17warnings.simplefilter("ignore")
18# pylint: disable=import-error
19# Third-party.
20try:
21 import asttokens
22except Exception: # pragma: no cover
23 asttokens = None
24try:
25 # Suppress a warning about imp being deprecated.
26 with warnings.catch_warnings():
27 import black
28except Exception: # pragma: no cover
29 black = None
31# pylint: disable=wrong-import-position
32from leo.core import leoGlobals as g
34from leo.core.leoAst import AstNotEqual
35from leo.core.leoAst import Fstringify, Orange
36from leo.core.leoAst import IterativeTokenGenerator
37from leo.core.leoAst import Token, TokenOrderGenerator, TokenOrderTraverser
38from leo.core.leoAst import get_encoding_directive, read_file, strip_BOM
39from leo.core.leoAst import make_tokens, parse_ast, tokens_to_string
40from leo.core.leoAst import dump_ast, dump_contents, dump_tokens, dump_tree, _op_names
41#@-<< test_leoAst imports >>
# Cache the interpreter's (major, minor) version: many tests below skip
# or change their input based on py_version.
v1, v2, junk1, junk2, junk3 = sys.version_info
py_version = (v1, v2)
# Type alias: a list of (callable, argument) action pairs.
ActionList = List[Tuple[Callable, Any]]
45#@+others
46#@+node:ekr.20200107114620.1: ** functions: unit testing
47#@+node:ekr.20191027072126.1: *3* function: compare_asts & helpers
def compare_asts(ast1, ast2):  # pragma: no cover
    """Compare two ast trees. Return True if they are equal."""
    # Let the recursive helper do the work; it raises AstNotEqual on
    # the first mismatch it finds.
    try:
        _compare_asts(ast1, ast2)
        return True
    except AstNotEqual:
        # Show both trees so the mismatch can be diagnosed.
        dump_ast(ast1, tag='AST BEFORE')
        dump_ast(ast2, tag='AST AFTER')
    except Exception:
        g.trace("Unexpected exception")
        g.es_exception()
    return False
62#@+node:ekr.20191027071653.2: *4* function._compare_asts
def _compare_asts(node1, node2):  # pragma: no cover
    """
    Compare both nodes, and recursively compare their children.

    See also: http://stackoverflow.com/questions/3312989/
    """
    # Compare the nodes themselves first.
    _compare_nodes(node1, node2)
    # Both nodes must declare the same set of ast fields.
    fields1 = getattr(node1, "_fields", [])  # type:ignore
    fields2 = getattr(node2, "_fields", [])  # type:ignore
    if fields1 != fields2:
        raise AstNotEqual(
            f"node1._fields: {fields1}\n" f"node2._fields: {fields2}")
    # Recursively compare each field, skipping position/context data.
    ignored_fields = ('lineno', 'col_offset', 'ctx')
    for field in fields1:
        if field in ignored_fields:
            continue
        attr1 = getattr(node1, field, None)
        attr2 = getattr(node2, field, None)
        if attr1.__class__.__name__ != attr2.__class__.__name__:
            raise AstNotEqual(f"attrs1: {attr1},\n" f"attrs2: {attr2}")
        _compare_asts(attr1, attr2)
85#@+node:ekr.20191027071653.3: *4* function._compare_nodes
86def _compare_nodes(node1, node2): # pragma: no cover
87 """
88 Compare node1 and node2.
89 For lists and tuples, compare elements recursively.
90 Raise AstNotEqual if not equal.
91 """
92 # Class names must always match.
93 if node1.__class__.__name__ != node2.__class__.__name__:
94 raise AstNotEqual(
95 f"node1.__class__.__name__: {node1.__class__.__name__}\n"
96 f"node2.__class__.__name__: {node2.__class__.__name_}"
97 )
98 # Special cases for strings and None
99 if node1 is None:
100 return
101 if isinstance(node1, str):
102 if node1 != node2:
103 raise AstNotEqual(f"node1: {node1!r}\n" f"node2: {node2!r}")
104 # Special cases for lists and tuples:
105 if isinstance(node1, (tuple, list)):
106 if len(node1) != len(node2):
107 raise AstNotEqual(f"node1: {node1}\n" f"node2: {node2}")
108 for i, item1 in enumerate(node1):
109 item2 = node2[i]
110 if item1.__class__.__name__ != item2.__class__.__name__:
111 raise AstNotEqual(
112 f"list item1: {i} {item1}\n" f"list item2: {i} {item2}"
113 )
114 _compare_asts(item1, item2)
115#@+node:ekr.20191121081439.1: *3* function: compare_lists
def compare_lists(list1, list2):  # pragma: no cover
    """
    Compare two lists of strings, showing the first mismatch.

    Return the index of the first mismatched lines, or None if identical.
    """
    import itertools
    # Pad the shorter list so a length difference also counts as a mismatch.
    pairs = itertools.zip_longest(list1, list2, fillvalue='Missing!')
    mismatches = (i for i, (s1, s2) in enumerate(pairs) if s1 != s2)
    return next(mismatches, None)
128#@+node:ekr.20191226071135.1: *3* function: get_time
def get_time():
    """Return the process-wide CPU time, used for the timing statistics."""
    return time.process_time()
131#@+node:ekr.20220403080350.1: ** Base Test classes
132#@+node:ekr.20191227154302.1: *3* class BaseTest (TestCase)
class BaseTest(unittest.TestCase):
    """
    The base class of all tests of leoAst.py.

    This class contains only helpers.
    """

    # Statistics. Class-level, so counts and times accumulate across all tests.
    counts: Dict[str, int] = {}
    times: Dict[str, float] = {}

    # Debugging traces & behavior.
    # create_links: 'full-traceback'
    # make_data: 'contents', 'tokens', 'tree',
    # 'post-tokens', 'post-tree',
    # 'unit-test'
    debug_list: List[str] = []
    # The exception (if any) raised by tog.create_links; reported by make_data.
    link_error: Exception = None

    #@+others
    #@+node:ekr.20200110103036.1: *4* BaseTest.adjust_expected
    def adjust_expected(self, s):
        """Adjust leading indentation in the expected string s."""
        # Strip a leading backslash-newline escape, dedent,
        # and end with exactly one trailing newline.
        return textwrap.dedent(s.lstrip('\\\n')).rstrip() + '\n'
    #@+node:ekr.20200110092217.1: *4* BaseTest.check_roundtrip
    def check_roundtrip(self, contents):
        """Check that the tokenizer round-trips the given contents."""
        contents, tokens, tree = self.make_data(contents)
        results = tokens_to_string(tokens)
        self.assertEqual(contents, results)
    #@+node:ekr.20191227054856.1: *4* BaseTest.make_data
    def make_data(self, contents, description=None):
        """
        Return (contents, tokens, tree) for the given contents:

        contents: the regularized (dedented, newline-terminated) source.
        tokens:   the list of Token objects, two-way linked to the tree.
        tree:     the ast parse tree, two-way linked to the tokens.
        """
        contents = contents.lstrip('\\\n')
        if not contents:
            return '', None, None # pragma: no cover
        self.link_error = None
        t1 = get_time()
        self.update_counts('characters', len(contents))
        # Ensure all tests end in exactly one newline.
        contents = textwrap.dedent(contents).rstrip() + '\n'
        # Create the TOG instance.
        self.tog = TokenOrderGenerator()
        # Use the calling test's name as the filename, for error messages.
        self.tog.filename = description or g.callers(2).split(',')[0]
        # Pass 0: create the tokens and parse tree
        tokens = self.make_tokens(contents)
        if not tokens:
            self.fail('make_tokens failed') # pragma: no cover
        tree = self.make_tree(contents)
        if not tree:
            self.fail('make_tree failed') # pragma: no cover
        # Dump whatever intermediate data debug_list requests.
        if 'contents' in self.debug_list:
            dump_contents(contents) # pragma: no cover
        if 'ast' in self.debug_list: # pragma: no cover
            if py_version >= (3, 9):
                # ast.dump's `indent` keyword is new in Python 3.9.
                # pylint: disable=unexpected-keyword-arg
                g.printObj(ast.dump(tree, indent=2), tag='ast.dump')
            else:
                g.printObj(ast.dump(tree), tag='ast.dump')
        if 'tree' in self.debug_list: # Excellent traces for tracking down mysteries.
            dump_ast(tree) # pragma: no cover
        if 'tokens' in self.debug_list:
            dump_tokens(tokens) # pragma: no cover
        self.balance_tokens(tokens)
        # Pass 1: create the links.
        self.create_links(tokens, tree)
        if 'post-tree' in self.debug_list:
            dump_tree(tokens, tree) # pragma: no cover
        if 'post-tokens' in self.debug_list:
            dump_tokens(tokens) # pragma: no cover
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - t1)
        # Report any failure from create_links here, for cleaner tracebacks.
        if self.link_error:
            self.fail(self.link_error) # pragma: no cover
        return contents, tokens, tree
    #@+node:ekr.20191227103533.1: *4* BaseTest.make_file_data
    def make_file_data(self, filename):
        """Return (contents, tokens, tree) from the given file in leo/core."""
        directory = os.path.dirname(__file__)
        filename = g.os_path_finalize_join(directory, '..', '..', 'core', filename)
        assert os.path.exists(filename), repr(filename)
        contents = read_file(filename)
        contents, tokens, tree = self.make_data(contents, filename)
        return contents, tokens, tree
    #@+node:ekr.20191228101601.1: *4* BaseTest: passes...
    #@+node:ekr.20191228095945.11: *5* 0.1: BaseTest.make_tokens
    def make_tokens(self, contents):
        """
        BaseTest.make_tokens.

        Make tokens from contents, updating the statistics.
        """
        t1 = get_time()
        # Tokenize.
        tokens = make_tokens(contents)
        t2 = get_time()
        self.update_counts('tokens', len(tokens))
        self.update_times('01: make-tokens', t2 - t1)
        return tokens
    #@+node:ekr.20191228102101.1: *5* 0.2: BaseTest.make_tree
    def make_tree(self, contents):
        """
        BaseTest.make_tree.

        Return the parse tree for the given contents string.
        """
        t1 = get_time()
        tree = parse_ast(contents)
        t2 = get_time()
        self.update_times('02: parse_ast', t2 - t1)
        return tree
    #@+node:ekr.20191228185201.1: *5* 0.3: BaseTest.balance_tokens
    def balance_tokens(self, tokens):
        """
        BaseTest.balance_tokens.

        Insert links between corresponding paren tokens.
        """
        t1 = get_time()
        count = self.tog.balance_tokens(tokens)
        t2 = get_time()
        self.update_counts('paren-tokens', count)
        self.update_times('03: balance-tokens', t2 - t1)
        return count
    #@+node:ekr.20191228101437.1: *5* 1.1: BaseTest.create_links
    def create_links(self, tokens, tree, filename='unit test'):
        """
        BaseTest.create_links.

        Insert two-way links between the tokens and ast tree.

        Note: the filename arg is unused here; tog.filename was set in make_data.
        """
        tog = self.tog
        try:
            t1 = get_time()
            tog.create_links(tokens, tree)
            t2 = get_time()
            self.update_counts('nodes', tog.n_nodes)
            self.update_times('11: create-links', t2 - t1)
        except Exception as e: # pragma: no cover
            if 'full-traceback' in self.debug_list:
                g.es_exception()
            # Weird: calling self.fail creates ugly failures.
            # Save the exception; make_data reports it via self.fail.
            self.link_error = e
    #@+node:ekr.20191228095945.10: *5* 2.1: BaseTest.fstringify
    def fstringify(self, contents, tokens, tree, filename=None, silent=False):
        """
        BaseTest.fstringify.

        Return the result of running Fstringify on the given data.
        """
        t1 = get_time()
        if not filename:
            filename = g.callers(1)
        fs = Fstringify()
        if silent:
            fs.silent = True
        result_s = fs.fstringify(contents, filename, tokens, tree)
        t2 = get_time()
        self.update_times('21: fstringify', t2 - t1)
        return result_s
    #@+node:ekr.20200107175223.1: *5* 2.2: BaseTest.beautify
    def beautify(self, contents, tokens, tree, filename=None, max_join_line_length=None, max_split_line_length=None):
        """
        BaseTest.beautify.

        Return the result of running the Orange beautifier on the given data.
        """
        t1 = get_time()
        if not contents:
            return '' # pragma: no cover
        if not filename:
            filename = g.callers(2).split(',')[0]
        orange = Orange()
        result_s = orange.beautify(contents, filename, tokens, tree,
            max_join_line_length=max_join_line_length,
            max_split_line_length=max_split_line_length)
        t2 = get_time()
        self.update_times('22: beautify', t2 - t1)
        # Save the code list for later inspection by tests.
        self.code_list = orange.code_list
        return result_s
    #@+node:ekr.20191228095945.1: *4* BaseTest: stats...
    # Actions should fail by throwing an exception.
    #@+node:ekr.20191228095945.12: *5* BaseTest.dump_stats & helpers
    def dump_stats(self): # pragma: no cover
        """Show all calculated statistics."""
        if self.counts or self.times:
            print('')
            self.dump_counts()
            self.dump_times()
            print('')
    #@+node:ekr.20191228154757.1: *6* BaseTest.dump_counts
    def dump_counts(self): # pragma: no cover
        """Show all calculated counts."""
        for key, n in self.counts.items():
            print(f"{key:>16}: {n:>6}")
    #@+node:ekr.20191228154801.1: *6* BaseTest.dump_times
    def dump_times(self): # pragma: no cover
        """
        Show all calculated times.

        Keys should start with a priority (sort order) of the form `[0-9][0-9]:`
        """
        for key in sorted(self.times):
            t = self.times.get(key)
            key2 = key[3:]  # Strip the 'NN:' sort prefix for display.
            print(f"{key2:>16}: {t:6.3f} sec.")
    #@+node:ekr.20191228181624.1: *5* BaseTest.update_counts & update_times
    def update_counts(self, key, n): # pragma: no cover
        """Update the count statistic given by key, n."""
        old_n = self.counts.get(key, 0)
        self.counts[key] = old_n + n

    def update_times(self, key, t): # pragma: no cover
        """Update the timing statistic given by key, t."""
        old_t = self.times.get(key, 0.0)
        self.times[key] = old_t + t
    #@-others
346#@+node:ekr.20191227051737.1: *3* class TestTOG (BaseTest)
347class TestTOG(BaseTest):
348 """
349 Tests for the TokenOrderGenerator class.
351 These tests call BaseTest.make_data, which creates the two-way links
352 between tokens and the parse tree.
354 The asserts in tog.sync_tokens suffice to create strong unit tests.
355 """
357 debug_list = ['unit-test']
359 #@+others
360 #@+node:ekr.20210318213945.1: *4* TestTOG.Recent bugs & features
361 #@+node:ekr.20210321172902.1: *5* test_bug_1851
362 def test_bug_1851(self):
364 contents = r'''\
365 def foo(a1):
366 pass
367 '''
368 contents, tokens, tree = self.make_data(contents)
369 #@+node:ekr.20210914161519.1: *5* test_bug_2171
370 def test_bug_2171(self):
372 if py_version < (3, 9):
373 self.skipTest('Requires Python 3.9') # pragma: no cover
375 contents = "'HEAD:%s' % g.os_path_join( *(relative_path + [filename]) )"
376 contents, tokens, tree = self.make_data(contents)
377 #@+node:ekr.20210318213133.1: *5* test_full_grammar
378 def test_full_grammar(self):
379 # Load py3_test_grammar.py.
380 dir_ = os.path.dirname(__file__)
381 path = os.path.abspath(os.path.join(dir_, '..', 'py3_test_grammar.py'))
382 assert os.path.exists(path), path
383 if py_version < (3, 9):
384 self.skipTest('Requires Python 3.9 or above') # pragma: no cover
385 # Verify that leoAst can parse the file.
386 contents = read_file(path)
387 self.make_data(contents)
388 #@+node:ekr.20210318214057.1: *5* test_line_315
389 def test_line_315(self):
391 #
392 # Known bug: position-only args exist in Python 3.8,
393 # but there is no easy way of syncing them.
394 # This bug will not be fixed.
395 # The workaround is to require Python 3.9
396 if py_version >= (3, 9):
397 contents = '''\
398 f(1, x=2,
399 *[3, 4], y=5)
400 '''
401 elif 1: # Expected order.
402 contents = '''f(1, *[a, 3], x=2, y=5)''' # pragma: no cover
403 else: # Legacy.
404 contents = '''f(a, *args, **kwargs)'''
405 contents, tokens, tree = self.make_data(contents)
406 #@+node:ekr.20210320095504.8: *5* test_line_337
407 def test_line_337(self):
409 if py_version >= (3, 8): # Requires neither line_no nor col_offset fields.
410 contents = '''def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass'''
411 else:
412 contents = '''def f(a, b, d=4, *arg, **keys): pass''' # pragma: no cover
413 contents, tokens, tree = self.make_data(contents)
414 #@+node:ekr.20210320065202.1: *5* test_line_483
415 def test_line_483(self):
417 if py_version < (3, 8):
418 # Python 3.8: https://bugs.python.org/issue32117
419 self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment") # pragma: no cover
420 contents = '''def g3(): return 1, *return_list'''
421 contents, tokens, tree = self.make_data(contents)
422 #@+node:ekr.20210320065344.1: *5* test_line_494
423 def test_line_494(self):
425 """
426 https://docs.python.org/3/whatsnew/3.8.html#other-language-changes
428 Generalized iterable unpacking in yield and return statements no longer
429 requires enclosing parentheses. This brings the yield and return syntax
430 into better agreement with normal assignment syntax.
431 """
432 if py_version < (3, 8):
433 # Python 3.8: https://bugs.python.org/issue32117
434 self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment") # pragma: no cover
435 contents = '''def g2(): yield 1, *yield_list'''
436 contents, tokens, tree = self.make_data(contents)
437 #@+node:ekr.20210319130349.1: *5* test_line_875
438 def test_line_875(self):
440 contents = '''list((x, y) for x in 'abcd' for y in 'abcd')'''
441 contents, tokens, tree = self.make_data(contents)
442 #@+node:ekr.20210319130616.1: *5* test_line_898
443 def test_line_898(self):
445 contents = '''g = ((i,j) for i in range(x) if t for j in range(x))'''
446 contents, tokens, tree = self.make_data(contents)
447 #@+node:ekr.20210320085705.1: *5* test_walrus_operator
448 def test_walrus_operator(self):
450 if py_version < (3, 8):
451 self.skipTest(f"Python {v1}.{v2} does not support assignment expressions") # pragma: no cover
452 contents = '''if (n := len(a)) > 10: pass'''
453 contents, tokens, tree = self.make_data(contents)
454 #@+node:ekr.20191227052446.10: *4* TestTOG.Contexts...
455 #@+node:ekr.20191227052446.11: *5* test_ClassDef
456 def test_ClassDef(self):
457 contents = """\
458 class TestClass1:
459 pass
461 def decorator():
462 pass
464 @decorator
465 class TestClass2:
466 pass
468 @decorator
469 class TestClass(base1, base2):
470 pass
471 """
472 self.make_data(contents)
473 #@+node:ekr.20191227052446.12: *5* test_ClassDef2
474 def test_ClassDef2(self):
475 contents = r'''\
476 """ds 1"""
477 class TestClass:
478 """ds 2"""
479 def long_name(a, b=2):
480 """ds 3"""
481 print('done')
482 '''
483 self.make_data(contents)
484 #@+node:ekr.20191227052446.13: *5* test_FunctionDef
485 def test_FunctionDef(self):
486 contents = r"""\
487 def run(fileName=None, pymacs=None):
488 pass
489 """
490 self.make_data(contents)
491 #@+node:ekr.20200111171738.1: *5* test_FunctionDef_with_annotations
492 def test_FunctionDef_with_annotations(self):
493 contents = r"""\
494 def foo(a: 'x', b: 5 + 6, c: list) -> max(2, 9):
495 pass
496 """
497 self.make_data(contents)
498 # contents, tokens, tree = self.make_data(contents)
499 # dump_ast(tree)
500 #@+node:ekr.20210802162650.1: *5* test_FunctionDef_with_posonly_args
501 def test_FunctionDef_with_posonly_args(self):
503 if py_version < (3, 9):
504 self.skipTest('Requires Python 3.9') # pragma: no cover
506 # From PEP 570
507 contents = r"""\
508 def pos_only_arg(arg, /):
509 pass
510 def kwd_only_arg(*, arg):
511 pass
512 def combined_example(pos_only, /, standard, *, kwd_only):
513 pass
514 """
515 self.make_data(contents)
516 #@+node:ekr.20191227052446.14: *4* TestTOG.Expressions & operators...
517 #@+node:ekr.20191227052446.15: *5* test_attribute
518 def test_attribute(self):
519 contents = r"""\
520 open(os.devnull, "w")
521 """
522 self.make_data(contents)
523 #@+node:ekr.20191227052446.16: *5* test_CompareOp
524 def test_CompareOp(self):
525 contents = r"""\
526 if a and not b and c:
527 pass
528 """
529 self.make_data(contents)
530 #@+node:ekr.20191227052446.17: *5* test_Dict_1
531 def test_Dict(self):
532 contents = r"""\
533 d = {'a' if x else 'b': True,}
534 """
535 self.make_data(contents)
536 #@+node:ekr.20200111191153.1: *5* test_Dict_2
537 def test_Dict_2(self):
538 contents = r"""\
539 d = {}
540 """
541 self.make_data(contents)
542 #@+node:ekr.20191227052446.18: *5* test_DictComp
543 def test_DictComp(self):
544 # leoGlobals.py, line 3028.
545 contents = r"""\
546 d2 = {val: key for key, val in d}
547 """
548 self.make_data(contents)
549 #@+node:ekr.20200112042410.1: *5* test_ExtSlice
550 def test_ExtSlice(self):
551 contents = r"""a [1, 2: 3]"""
552 self.make_data(contents)
553 #@+node:ekr.20191227052446.19: *5* test_ListComp
554 def test_ListComp(self):
555 # ListComp and comprehension.
556 contents = r"""\
557 any([p2.isDirty() for p2 in p.subtree()])
558 """
559 self.make_data(contents)
560 #@+node:ekr.20191227052446.20: *5* test_NameConstant
561 def test_NameConstant(self):
562 contents = r"""\
563 run(a=None, b=str)
564 """
565 self.make_data(contents)
566 #@+node:ekr.20191227052446.21: *5* test_Operator: semicolon
567 def test_op_semicolon(self):
568 contents = r"""\
569 print('c');
570 print('d')
571 """
572 self.make_data(contents)
573 #@+node:ekr.20191227052446.22: *5* test_Operator: semicolon between statements
574 def test_op_semicolon2(self):
575 contents = r"""\
576 a = 1 ; b = 2
577 print('a') ; print('b')
578 """
579 self.make_data(contents)
580 #@+node:ekr.20200111194454.1: *5* test_Set
581 def test_Set(self):
582 contents = """{'a', 'b'}"""
583 self.make_data(contents)
584 #@+node:ekr.20200111195654.1: *5* test_SetComp
585 def test_SetComp(self):
586 contents = """aSet = { (x, y) for x in r for y in r if x < y }"""
587 self.make_data(contents)
588 #@+node:ekr.20191227052446.23: *5* test_UnaryOp
589 def test_UnaryOp(self):
590 contents = r"""\
591 print(-(2))
592 """
593 self.make_data(contents)
594 #@+node:ekr.20191227052446.65: *4* TestTOG.f-strings....
595 #@+node:ekr.20191227052446.66: *5* test_fstring01: complex Call
596 def test_fstring1(self):
597 # Line 1177, leoApp.py
598 contents = r"""\
599 print(
600 message = f"line 1: {old_id!r}\n" "line 2\n"
601 )
602 print('done')
603 """
604 self.make_data(contents)
605 #@+node:ekr.20191227052446.67: *5* test_fstring02: Ternary
606 def test_fstring2(self):
607 contents = r"""\
608 func(f"{b if not cond1 else ''}")
609 """
610 self.make_data(contents)
611 #@+node:ekr.20191227052446.68: *5* test_fstring03: single f-string
612 def test_fstring3(self):
613 contents = r"""\
614 print(f'{7.1}')
615 print('end')
616 """
617 self.make_data(contents)
618 #@+node:ekr.20191227052446.69: *5* test_fstring04: f-string + plain
619 def test_fstring4(self):
620 contents = r"""\
621 print(f'{7.1}' 'p7.2')
622 print('end')
623 """
624 self.make_data(contents)
625 #@+node:ekr.20191227052446.70: *5* test_fstring05: plain + f-string
626 def test_fstring5(self):
627 contents = r"""\
628 print('p1' f'{f2}')
629 'end'
630 """
631 self.make_data(contents)
632 #@+node:ekr.20191227052446.71: *5* test_fstring06: f-string + fstring
633 def test_fstring6(self):
634 contents = r"""\
635 print(f'{f1}' f'{f2}')
636 'end'
637 """
638 self.make_data(contents)
639 #@+node:ekr.20191227052446.72: *5* test_fstring07: many
640 def test_fstring7(self):
641 contents = r"""\
642 print('s1', f'{f2}' f'f3' f'{f4}' 's5')
643 'end'
644 """
645 self.make_data(contents)
646 #@+node:ekr.20191227052446.73: *5* test_fstring08: ternary op
647 def test_fstring8(self):
648 # leoFind.py line 856
649 contents = r"""\
650 a = f"{'a' if x else 'b'}"
651 f()
653 # Pass
654 # print(f"{'a' if x else 'b'}")
655 """
656 self.make_data(contents)
657 #@+node:ekr.20191227052446.74: *5* test_fstring09: leoFind.py line 856
658 def test_fstring9(self):
659 contents = r"""\
660 func(
661 "Isearch"
662 f"{' Backward' if True else ''}"
663 )
664 print('done')
665 """
666 self.make_data(contents)
667 #@+node:ekr.20191227052446.75: *5* test_fstring10: leoFind.py: line 861
668 def test_fstring10(self):
669 # leoFind.py: line 861
670 contents = r"""\
671 one(f"{'B'}" ": ")
672 """
673 self.make_data(contents)
674 #@+node:ekr.20191227052446.76: *5* test_fstring11: joins
675 def test_fstring11(self):
676 contents = r"""\
677 print(f'x3{e3+1}y3' f'x4{e4+2}y4')
678 print('done')
679 """
680 self.make_data(contents)
681 #@+node:ekr.20191227052446.77: *6* more
682 # Single f-strings.
683 # 'p1' ;
684 # f'f1' ;
685 # f'x1{e1}y1' ;
686 # f'x2{e2+1}y2{e2+2}z2' ;
688 # Concatentated strings...
689 # 'p2', 'p3' ;
690 # f'f2' 'f3' ;
692 # f'x5{e5+1}y5{e5+1}z5' f'x6{e6+1}y6{e6+1}z6' ;
693 #@+node:ekr.20191227052446.78: *5* test_fstring12: joins + 1 f-expr
694 def test_fstring12(self):
695 contents = r"""\
696 print(f'x1{e1}y1', 'p1')
697 print(f'x2{e2}y2', f'f2')
698 print(f'x3{e3}y3', f'x4{e4}y4')
699 print('end')
700 """
701 self.make_data(contents)
702 #@+node:ekr.20191227052446.79: *5* test_fstring13: joins + 2 f-exprs
703 def test_fstring13(self):
704 contents = r"""\
705 print(f'x1{e1}y1{e2}z1', 'p1')
706 print(f'x2{e3}y2{e3}z2', f'f2')
707 print(f'x3{e4}y3{e5}z3', f'x4{e6}y4{e7}z4')
708 print('end')
709 """
710 self.make_data(contents)
711 #@+node:ekr.20191227052446.80: *5* test_fstring14: complex, with commas
712 def test_fstring14(self):
713 contents = r"""\
714 print(f"{list(z for z in ('a', 'b', 'c') if z != 'b')}")
715 """
716 self.make_data(contents)
717 #@+node:ekr.20191227052446.81: *5* test_fstring15
718 def test_fstring15(self):
719 contents = r"""\
720 print(f"test {a}={2}")
721 print('done')
722 """
723 self.make_data(contents)
724 #@+node:ekr.20191227052446.83: *5* test_fstring16: simple
725 def test_fstring16(self):
726 contents = r"""\
727 'p1' ;
728 f'f1' ;
729 'done' ;
730 """
731 self.make_data(contents)
732 #@+node:ekr.20191227052446.82: *5* test_regex_fstring
733 def test_regex_fstring(self):
734 # Line 7709, leoGlobals.py
735 contents = r'''\
736 fr"""{kinds}://[^\s'"]+[\w=/]"""
737 '''
738 self.make_data(contents)
739 #@+node:ekr.20191227052446.32: *4* TestTOG.If...
740 #@+node:ekr.20191227052446.33: *5* test_from leoTips.py
741 def test_if1(self):
742 # Line 93, leoTips.py
743 contents = r"""\
744 self.make_data(contents)
745 unseen = [i for i in range(5) if i not in seen]
746 for issue in data:
747 for a in aList:
748 print('a')
749 else:
750 print('b')
751 if b:
752 print('c')
753 """
754 self.make_data(contents)
755 #@+node:ekr.20191227052446.34: *5* test_if + tuple
756 def test_if2(self):
757 contents = r"""\
758 for i, j in b:
759 pass
760 """
761 self.make_data(contents)
762 #@+node:ekr.20191227052446.35: *5* test_if + unary op
763 def test_if3(self):
764 contents = r"""\
765 if -(2):
766 pass
767 """
768 self.make_data(contents)
769 #@+node:ekr.20191227052446.36: *5* test_if, elif
770 def test_if4(self):
771 contents = r"""\
772 if 1:
773 print('a')
774 elif 2:
775 print('b')
776 elif 3:
777 print('c')
778 print('d')
779 print('-')
780 if 1:
781 print('e')
782 elif 2:
783 print('f')
784 print('g')
785 """
786 self.make_data(contents)
787 #@+node:ekr.20191227052446.37: *5* test_if, elif + 2
788 def test_if5(self):
789 contents = r"""\
790 if 1:
791 pass
792 elif 2:
793 pass
794 pass
795 """
796 self.make_data(contents)
797 #@+node:ekr.20191227052446.38: *5* test_if, elif, else
798 def test_if6(self):
799 contents = r"""\
800 if (a):
801 print('a1')
802 print('a2')
803 elif b:
804 print('b1')
805 print('b2')
806 else:
807 print('c1')
808 print('c2')
809 """
810 self.make_data(contents)
811 #@+node:ekr.20191227052446.39: *5* test_if, else
812 def test_if7(self):
813 contents = r"""\
814 if 1:
815 print('a')
816 else:
817 print('b')
818 """
819 self.make_data(contents)
820 #@+node:ekr.20191227052446.40: *5* test_if, else, if
821 def test_if8(self):
822 contents = r"""\
823 if 1:
824 print('a')
825 else:
826 if 2:
827 print('b')
828 """
829 self.make_data(contents)
830 #@+node:ekr.20191227052446.41: *5* test_Nested If's
831 def test_if9(self):
832 contents = r"""\
833 if a:
834 if b:
835 print('b')
836 else:
837 if d:
838 print('d')
839 """
840 self.make_data(contents)
841 #@+node:ekr.20191227052446.42: *5* test_ternary + if
842 def test_if10(self):
843 contents = r"""\
844 if 1:
845 a = 'class' if cond else 'def'
846 # find_pattern = prefix + ' ' + word
847 print('1')
848 else:
849 print('2')
850 """
851 self.make_data(contents)
852 #@+node:ekr.20191227145620.1: *4* TestTOG.Miscellaneous...
853 #@+node:ekr.20200206041753.1: *5* test_comment_in_set_links
854 def test_comment_in_set_links(self):
855 contents = """
856 def spam():
857 # comment
858 pass
859 """
860 self.make_data(contents)
861 #@+node:ekr.20200112065944.1: *5* test_ellipsis_1
862 def test_ellipsis_1(self):
863 contents = """
864 def spam():
865 ...
866 """
867 self.make_data(contents)
868 #@+node:ekr.20200112070228.1: *5* test_ellipsis_2
869 def test_ellipsis_2(self):
870 contents = """
871 def partial(func: Callable[..., str], *args):
872 pass
873 """
874 self.make_data(contents)
875 #@+node:ekr.20191227075951.1: *5* test_end_of_line
876 def test_end_of_line(self):
877 self.make_data("""# Only a comment.""")
878 #@+node:ekr.20191227052446.50: *4* TestTOG.Plain Strings...
879 #@+node:ekr.20191227052446.52: *5* test_\x and \o escapes
880 def test_escapes(self):
881 # Line 4609, leoGlobals.py
882 contents = r"""\
883 print("\x7e" "\0777") # tilde.
884 print('done')
885 """
886 self.make_data(contents)
887 #@+node:ekr.20191227052446.53: *5* test_backslashes in docstring
888 def test_backslashes(self):
889 # leoGlobals.py.
890 contents = r'''\
891 class SherlockTracer:
892 """before\\after"""
893 '''
894 self.make_data(contents)
895 #@+node:ekr.20191227052446.54: *5* test_bs/nl
896 def test_bs_nl(self):
897 contents = r"""\
898 print('hello\
899 world')
900 """
901 self.make_data(contents)
902 #@+node:ekr.20191227052446.55: *5* test_bytes bs-x
903 def test_bytes(self):
904 # Line 201, leoApp.py
905 contents = r"""\
906 print(b'\xfe')
907 print('done')
908 """
909 self.make_data(contents)
910 #@+node:ekr.20191227052446.56: *5* test_empty string
911 def test_empyt_string(self):
912 contents = r"""\
913 self.s = ''
914 self.i = 0
915 """
916 self.make_data(contents)
917 #@+node:ekr.20191227052446.57: *5* test_escaped string delims
918 def test_escaped_delims(self):
919 contents = r"""\
920 print("a\"b")
921 """
922 self.make_data(contents)
923 #@+node:ekr.20191227052446.58: *5* test_escaped strings
924 def test_escaped_strings(self):
925 contents = r"""\
926 f1(a='\b', b='\n', t='\t')
927 f2(f='\f', r='\r', v='\v')
928 f3(bs='\\')
929 """
930 self.make_data(contents)
931 #@+node:ekr.20191227052446.59: *5* test_f-string join
932 def test_fstring_join(self):
933 # The first newline causes the fail.
934 contents = r"""\
935 print(f"a {old_id!r}\n" "b\n")
936 print('done')
937 """
938 self.make_data(contents)
939 #@+node:ekr.20191227052446.64: *5* test_potential_fstring
940 def test_potential_fstring(self):
941 contents = r"""\
942 print('test %s=%s'%(a, 2))
943 print('done')
944 """
945 self.make_data(contents)
946 #@+node:ekr.20191227052446.60: *5* test_raw docstring
947 def test_raw_docstring(self):
948 contents = r'''\
949 # Line 1619 leoFind.py
950 print(r"""DS""")
951 '''
952 self.make_data(contents)
953 #@+node:ekr.20191227052446.61: *5* test_raw escaped strings
954 def test_raw_escapes(self):
955 contents = r"""\
956 r1(a=r'\b', b=r'\n', t=r'\t')
957 r2(f=r'\f', r=r'\r', v=r'\v')
958 r3(bs=r'\\')
959 """
960 self.make_data(contents)
961 #@+node:ekr.20191227052446.62: *5* test_single quote
962 def test_single_quote(self):
963 # leoGlobals.py line 806.
964 contents = r"""\
965 print('"')
966 """
967 self.make_data(contents)
968 #@+node:ekr.20191227052446.63: *5* test_string concatenation_1
969 def test_concatenation_1(self):
970 contents = r"""\
971 print('a' 'b')
972 print('c')
973 """
974 self.make_data(contents)
975 #@+node:ekr.20200111042825.1: *5* test_string_concatenation_2
976 def test_string_concatenation_2(self):
977 # Crash in leoCheck.py.
978 contents = """return self.Type('error', 'no member %s' % ivar)"""
979 self.make_data(contents)
980 #@+node:ekr.20191227052446.43: *4* TestTOG.Statements...
981 #@+node:ekr.20200112075707.1: *5* test_AnnAssign
982 def test_AnnAssign(self):
983 contents = """x: int = 0"""
984 self.make_data(contents)
985 #@+node:ekr.20200112071833.1: *5* test_AsyncFor
986 def test_AsyncFor(self):
987 # This may require Python 3.7.
988 contents = """\
989 async def commit(session, data):
990 async for z in session.transaction():
991 await z(data)
992 else:
993 print('oops')
994 """
995 self.make_data(contents)
996 #@+node:ekr.20200111175043.1: *5* test_AsyncFunctionDef
997 def test_AsyncFunctionDef(self):
998 contents = """\
999 @my_decorator
1000 async def count() -> 42:
1001 print("One")
1002 await asyncio.sleep(1)
1003 """
1004 self.make_data(contents)
1005 #@+node:ekr.20200112073151.1: *5* test_AsyncWith
1006 def test_AsyncWith(self):
1007 contents = """\
1008 async def commit(session, data):
1009 async with session.transaction():
1010 await session.update(data)
1011 """
1012 self.make_data(contents)
1013 #@+node:ekr.20191227052446.44: *5* test_Call
    def test_Call(self):
        """Round-trip a call mixing positional, keyword, *args and **kwargs."""
        contents = """func(a, b, one='one', two=2, three=4+5, *args, **kwargs)"""
            # contents = """func(*args, **kwargs)"""
            # f1(a,b=2)
            # f2(1 + 2)
            # f3(arg, *args, **kwargs)
            # f4(a='a', *args, **kwargs)
        self.make_data(contents)
1022 #@+node:ekr.20200206040732.1: *5* test_Delete
    def test_Delete(self):
        """Round-trip a del statement (ast.Delete)."""
        # Coverage test for spaces
        contents = """del x"""
        self.make_data(contents)
1028 #@+node:ekr.20200111175335.1: *5* test_For
    def test_For(self):
        """Round-trip a simple for loop (ast.For)."""
        contents = r"""\
        for a in b:
            pass
        """
        self.make_data(contents)
1035 #@+node:ekr.20191227052446.45: *5* test_Global
    def test_Global(self):
        """Round-trip a global declaration inside a def (ast.Global)."""
        # Line 1604, leoGlobals.py
        contents = r"""
        def spam():
            global gg
            print('')
        """
        self.make_data(contents)
1044 #@+node:ekr.20200111200424.1: *5* test_ImportFrom
    def test_ImportFrom(self):
        """Round-trip from-import with an alias (ast.ImportFrom)."""
        contents = r"""from a import b as c"""
        self.make_data(contents)
1048 #@+node:ekr.20210318174705.1: *5* test_ImportFromStar
    def test_ImportFromStar(self):
        """Round-trip a star import (from sys import *)."""
        contents = r"""from sys import *"""
        self.make_data(contents)
1052 #@+node:ekr.20200206040424.1: *5* test_Lambda
    def test_Lambda(self):
        """Round-trip a lambda expression (ast.Lambda)."""
        # Coverage test for spaces
        contents = """f = lambda x: x"""
        self.make_data(contents)
1058 #@+node:ekr.20220329095904.1: *5* test_Match
    def test_Match(self):
        """Round-trip a match statement covering every case-pattern kind.

        Skipped before Python 3.10, where match/case does not exist.
        """
        if py_version < (3, 10):
            self.skipTest('Require python 3.10')
        contents = r"""\
        match node:
            # Passed...
            case 1: pass
            case (2, 3): pass
            case BinOp("+", a, BinOp("*", b, c)): pass
            case {"text": message, "color": c}: pass
            case 401 | 403 | 404: pass
            case xyzzy if a > 1: pass
            case {"sound": _, "format": _}: pass
            case BinOp2("+", a, BinOp("*", d = 2)): pass
            case BinOp2("-", d, e = 2): pass
            case {"pat1": 2, **rest}: pass
            case _: pass
            case (4, 5, *rest): pass
            case [6, 5, *rest]: pass
            case ['a'|'b' as ab, c]: pass
            case True: pass
            case False: pass
            case None: pass
            case True | False | None: pass
            case True, False, None: pass  # A tuple!
        """
        try:
            # Debug switches, normally disabled:
            # self.debug_list.append('contents')
            # self.debug_list.append('tokens')
            # self.debug_list.append('tree')
            # self.debug_list.append('full-traceback')
            self.make_data(contents)
        finally:
            # Always restore the default (no debugging) for later tests.
            self.debug_list = []
1094 #@+node:ekr.20200111200640.1: *5* test_Nonlocal
    def test_Nonlocal(self):
        """Round-trip a nonlocal declaration (ast.Nonlocal)."""
        contents = r"""nonlocal name1, name2"""
        self.make_data(contents)
1098 #@+node:ekr.20220224120239.1: *5* test_Raise
    def test_Raise(self):
        """Round-trip raise ... from None (ast.Raise with a cause)."""
        contents = "raise ImportError from None"
        self.make_data(contents)
1102 #@+node:ekr.20191227052446.46: *5* test_Try
    def test_Try(self):
        """Round-trip try with multiple except clauses and a finally clause."""
        contents = r"""\
        try:
            print('a1')
            print('a2')
        except ImportError:
            print('b1')
            print('b2')
        except SyntaxError:
            print('c1')
            print('c2')
        finally:
            print('d1')
            print('d2')
        """
        self.make_data(contents)
1119 #@+node:ekr.20191227052446.47: *5* test_TryExceptElse
    def test_Try2(self):
        """Round-trip try/except/else (no finally clause)."""
        # Line 240: leoDebugger.py
        contents = r"""\
        try:
            print('a')
        except ValueError:
            print('b')
        else:
            print('c')
        """
        self.make_data(contents)
1131 #@+node:ekr.20200206041336.1: *5* test_While
    def test_While(self):
        """Round-trip a while loop with an else clause (ast.While)."""
        contents = r"""\
        while f():
            print('continue')
        else:
            print('done')
        """
        self.make_data(contents)
1140 #@+node:ekr.20191227052446.48: *5* test_With
    def test_With(self):
        """Round-trip a with statement (ast.With)."""
        # leoGlobals.py, line 1785.
        contents = r"""\
        with open(fn) as f:
            pass
        """
        self.make_data(contents)
1148 #@+node:ekr.20200206041611.1: *5* test_Yield
    def test_Yield(self):
        """Round-trip a yield expression inside a generator (ast.Yield)."""
        contents = r"""\
        def gen_test():
            yield self.gen_token('newline', '\n')
        """
        self.make_data(contents)
1155 #@+node:ekr.20191227052446.49: *5* test_YieldFrom
    def test_YieldFrom(self):
        """Round-trip a yield from expression (ast.YieldFrom)."""
        # Line 1046, leoAst.py
        contents = r"""\
        def gen_test():
            self.node = tree
            yield from self.gen_token('newline', '\n')
            print('done')
        """
        self.make_data(contents)
1165 #@+node:ekr.20191228193740.1: *4* TestTOG.test_aa && zz
    def test_aaa(self):
        """The first test: record the suite's start time in g.total_time."""
        g.total_time = get_time()
    def test_zzz(self):
        """The last test: report total elapsed time since test_aaa ran.

        NOTE(review): relies on alphabetical test ordering so that test_aaa
        has already set g.total_time.
        """
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - g.total_time)
        # self.dump_stats()
1175 #@-others
1176#@+node:ekr.20210902074155.1: ** Test classes...
1177#@+node:ekr.20200122161530.1: *3* class Optional_TestFiles (BaseTest)
class Optional_TestFiles(BaseTest):
    """
    Tests for the TokenOrderGenerator class that act on files.

    These are optional tests. They take a long time and are not needed
    for 100% coverage.

    All of these tests failed at one time.
    """
    #@+others
    #@+node:ekr.20200726145235.2: *4* TestFiles.test_leoApp
    def test_leoApp(self):
        """Round-trip leoApp.py."""
        self.make_file_data('leoApp.py')
    #@+node:ekr.20200726145235.1: *4* TestFiles.test_leoAst
    def test_leoAst(self):
        """Round-trip leoAst.py."""
        self.make_file_data('leoAst.py')
    #@+node:ekr.20200726145333.1: *4* TestFiles.test_leoDebugger
    def test_leoDebugger(self):
        """Round-trip leoDebugger.py."""
        self.make_file_data('leoDebugger.py')
    #@+node:ekr.20200726145333.2: *4* TestFiles.test_leoFind
    def test_leoFind(self):
        """Round-trip leoFind.py."""
        self.make_file_data('leoFind.py')
    #@+node:ekr.20200726145333.3: *4* TestFiles.test_leoGlobals
    def test_leoGlobals(self):
        """Round-trip leoGlobals.py."""
        self.make_file_data('leoGlobals.py')
    #@+node:ekr.20200726145333.4: *4* TestFiles.test_leoTips
    def test_leoTips(self):
        """Round-trip leoTips.py."""
        self.make_file_data('leoTips.py')
    #@+node:ekr.20200726145735.1: *4* TestFiles.test_runLeo
    def test_runLeo(self):
        """Round-trip runLeo.py."""
        self.make_file_data('runLeo.py')
    #@+node:ekr.20200115162419.1: *4* TestFiles.compare_tog_vs_asttokens
    def compare_tog_vs_asttokens(self):  # pragma: no cover
        """Compare asttokens token lists with TOG token lists."""
        if not asttokens:
            self.skipTest('requires asttokens')
        # Define TestToken class and helper functions.
        stack: List[ast.AST] = []
        #@+others
        #@+node:ekr.20200124024159.2: *5* class TestToken (internal)
        class TestToken:
            """A patchable representation of the 5-tuples created by tokenize and used by asttokens."""

            def __init__(self, kind, value):
                self.kind = kind
                self.value = value
                self.node_list: List[ast.AST] = []

            def __str__(self):
                tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list])
                return f"{self.kind:14} {self.value:20} {tokens_s!s}"

            __repr__ = __str__
        #@+node:ekr.20200124024159.3: *5* function: atok_name
        def atok_name(token):
            """Return a good looking name for the given 5-tuple"""
            return token_module.tok_name[token[0]].lower()  # type:ignore
        #@+node:ekr.20200124024159.4: *5* function: atok_value
        def atok_value(token):
            """Print a good looking value for the given 5-tuple"""
            return token.string if atok_name(token) == 'string' else repr(token.string)
        #@+node:ekr.20200124024159.5: *5* function: dump_token
        def dump_token(token):
            """Return a one-line dump of an asttokens token and its nodes."""
            node_list = list(set(getattr(token, 'node_set', [])))
            node_list = sorted([z.__class__.__name__ for z in node_list])
            return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}"
        #@+node:ekr.20200124024159.6: *5* function: postvisit
        def postvisit(node, par_value, value):
            """asttokens post-visit callback: pop the parent stack."""
            nonlocal stack
            stack.pop()
            return par_value or []
        #@+node:ekr.20200124024159.7: *5* function: previsit
        def previsit(node, par_value):
            """asttokens pre-visit callback: inject parent/children links."""
            nonlocal stack
            if isinstance(node, ast.Module):
                stack = []
            if stack:
                parent = stack[-1]
                children: List[ast.AST] = getattr(parent, 'children', [])
                parent.children = children + [node]  # type:ignore
                node.parent = parent
            else:
                node.parent = None
                node.children = []
            stack.append(node)
            return par_value, []
        #@-others
        directory = r'c:\Repos\leo-editor\leo\core'
        filename = 'leoAst.py'
        filename = os.path.join(directory, filename)
        # A fair comparison omits the read time.
        t0 = get_time()
        contents = read_file(filename)
        t1 = get_time()
        # Part 1: TOG.
        tog = TokenOrderGenerator()
        tog.filename = filename
        tokens = make_tokens(contents)
        tree = parse_ast(contents)
        tog.create_links(tokens, tree)
        tog.balance_tokens(tokens)
        t2 = get_time()
        # Part 2: Create asttokens data.
        atok = asttokens.ASTTokens(contents, parse=True, filename=filename)
        t3 = get_time()
        # Create a patchable list of TestToken objects.
        tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens]  # type:ignore
        # Inject parent/child links into nodes.
        asttokens.util.visit_tree(atok.tree, previsit, postvisit)
        # Create token.token_list for each token.
        for node in asttokens.util.walk(atok.tree):
            # Inject node into token.node_list
            for ast_token in atok.get_tokens(node, include_extra=True):
                i = ast_token.index
                token = tokens[i]
                token.node_list.append(node)
        t4 = get_time()
        if 1:
            print(
                f"       read: {t1-t0:5.3f} sec.\n"
                f"        TOG: {t2-t1:5.3f} sec.\n"
                f"asttokens 1: {t3-t2:5.3f} sec.\n"
                f"asttokens 2: {t4-t3:5.3f} sec.\n")
        if 0:
            print('===== asttokens =====\n')
            for node in asttokens.util.walk(tree):
                print(f"{node.__class__.__name__:>10} {atok.get_text(node)!s}")
1313#@+node:ekr.20191229083512.1: *3* class TestFstringify (BaseTest)
1314class TestFstringify(BaseTest):
1315 """Tests for the TokenOrderGenerator class."""
1316 #@+others
1317 #@+node:ekr.20200111043311.1: *4* Bugs...
1318 #@+node:ekr.20210318054321.1: *5* TestFstringify.test_bug_1851
    def test_bug_1851(self):
        """Fstringify must leave dataclass definitions unchanged (#1851)."""
        # leoCheck.py.
        contents = """\
        from dataclasses import dataclass

        @dataclass(frozen=True)
        class TestClass:
            value: str
            start: int
            end: int

        f = TestClass('abc', 0, 10)
        """
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(contents).rstrip() + '\n'
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1336 #@+node:ekr.20200111043311.2: *5* TestFstringify.test_crash_1
    def test_crash_1(self):
        """Fstringify of %-format inside a tuple must not crash."""
        # leoCheck.py.
        contents = """return ('error', 'no member %s' % ivar)"""
        expected = """return ('error', f"no member {ivar}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1344 #@+node:ekr.20200111075114.1: *5* TestFstringify.test_crash_2
    def test_crash_2(self):
        """Concatenated %-format strings must be left unchanged (no crash)."""
        # leoCheck.py, line 1704.
        # format =
            # 'files: %s lines: %s chars: %s classes: %s\n'
            # 'defs: %s calls: %s undefined calls: %s returns: %s'
        # )
        contents = r"""'files: %s\n' 'defs: %s'"""
        expected = contents + '\n'
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1356 #@+node:ekr.20200214155156.1: *4* TestFstringify.show_message
    def show_message(self):  # pragma: no cover
        """Separate test of fs.message."""
        fs = Fstringify()
        fs.filename = 'test_file.py'
        fs.line_number = 42
        fs.line = 'The test line\n'
        fs.silent = False
        # Test message.
        fs.message(
            "Test:\n"
            "<  Left align\n"
            ":Colon: align\n"
            ">  Right align\n"
            "   Default align")
        #
        # change_quotes...
        fs.message("can't create f-fstring: no lt_s!")
        lt_s = "lt_s"
        delim = 'Delim'
        token = Token('Kind', 'Value')
        fs.message(
            f"unexpected token: {token.kind} {token.value}\n"
            f"            lt_s: {lt_s!r}")
        fs.message(
            f"can't create f-fstring: {lt_s!r}\n"
            f":    conflicting delim: {delim!r}")
        fs.message(
            f"can't create f-fstring: {lt_s!r}\n"
            f":backslash in {{expr}}: {delim!r}")
        # Check newlines...
        fs.message(
            f"  can't create f-fstring: {lt_s!r}\n"
            f":curly bracket underflow:")
        fs.message(
            f"  can't create f-fstring: {lt_s!r}\n"
            f":string contains a backslash:")
        fs.message(
            f"  can't create f-fstring: {lt_s!r}\n"
            f":unclosed curly bracket:")
        # Make fstring
        before, after = 'Before', 'After'
        fs.message(
            f"trace:\n"
            f":from: {before!s}\n"
            f":  to: {after!s}")
1402 #@+node:ekr.20200106163535.1: *4* TestFstringify.test_braces
    def test_braces(self):
        """Literal braces in the format string must be doubled in the f-string."""
        # From pr.construct_stylesheet in leoPrinting.py
        contents = """'h1 {font-family: %s}' % (family)"""
        expected = """f"h1 {{font-family: {family}}}"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1411 #@+node:ekr.20200217171334.1: *4* TestFstringify.test_backslash_in_expr
    def test_backslash_in_expr(self):
        """A backslash inside the substituted expression blocks conversion."""
        # From get_flake8_config.
        contents = r"""print('aaa\n%s' % ('\n'.join(dir_table)))"""
        expected = contents.rstrip() + '\n'
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree, silent=True)
        self.assertEqual(results, expected)
1419 #@+node:ekr.20191230150653.1: *4* TestFstringify.test_call_in_rhs
    def test_call_in_rhs(self):
        """Convert a %-format whose right-hand side is a call."""
        contents = """'%s' % d()"""
        expected = """f"{d()}"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1427 #@+node:ekr.20200104045907.1: *4* TestFstringify.test_call_in_rhs_2
    def test_call_in_rhs_2(self):
        """Convert a %-format whose rhs is a parenthesized nested call."""
        # From LM.traceSettingsDict
        contents = """print('%s' % (len(d.keys())))"""
        expected = """print(f"{len(d.keys())}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1436 #@+node:ekr.20200105073155.1: *4* TestFstringify.test_call_with_attribute
    def test_call_with_attribute(self):
        """Convert a %-format whose rhs is an attribute-qualified call."""
        contents = """g.blue('wrote %s' % p.atShadowFileNodeName())"""
        expected = """g.blue(f"wrote {p.atShadowFileNodeName()}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1444 #@+node:ekr.20200122035055.1: *4* TestFstringify.test_call_with_comments
    def test_call_with_comments(self):
        """Interior comments are dropped; only the trailing comment survives."""
        contents = """\
        print('%s in %5.2f sec' % (
            "done",  # message
            2.9,  # time
            ))  # trailing comment"""

        expected = """\
        print(f'{"done"} in {2.9:5.2f} sec')  # trailing comment
        """
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(expected).rstrip() + '\n'
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1460 #@+node:ekr.20200206173126.1: *4* TestFstringify.test_change_quotes
    def test_change_quotes(self):
        """The outer quotes flip so interior single quotes survive."""
        contents = """ret = '[%s]' % ','.join([show(z) for z in arg])"""
        expected = """ret = f"[{','.join([show(z) for z in arg])}]"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1468 #@+node:ekr.20200101060616.1: *4* TestFstringify.test_complex_rhs
    def test_complex_rhs(self):
        """Convert a %-format with multiple complex rhs expressions."""
        # From LM.mergeShortcutsDicts.
        contents = (
            """g.trace('--trace-binding: %20s binds %s to %s' % ("""
            """c.shortFileName(), binding, d.get(binding) or []))""")
        expected = (
            """g.trace(f"--trace-binding: {c.shortFileName():20} """
            """binds {binding} to {d.get(binding) or []}")\n""")
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1480 #@+node:ekr.20200206174208.1: *4* TestFstringify.test_function_call
    def test_function_call(self):
        """Convert a %-format nested inside a list comprehension."""
        contents = """mods = ''.join(['%s+' % z.capitalize() for z in self.mods])"""
        expected = """mods = ''.join([f"{z.capitalize()}+" for z in self.mods])\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1488 #@+node:ekr.20200106085608.1: *4* TestFstringify.test_ImportFrom
    def test_ImportFrom(self):
        """Relative imports must pass through fstringify unchanged."""
        table = (
            """from .globals import a, b""",
            """from ..globals import x, y, z""",
            """from . import j""",
        )
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            results = self.fstringify(contents, tokens, tree)
            self.assertEqual(results, contents)
1500 #@+node:ekr.20200106042452.1: *4* TestFstringify.test_ListComp
    def test_ListComp(self):
        """List comprehensions without %-formats must pass through unchanged."""
        table = (
            """replaces = [L + c + R[1:] for L, R in splits if R for c in letters]""",
            """[L for L in x for c in y]""",
            """[L for L in x for c in y if L if not c]""",
        )
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            results = self.fstringify(contents, tokens, tree)
            expected = contents
            self.assertEqual(results, expected)
1513 #@+node:ekr.20200112163031.1: *4* TestFstringify.test_munge_spec
1514 def test_munge_spec(self):
1516 # !head:tail or :tail
1517 table = (
1518 ('+1s', '', '+1'),
1519 ('-2s', '', '>2'),
1520 ('3s', '', '3'),
1521 ('4r', 'r', '4'),
1522 )
1523 for spec, e_head, e_tail in table:
1524 head, tail = Fstringify().munge_spec(spec)
1525 assert(head, tail) == (e_head, e_tail), (
1526 f"\n"
1527 f" spec: {spec}\n"
1528 f"expected head: {e_head}\n"
1529 f" got head: {head}\n"
1530 f"expected tail: {e_tail}\n"
1531 f" got tail: {tail}\n")
1532 #@+node:ekr.20200104042705.1: *4* TestFstringify.test_newlines
    def test_newlines(self):
        """Strings containing newline escapes must pass through unchanged."""
        contents = r"""\
        print("hello\n")
        print('world\n')
        print("hello\r\n")
        print('world\r\n')
        """
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1545 #@+node:ekr.20191230183652.1: *4* TestFstringify.test_parens_in_rhs
    def test_parens_in_rhs(self):
        """A parenthesized rhs converts; the %20s width becomes :20."""
        contents = """print('%20s' % (ivar), val)"""
        expected = """print(f"{ivar:20}", val)\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1553 #@+node:ekr.20200106091740.1: *4* TestFstringify.test_single_quotes
    def test_single_quotes(self):
        """Quote selection when the template or the rhs contain quotes."""
        table = (
            # Case 0.
            ("""print('%r "default"' % style_name)""",
             """print(f'{style_name!r} "default"')\n"""),
            # Case 1.
            ("""print('%r' % "val")""",
             """print(f'{"val"!r}')\n"""),
            # Case 2.
            ("""print("%r" % "val")""",
             """print(f'{"val"!r}')\n"""),
        )
        for i, data in enumerate(table):
            contents, expected = data
            description = f"test_single_quotes: {i}"
            contents, tokens, tree = self.make_data(contents, description)
            results = self.fstringify(contents, tokens, tree, filename=description)
            self.assertEqual(results, expected, msg=i)
1573 #@+node:ekr.20200214094938.1: *4* TestFstringify.test_switch_quotes
    def test_switch_quotes(self):
        """Outer quotes switch to double when the rhs uses single quotes."""
        table = (
            (
                """print('%r' % 'style_name')""",
                """print(f"{'style_name'!r}")\n""",
            ),
        )
        for i, data in enumerate(table):
            contents, expected = data
            description = f"test_single_quotes: {i}"
            contents, tokens, tree = self.make_data(contents, description)
            results = self.fstringify(contents, tokens, tree, filename=description)
            self.assertEqual(results, expected, msg=i)
1587 #@+node:ekr.20200206173725.1: *4* TestFstringify.test_switch_quotes_2
    def test_switch_quotes_2(self):
        """A multi-line %-expression collapses to one double-quoted f-string."""
        contents = """
        g.es('%s blah blah' % (
            g.angleBrackets('*')))
        """
        expected = """g.es(f"{g.angleBrackets(\'*\')} blah blah")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1598 #@+node:ekr.20200206173628.1: *4* TestFstringify.test_switch_quotes_3
    def test_switch_quotes_3(self):
        """Quotes switch when a single-quoted literal is substituted."""
        contents = """print('Test %s' % 'one')"""
        expected = """print(f"Test {'one'}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1606 #@+node:ekr.20200219125956.1: *4* TestFstringify.test_switch_quotes_fail
    def test_switch_quotes_fail(self):
        """Conversion is refused when rhs mixes single and double quotes."""
        contents = """print('Test %s %s' % ('one', "two"))"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
1614 #@-others
1615#@+node:ekr.20220402152331.1: *3* class TestIterative(TestTOG)
1616class TestIterative(TestTOG):
1617 """
1618 Tests for the IterativeTokenGenerator class.
1620 This class inherits:
1621 - all the tests from the TestTOG class.
1622 - most of the support code from the BaseTest class.
1623 """
1624 debug_list = [] # 'full-traceback', 'tokens', 'tree'
1626 #@+others
1627 #@+node:edreamleo.20220429071246.1: *4* TestIterative.setUp
    def setUp(self):
        """Skip every test in this class below Python 3.9."""
        if py_version < (3, 9):
            self.skipTest('Requires Python 3.9 or above')
1632 #@+node:ekr.20220402150424.1: *4* TestIterative.make_data (override)
    def make_data(self, contents, description=None):  # pragma: no cover
        """Return (contents, tokens, tree) for the given contents.

        Override of BaseTest.make_data that instantiates
        IterativeTokenGenerator instead of TokenOrderGenerator.
        """
        contents = contents.lstrip('\\\n')
        if not contents:
            return '', None, None
        self.link_error = None
        t1 = get_time()
        self.update_counts('characters', len(contents))
        # Ensure all tests end in exactly one newline.
        contents = textwrap.dedent(contents).rstrip() + '\n'
        # Create the TOG instance.
        # This next line is why we must copy this entire method.
        self.tog = IterativeTokenGenerator()  # Was TokenOrderGenerator().
        self.tog.filename = description or g.callers(2).split(',')[0]
        # Pass 0: create the tokens and parse tree
        tokens = self.make_tokens(contents)
        if not tokens:
            self.fail('make_tokens failed')
        tree = self.make_tree(contents)
        if not tree:
            self.fail('make_tree failed')
        if 'contents' in self.debug_list:
            dump_contents(contents)
        if 'ast' in self.debug_list:
            if py_version >= (3, 9):
                # pylint: disable=unexpected-keyword-arg
                # ast.dump's indent keyword is new in Python 3.9.
                g.printObj(ast.dump(tree, indent=2), tag='ast.dump')
            else:
                g.printObj(ast.dump(tree), tag='ast.dump')
        if 'tree' in self.debug_list:  # Excellent traces for tracking down mysteries.
            dump_ast(tree)  # pragma: no cover
        if 'tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        self.balance_tokens(tokens)
        # Pass 1: create the links.
        self.create_links(tokens, tree)
        if 'post-tree' in self.debug_list:
            dump_tree(tokens, tree)  # pragma: no cover
        if 'post-tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - t1)
        if self.link_error:
            self.fail(self.link_error)  # pragma: no cover
        return contents, tokens, tree
1678 #@+node:ekr.20220403063148.1: *4* Copies of TestOrange tests
1679 # Required for full coverage.
1680 # These might migrate to the TestTOG class.
1681 #@+node:ekr.20220403063936.1: *5* TestIterative.test_relative_imports
    def test_relative_imports(self):
        """The beautifier must normalize spacing in relative imports (#2533)."""
        contents = """\
        from .module1 import w
        from . module2 import x
        from ..module1 import y
        from .. module2 import z
        from . import a
        from.import b
        from leo.core import leoExternalFiles
        import leo.core.leoGlobals as g
        """
        expected = textwrap.dedent("""\
        from .module1 import w
        from .module2 import x
        from ..module1 import y
        from ..module2 import z
        from . import a
        from . import b
        from leo.core import leoExternalFiles
        import leo.core.leoGlobals as g
        """)
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(expected, results)
1708 #@+node:ekr.20220403062001.1: *5* TestIterative.test_one_line_pet_peeves
    def test_one_line_pet_peeves(self):
        """Beautify one-line snippets and compare each result with black's."""
        # A copy of TestOrange.test_one_line_pet_peeves.
        # Necessary for coverage testings for slices.
        tag = 'test_one_line_pet_peeves'
        # Except where noted, all entries are expected values....
        if 0:
            # Test fails or recents...
            table = (
                # """a[: 1 if True else 2 :]""",
                """a[:-1]""",
            )
        else:
            table = (
                # Assignments...
                # Slices (colons)...
                """a[:-1]""",
                """a[: 1 if True else 2 :]""",
                """a[1 : 1 + 2]""",
                """a[lower:]""",
                """a[lower::]""",
                """a[:upper]""",
                """a[:upper:]""",
                """a[::step]""",
                """a[lower:upper:]""",
                """a[lower:upper:step]""",
                """a[lower + offset : upper + offset]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : step_fn(x)]""",
                """a[:: step_fn(x)]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : 2 + 1]""",
                """a[:]""",
                """a[::]""",
                """a[1:]""",
                """a[1::]""",
                """a[:2]""",
                """a[:2:]""",
                """a[::3]""",
                """a[1:2]""",
                """a[1:2:]""",
                """a[:2:3]""",
                """a[1:2:3]""",
                # * and **, inside and outside function calls.
                """a = b * c""",
                # Now done in test_star_star_operator
                # """a = b ** c""",  # Black has changed recently.
                """f(*args)""",
                """f(**kwargs)""",
                """f(*args, **kwargs)""",
                """f(a, *args)""",
                """f(a=2, *args)""",
                # Calls...
                """f(-1)""",
                """f(-1 < 2)""",
                """f(1)""",
                """f(2 * 3)""",
                """f(2 + name)""",
                """f(a)""",
                """f(a.b)""",
                """f(a=2 + 3, b=4 - 5, c= 6 * 7, d=8 / 9, e=10 // 11)""",
                """f(a[1 + 2])""",
                """f({key: 1})""",
                """t = (0,)""",
                """x, y = y, x""",
                # Dicts...
                """d = {key: 1}""",
                """d['key'] = a[i]""",
                # Trailing comments: expect two spaces.
                """whatever # comment""",
                """whatever  # comment""",
                """whatever   # comment""",
                # Word ops...
                """v1 = v2 and v3 if v3 not in v4 or v5 in v6 else v7""",
                """print(v7 for v8 in v9)""",
                # Unary ops...
                """v = -1 if a < b else -2""",
                # Returns...
                """return -1""",
            )
        fails = 0
        for i, contents in enumerate(table):
            description = f"{tag} part {i}"
            contents, tokens, tree = self.make_data(contents, description)
            expected = self.blacken(contents)
            results = self.beautify(contents, tokens, tree, filename=description)
            message = (
                f"\n"
                f"  contents: {contents.rstrip()}\n"
                f"     black: {expected.rstrip()}\n"
                f"    orange: {results.rstrip()}")
            if results != expected:  # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
1805 #@+node:ekr.20220403062532.1: *5* TestIterative.blacken
1806 def blacken(self, contents, line_length=None):
1807 """Return the results of running black on contents"""
1808 # A copy of TestOrange.blacken
1809 if not black:
1810 self.skipTest('Can not import black') # pragma: no cover
1811 # Suppress string normalization!
1812 try:
1813 mode = black.FileMode()
1814 mode.string_normalization = False
1815 if line_length is not None:
1816 mode.line_length = line_length
1817 except TypeError: # pragma: no cover
1818 self.skipTest('old version of black')
1819 return black.format_str(contents, mode=mode)
1820 #@-others
1821#@+node:ekr.20200107174645.1: *3* class TestOrange (BaseTest)
1822class TestOrange(BaseTest):
1823 """
1824 Tests for the Orange class.
1826 **Important**: All unit tests assume that black_mode is False.
1827 That is, unit tests assume that no blank lines
1828 are ever inserted or deleted.
1829 """
1830 #@+others
1831 #@+node:ekr.20200115201823.1: *4* TestOrange.blacken
1832 def blacken(self, contents, line_length=None):
1833 """Return the results of running black on contents"""
1834 if not black:
1835 self.skipTest('Can not import black') # pragma: no cover
1836 # Suppress string normalization!
1837 try:
1838 mode = black.FileMode()
1839 mode.string_normalization = False
1840 if line_length is not None:
1841 mode.line_length = line_length
1842 except TypeError: # pragma: no cover
1843 self.skipTest('old version of black')
1844 return black.format_str(contents, mode=mode)
1845 #@+node:ekr.20200219114415.1: *4* TestOrange.test_at_doc_part
    def test_at_doc_part(self):
        """Leo @doc parts (#@+at ... #@@c) must survive beautification."""
        line_length = 40  # For testing.
        contents = """\
        #@+at Line 1
        # Line 2
        #@@c

        print('hi')
        """
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected)
1863 #@+node:ekr.20200116102345.1: *4* TestOrange.test_backslash_newline
    def test_backslash_newline(self):
        """
        This test is necessarily different from black, because orange doesn't
        delete semicolon tokens.
        """
        contents = r"""
        print(a);\
        print(b)
        print(c); \
        print(d)
        """
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        # expected = self.blacken(contents).rstrip() + '\n'
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
1880 #@+node:ekr.20200219145639.1: *4* TestOrange.test_blank_lines_after_function
    def test_blank_lines_after_function(self):
        """Blank lines around a def and its comments must be preserved."""
        contents = """\
        # Comment line 1.
        # Comment line 2.

        def spam():
            pass
            # Properly indented comment.

        # Comment line3.
        # Comment line4.
        a = 2
        """
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
1899 #@+node:ekr.20200220050758.1: *4* TestOrange.test_blank_lines_after_function_2
    def test_blank_lines_after_function_2(self):
        """Leading and trailing comment lines around a def are preserved."""
        contents = """\
        # Leading comment line 1.
        # Leading comment lines 2.

        def spam():
            pass

        # Trailing comment line.
        a = 2
        """
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
1916 #@+node:ekr.20200220053212.1: *4* TestOrange.test_blank_lines_after_function_3
    def test_blank_lines_after_function_3(self):
        """Blank lines around a nested def must be preserved."""
        # From leoAtFile.py.
        contents = r"""\
        def writeAsisNode(self, p):
            print('1')

            def put(s):
                print('2')

            # Trailing comment 1.
            # Trailing comment 2.
            print('3')
        """
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
1935 #@+node:ekr.20200228074455.1: *4* TestOrange.test_bug_1429
    def test_bug_1429(self):
        """Multi-line strings inside try/except must round-trip (#1429)."""
        contents = r'''\
        def get_semver(tag):
            """bug 1429 docstring"""
            try:
                import semantic_version
                version = str(semantic_version.Version.coerce(tag, partial=True))
                # tuple of major, minor, build, pre-release, patch
                # 5.6b2 --> 5.6-b2
            except(ImportError, ValueError) as err:
                print('\n', err)
                print("""*** Failed to parse Semantic Version from git tag '{0}'.
        Expecting tag name like '5.7b2', 'leo-4.9.12', 'v4.3' for releases.
        This version can't be uploaded to PyPi.org.""".format(tag))
                version = tag
            return version
        '''
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=0, max_split_line_length=0)
        self.assertEqual(results, expected)
1959 #@+node:ekr.20210318055702.1: *4* TestOrange.test_bug_1851
    def test_bug_1851(self):
        """#1851: a minimal def must round-trip with joining and splitting disabled."""
        contents = r'''\
    def foo(a1):
        pass
    '''
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        # Disable both joining and splitting so the text passes through as-is.
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=0, max_split_line_length=0)
        self.assertEqual(results, expected)
1971 #@+node:ekr.20200210120455.1: *4* TestOrange.test_decorator
    def test_decorator(self):
        """Decorators (top-level, nested, and with arguments) must round-trip unchanged."""
        table = (
            # Case 0.
            """\
    @my_decorator(1)
    def func():
        pass
    """,
            # Case 1.
            """\
    if 1:
        @my_decorator
        def func():
            pass
    """,
            # Case 2.
            '''\
    @g.commander_command('promote')
    def promote(self, event=None, undoFlag=True):
        """Make all children of the selected nodes siblings of the selected node."""
    ''',
        )
        for i, contents in enumerate(table):
            contents, tokens, tree = self.make_data(contents)
            # Round-trip test: the beautified output must equal the input.
            expected = contents
            results = self.beautify(contents, tokens, tree)
            if results != expected:
                g.trace('Fail:', i)  # pragma: no cover
            self.assertEqual(results, expected)
2002 #@+node:ekr.20200211094614.1: *4* TestOrange.test_dont_delete_blank_lines
    def test_dont_delete_blank_lines(self):
        """Blank lines inside a class body must not be deleted."""
        line_length = 40 # For testing.
        contents = """\
    class Test:

        def test_func():

            pass

        a = 2
    """
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected)
2022 #@+node:ekr.20200116110652.1: *4* TestOrange.test_function_defs
    def test_function_defs(self):
        """Orange must agree with black for several styles of function definition."""
        table = (
            # Case 0.
            """\
    def f1(a=2 + 5):
        pass
    """,
            # Case 2
            """\
    def f1():
        pass
    """,
            # Case 3.
            """\
    def f1():
        pass
    """,
            # Case 4.
            '''\
    def should_kill_beautify(p):
        """Return True if p.b contains @killbeautify"""
        return 'killbeautify' in g.get_directives_dict(p)
    ''',
        )
        for i, contents in enumerate(table):
            contents, tokens, tree = self.make_data(contents)
            # black's output (normalized to a single trailing newline) is the oracle.
            expected = self.blacken(contents).rstrip() + '\n'
            results = self.beautify(contents, tokens, tree)
            self.assertEqual(results, expected)
2053 #@+node:ekr.20200209152745.1: *4* TestOrange.test_indented_comment
    def test_indented_comment(self):
        """Indented comments must keep their indentation after beautification."""
        line_length = 40 # For testing.
        # NOTE(review): the comment indentation inside these strings was
        # reconstructed from a mangled dump — confirm against the original file.
        table = (
            """\
    if 1:
        pass
            # An indented comment.
    """,
            """\
    table = (
        # Indented comment.
    )
    """
        )

        fails = 0
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            # Round-trip test: the beautified output must equal the input.
            expected = contents
            if 0:
                dump_contents(contents)
                dump_tokens(tokens)
                # dump_tree(tokens, tree)
            results = self.beautify(contents, tokens, tree,
                max_join_line_length=line_length,
                max_split_line_length=line_length,
            )
            message = (
                f"\n"
                f" contents: {contents!r}\n"
                f" expected: {expected!r}\n"
                f" got: {results!r}")
            if results != expected: # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        assert not fails, fails
2091 #@+node:ekr.20200116104031.1: *4* TestOrange.test_join_and_strip_condition
    def test_join_and_strip_condition(self):
        """A parenthesized multi-line condition must be joined onto one line."""
        contents = """\
    if (
        a == b or
        c == d
    ):
        pass
    """
        expected = """\
    if (a == b or c == d):
        pass
    """
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(expected)
        # Black also removes parens, which is beyond our scope at present.
            # expected = self.blacken(contents, line_length=40)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
2111 #@+node:ekr.20200208041446.1: *4* TestOrange.test_join_leading_whitespace
    def test_join_leading_whitespace(self):
        """Joining split lines must preserve the leading whitespace of the statement."""
        line_length = 40 # For testing.
        table = (
        #1234567890x1234567890x1234567890x1234567890x
            """\
    if 1:
        print('4444',
            '5555')
    """,
            """\
    if 1:
        print('4444', '5555')\n""",
        )
        fails = 0
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            if 0:
                dump_contents(contents)
                dump_tokens(tokens)
                # dump_tree(tokens, tree)
            # Round-trip test: the beautified output must equal the input.
            expected = contents
            # expected = self.blacken(contents, line_length=line_length)
            results = self.beautify(contents, tokens, tree,
                max_join_line_length=line_length,
                max_split_line_length=line_length,
            )
            message = (
                f"\n"
                f" contents: {contents!r}\n"
                f" expected: {expected!r}\n"
                f" got: {results!r}")
            if results != expected: # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        assert not fails, fails
2148 #@+node:ekr.20200121093134.1: *4* TestOrange.test_join_lines
    def test_join_lines(self):
        """Short split lines must be joined when they fit within the limit."""
        # Except where noted, all entries are expected values....
        line_length = 40 # For testing.
        table = (
        #1234567890x1234567890x1234567890x1234567890x
            """print('4444',\n '5555')""",
            """print('4444', '5555')\n""",
        )
        fails = 0
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            if 0:
                dump_contents(contents)
                dump_tokens(tokens)
                # dump_tree(tokens, tree)
            # Round-trip test: the beautified output must equal the input.
            expected = contents
            results = self.beautify(contents, tokens, tree,
                max_join_line_length=line_length,
                max_split_line_length=line_length,
            )
            message = (
                f"\n"
                f" contents: {contents!r}\n"
                f" expected: {expected!r}\n"
                f" orange: {results!r}")
            if results != expected: # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
2179 #@+node:ekr.20200210051900.1: *4* TestOrange.test_join_suppression
    def test_join_suppression(self):
        """A split call inside a class body must be joined onto one line."""
        contents = """\
    class T:
        a = 1
        print(
            a
        )
    """
        expected = """\
    class T:
        a = 1
        print(a)
    """
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(expected)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
2198 #@+node:ekr.20200207093606.1: *4* TestOrange.test_join_too_long_lines
    def test_join_too_long_lines(self):
        """Lines that would exceed the limit when joined must be left split."""
        # Except where noted, all entries are expected values....
        line_length = 40 # For testing.
        table = (
        #1234567890x1234567890x1234567890x1234567890x
            (
                """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')""",
                """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')\n""",
            ),
        )
        fails = 0
        for contents, expected in table:
            contents, tokens, tree = self.make_data(contents)
            if 0:
                dump_contents(contents)
                dump_tokens(tokens)
                # dump_tree(tokens, tree)
            results = self.beautify(contents, tokens, tree,
                max_join_line_length=line_length,
                max_split_line_length=line_length,
            )
            message = (
                f"\n"
                f" contents: {contents!r}\n"
                f" expected: {expected!r}\n"
                f" got: {results!r}")
            if results != expected: # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        assert not fails, fails
2230 #@+node:ekr.20220327131225.1: *4* TestOrange.test_leading_stars
    def test_leading_stars(self):
        """#2533: *args and **kwargs must keep their stars when the signature is joined."""
        # #2533.
        contents = """\
    def f(
        arg1,
        *args,
        **kwargs
    ):
        pass
    """
        expected = textwrap.dedent("""\
    def f(arg1, *args, **kwargs):
        pass
    """)
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(expected, results)
2249 #@+node:ekr.20200108075541.1: *4* TestOrange.test_leo_sentinels
    def test_leo_sentinels_1(self):
        """A Leo sentinel line preceding a def must round-trip unchanged."""
        # Careful: don't put a sentinel into the file directly.
        # That would corrupt leoAst.py.
        sentinel = '#@+node:ekr.20200105143308.54: ** test'
        contents = f"""\
    {sentinel}
    def spam():
        pass
    """
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
2264 #@+node:ekr.20200209155457.1: *4* TestOrange.test_leo_sentinels_2
    def test_leo_sentinels_2(self):
        """A Leo sentinel line preceding a class must round-trip unchanged."""
        # Careful: don't put a sentinel into the file directly.
        # That would corrupt leoAst.py.
        sentinel = '#@+node:ekr.20200105143308.54: ** test'
        contents = f"""\
    {sentinel}
    class TestClass:
        pass
    """
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
2279 #@+node:ekr.20200108082833.1: *4* TestOrange.test_lines_before_class
    def test_lines_before_class(self):
        """A statement immediately before a class definition must round-trip unchanged."""
        contents = """\
    a = 2
    class aClass:
        pass
    """
        contents, tokens, tree = self.make_data(contents)
        # Round-trip test: the beautified output must equal the input.
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
2291 #@+node:ekr.20200110014220.86: *4* TestOrange.test_multi_line_pet_peeves
    def test_multi_line_pet_peeves(self):
        """PEP 8 pet peeves spanning statements: spacing around colons, semicolons, and parens."""
        contents = """\
    if x == 4: pass
    if x == 4 : pass
    print (x, y); x, y = y, x
    print (x , y) ; x , y = y , x
    if(1):
        pass
    elif(2):
        pass
    while(3):
        pass
    """
        # At present Orange doesn't split lines...
        expected = """\
    if x == 4: pass
    if x == 4: pass
    print(x, y); x, y = y, x
    print(x, y); x, y = y, x
    if (1):
        pass
    elif (2):
        pass
    while (3):
        pass
    """
        contents, tokens, tree = self.make_data(contents)
        expected = self.adjust_expected(expected)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
2323 #@+node:ekr.20200110014220.95: *4* TestOrange.test_one_line_pet_peeves
    def test_one_line_pet_peeves(self):
        """Single-line PEP 8 pet peeves (slices, stars, calls, dicts, comments, word ops): Orange must agree with black."""
        tag = 'test_one_line_pet_peeves'
        # Except where noted, all entries are expected values....
        if 0:
            # Test fails or recents...
            table = (
                # """a[: 1 if True else 2 :]""",
                """a[:-1]""",
            )
        else:
            table = (
                # Assignments...
                # Slices (colons)...
                """a[:-1]""",
                """a[: 1 if True else 2 :]""",
                """a[1 : 1 + 2]""",
                """a[lower:]""",
                """a[lower::]""",
                """a[:upper]""",
                """a[:upper:]""",
                """a[::step]""",
                """a[lower:upper:]""",
                """a[lower:upper:step]""",
                """a[lower + offset : upper + offset]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : step_fn(x)]""",
                """a[:: step_fn(x)]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : 2 + 1]""",
                """a[:]""",
                """a[::]""",
                """a[1:]""",
                """a[1::]""",
                """a[:2]""",
                """a[:2:]""",
                """a[::3]""",
                """a[1:2]""",
                """a[1:2:]""",
                """a[:2:3]""",
                """a[1:2:3]""",
                # * and **, inside and outside function calls.
                """a = b * c""",
                # Now done in test_star_star_operator
                # """a = b ** c""", # Black has changed recently.
                """f(*args)""",
                """f(**kwargs)""",
                """f(*args, **kwargs)""",
                """f(a, *args)""",
                """f(a=2, *args)""",
                # Calls...
                """f(-1)""",
                """f(-1 < 2)""",
                """f(1)""",
                """f(2 * 3)""",
                """f(2 + name)""",
                """f(a)""",
                """f(a.b)""",
                """f(a=2 + 3, b=4 - 5, c= 6 * 7, d=8 / 9, e=10 // 11)""",
                """f(a[1 + 2])""",
                """f({key: 1})""",
                """t = (0,)""",
                """x, y = y, x""",
                # Dicts...
                """d = {key: 1}""",
                """d['key'] = a[i]""",
                # Trailing comments: expect two spaces.
                # NOTE(review): the varying pre-comment spacing of the next three
                # entries was lost in extraction — confirm against the original file.
                """whatever # comment""",
                """whatever # comment""",
                """whatever # comment""",
                # Word ops...
                """v1 = v2 and v3 if v3 not in v4 or v5 in v6 else v7""",
                """print(v7 for v8 in v9)""",
                # Unary ops...
                """v = -1 if a < b else -2""",
                # Returns...
                """return -1""",
            )
        fails = 0
        for i, contents in enumerate(table):
            description = f"{tag} part {i}"
            contents, tokens, tree = self.make_data(contents, description)
            # black's output is the oracle for these one-liners.
            expected = self.blacken(contents)
            results = self.beautify(contents, tokens, tree, filename=description)
            message = (
                f"\n"
                f" contents: {contents.rstrip()}\n"
                f" black: {expected.rstrip()}\n"
                f" orange: {results.rstrip()}")
            if results != expected: # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
2417 #@+node:ekr.20220327135448.1: *4* TestOrange.test_relative_imports
    def test_relative_imports(self):
        """#2533: relative imports must be normalized (no space between dots and module name)."""
        # #2533.
        contents = """\
    from .module1 import w
    from . module2 import x
    from ..module1 import y
    from .. module2 import z
    from . import a
    from.import b
    from leo.core import leoExternalFiles
    import leo.core.leoGlobals as g
    """
        expected = textwrap.dedent("""\
    from .module1 import w
    from .module2 import x
    from ..module1 import y
    from ..module2 import z
    from . import a
    from . import b
    from leo.core import leoExternalFiles
    import leo.core.leoGlobals as g
    """)
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(expected, results)
2444 #@+node:ekr.20200210050646.1: *4* TestOrange.test_return
2445 def test_return(self):
2447 contents = """return []"""
2448 expected = self.blacken(contents)
2449 contents, tokens, tree = self.make_data(contents)
2450 results = self.beautify(contents, tokens, tree)
2451 self.assertEqual(results, expected)
2452 #@+node:ekr.20200107174742.1: *4* TestOrange.test_single_quoted_string
2453 def test_single_quoted_string(self):
2455 contents = """print('hi')"""
2456 # blacken suppresses string normalization.
2457 expected = self.blacken(contents)
2458 contents, tokens, tree = self.make_data(contents)
2459 results = self.beautify(contents, tokens, tree)
2460 self.assertEqual(results, expected)
2461 #@+node:ekr.20200117180956.1: *4* TestOrange.test_split_lines
    def test_split_lines(self):
        """Lines longer than the limit must be split; black at width 40 is the oracle."""
        line_length = 40 # For testing.
        table = (
        #1234567890x1234567890x1234567890x1234567890x
            """\
    if 1:
        print('1111111111', '2222222222', '3333333333')
    """,
            """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc')""",
            """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc', 'ddddddddddddddddd')""",
        )
        fails = 0
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            if 0:
                dump_tokens(tokens)
                # dump_tree(tokens, tree)
            expected = self.blacken(contents, line_length=line_length)
            results = self.beautify(contents, tokens, tree,
                max_join_line_length=line_length,
                max_split_line_length=line_length,
            )
            message = (
                f"\n"
                f" contents: {contents!s}\n"
                f" black: {expected!s}\n"
                f" orange: {results!s}")
            if results != expected: # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
2494 #@+node:ekr.20200210073227.1: *4* TestOrange.test_split_lines_2
    def test_split_lines_2(self):
        """Splitting a long condition containing a comprehension (differs from black)."""
        line_length = 40 # For testing.
        # Different from how black handles things.
        contents = """\
    if not any([z.kind == 'lt' for z in line_tokens]):
        return False
    """
        expected = """\
    if not any(
        [z.kind == 'lt' for z in line_tokens]):
        return False
    """
        fails = 0
        contents, tokens, tree = self.make_data(contents)
        # expected = self.blacken(contents, line_length=line_length)
        expected = textwrap.dedent(expected)
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        message = (
            f"\n"
            f" contents: {contents!r}\n"
            f" expected: {expected!r}\n"
            f" got: {results!r}")
        if results != expected: # pragma: no cover
            fails += 1
            print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
2525 #@+node:ekr.20200219144837.1: *4* TestOrange.test_split_lines_3
    def test_split_lines_3(self):
        """Splitting a long call with a nested tuple argument (close to, but not identical to, black)."""
        line_length = 40 # For testing.
        # Different from how black handles things.
        contents = """print('eee', ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'), 'jjjjjjj', 'kkkkkk')"""
        # This is a bit different from black, but it's good enough for now.
        expected = """\
    print(
        'eee',
        ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'),
        'jjjjjjj',
        'kkkkkk',
    )
    """
        fails = 0
        contents, tokens, tree = self.make_data(contents)
        # expected = self.blacken(contents, line_length=line_length)
        expected = textwrap.dedent(expected)
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        message = (
            f"\n"
            f" contents: {contents!r}\n"
            f" expected: {expected!r}\n"
            f" got: {results!r}")
        if results != expected: # pragma: no cover
            fails += 1
            print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
2557 #@+node:ekr.20220401191253.1: *4* TestOrange.test_star_star_operator
2558 def test_star_star_operator(self):
2559 # Was tested in pet peeves, but this is more permissive.
2560 contents = """a = b ** c"""
2561 contents, tokens, tree = self.make_data(contents)
2562 # Don't rely on black for this test.
2563 # expected = self.blacken(contents)
2564 expected = contents
2565 results = self.beautify(contents, tokens, tree)
2566 self.assertEqual(results, expected)
2567 #@+node:ekr.20200119155207.1: *4* TestOrange.test_sync_tokens
2568 def test_sync_tokens(self):
2570 contents = """if x == 4: pass"""
2571 # At present Orange doesn't split lines...
2572 expected = """if x == 4: pass"""
2573 contents, tokens, tree = self.make_data(contents)
2574 expected = self.adjust_expected(expected)
2575 results = self.beautify(contents, tokens, tree)
2576 self.assertEqual(results, expected)
2577 #@+node:ekr.20200209161226.1: *4* TestOrange.test_ternary
2578 def test_ternary(self):
2580 contents = """print(2 if name == 'class' else 1)"""
2581 contents, tokens, tree = self.make_data(contents)
2582 expected = contents
2583 results = self.beautify(contents, tokens, tree)
2584 self.assertEqual(results, expected)
2585 #@+node:ekr.20200211093359.1: *4* TestOrange.test_verbatim
    def test_verbatim(self):
        """Text between @@nobeautify and @@beautify must pass through verbatim."""
        line_length = 40 # For testing.
        # NOTE(review): interior indentation of this verbatim region was
        # reconstructed from a mangled dump — confirm against the original file.
        contents = """\
    #@@nobeautify

    def addOptionsToParser(self, parser, trace_m):

        add = parser.add_option

        def add_bool(option, help, dest=None):
            add(option, action='store_true', dest=dest, help=help)

        add_bool('--diff', 'use Leo as an external git diff')
        # add_bool('--dock', 'use a Qt dock')
        add_bool('--fullscreen', 'start fullscreen')
        add_bool('--init-docks', 'put docks in default positions')
        # Multiple bool values.
        add('-v', '--version', action='store_true',
            help='print version number and exit')

    # From leoAtFile.py
    noDirective = 1 # not an at-directive.
    allDirective = 2 # at-all (4.2)
    docDirective = 3 # @doc.

    #@@beautify
    """
        contents, tokens, tree = self.make_data(contents)
        # Round-trip test: the beautified output must equal the input.
        expected = contents
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected, msg=contents)
2621 #@+node:ekr.20200211094209.1: *4* TestOrange.test_verbatim_with_pragma
    def test_verbatim_with_pragma(self):
        """'#pragma: no beautify' must suppress beautification until '# pragma: beautify'."""
        line_length = 40 # For testing.
        # NOTE(review): interior indentation of this verbatim region was
        # reconstructed from a mangled dump — confirm against the original file.
        contents = """\
    #pragma: no beautify

    def addOptionsToParser(self, parser, trace_m):

        add = parser.add_option

        def add_bool(option, help, dest=None):
            add(option, action='store_true', dest=dest, help=help)

        add_bool('--diff', 'use Leo as an external git diff')
        # add_bool('--dock', 'use a Qt dock')
        add_bool('--fullscreen', 'start fullscreen')
        add_other('--window-size', 'initial window size (height x width)', m='SIZE')
        add_other('--window-spot', 'initial window position (top x left)', m='SPOT')
        # Multiple bool values.
        add('-v', '--version', action='store_true',
            help='print version number and exit')

    # pragma: beautify
    """
        contents, tokens, tree = self.make_data(contents)
        # Round-trip test: the beautified output must equal the input.
        expected = contents
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected, msg=contents)
2653 #@+node:ekr.20200729083027.1: *4* TestOrange.verbatim2
    def test_verbatim2(self):
        """Leo directives and an @-doc part must pass through verbatim."""
        contents = """\
    #@@beautify
    #@@nobeautify
    #@+at Starts doc part
    # More doc part.
    # The @c ends the doc part.
    #@@c
    """
        contents, tokens, tree = self.make_data(contents)
        # Round-trip test: the beautified output must equal the input.
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected, msg=contents)
2668 #@-others
2669#@+node:ekr.20191231130208.1: *3* class TestReassignTokens (BaseTest)
class TestReassignTokens(BaseTest):
    """Test cases for the ReassignTokens class."""
    #@+others
    #@+node:ekr.20191231130320.1: *4* test_reassign_tokens (to do)
    def test_reassign_tokens(self):
        # TODO: placeholder — no assertions yet for ReassignTokens.
        pass
    #@+node:ekr.20191231130334.1: *4* test_nearest_common_ancestor
    def test_nearest_common_ancestor(self):
        """Smoke test: make_data must handle a %-format expression without error."""
        contents = """name='uninverted %s' % d.name()"""
        self.make_data(contents)
    #@-others
2682#@+node:ekr.20200110093802.1: *3* class TestTokens (BaseTest)
class TestTokens(BaseTest):
    """Unit tests for tokenizing."""
    #@+others
    #@+node:ekr.20200122165910.1: *4* TT.show_asttokens_script
    def show_asttokens_script(self): # pragma: no cover
        """
        A script showing how asttokens can *easily* do the following:
        - Inject parent/child links into ast nodes.
        - Inject many-to-many links between tokens and ast nodes.

        Not a unit test: run only when called explicitly.
        """
        # pylint: disable=import-error,reimported
        import ast
        import asttokens
        import token as token_module
        # Visit stack maintained by previsit/postvisit below.
        stack: List[ast.AST] = []
        # Define TestToken class and helper functions.
        #@+others
        #@+node:ekr.20200122170101.3: *5* class TestToken
        class TestToken:
            """A patchable representation of the 5-tuples created by tokenize and used by asttokens."""

            def __init__(self, kind, value):
                self.kind = kind
                self.value = value
                # ast nodes linked to this token; filled in by the script below.
                self.node_list: List[Any] = []

            def __str__(self):
                tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list])
                return f"{self.kind:12} {self.value:20} {tokens_s!s}"

            __repr__ = __str__
        #@+node:ekr.20200122170101.1: *5* function: atok_name
        def atok_name(token):
            """Return a good looking name for the given 5-tuple"""
            return token_module.tok_name[token[0]].lower() # type:ignore
        #@+node:ekr.20200122170101.2: *5* function: atok_value
        def atok_value(token):
            """Print a good looking value for the given 5-tuple"""
            return token.string if atok_name(token) == 'string' else repr(token.string)
        #@+node:ekr.20200122170057.1: *5* function: dump_token
        def dump_token(token):
            """Return a one-line summary: index, kind, value and linked node names."""
            node_list = list(set(getattr(token, 'node_set', [])))
            node_list = sorted([z.__class__.__name__ for z in node_list])
            return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}"
        #@+node:ekr.20200122170337.1: *5* function: postvisit
        def postvisit(node, par_value, value):
            """Pop the visit stack and propagate the parent's value."""
            nonlocal stack
            stack.pop()
            return par_value or []
        #@+node:ekr.20200122170101.4: *5* function: previsit
        def previsit(node, par_value):
            """Push node on the visit stack, injecting parent/children links."""
            nonlocal stack
            if isinstance(node, ast.Module):
                stack = []
            if stack:
                parent = stack[-1]
                children: List[ast.AST] = getattr(parent, 'children', [])
                parent.children = children + [node] # type:ignore
                node.parent = parent
            else:
                node.parent = None
                node.children = []
            stack.append(node)
            return par_value, []
        #@-others
        table = (
            # """print('%s in %5.2f sec' % ("done", 2.9))\n""",
            """print(a[1:2:3])\n""",
        )
        for source in table:
            print(f"Source...\n\n{source}")
            atok = asttokens.ASTTokens(source, parse=True)
            # Create a patchable list of Token objects.
            tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens]
            # Inject parent/child links into nodes.
            asttokens.util.visit_tree(atok.tree, previsit, postvisit)
            # Create token.token_list for each token.
            for node in asttokens.util.walk(atok.tree):
                # Inject node into token.node_list
                for ast_token in atok.get_tokens(node, include_extra=True):
                    i = ast_token.index
                    token = tokens[i]
                    token.node_list.append(node)
            # Print the resulting parent/child links.
            for node in ast.walk(atok.tree):
                if hasattr(node, 'first_token'):
                    parent = getattr(node, 'parent', None)
                    parent_s = parent.__class__.__name__ if parent else 'None'
                    children: List[ast.AST] = getattr(node, 'children', [])
                    if children:
                        children_s = ', '.join(z.__class__.__name__ for z in children)
                    else:
                        children_s = 'None'
                    print(
                        f"\n"
                        f" node: {node.__class__.__name__}\n"
                        f" parent: {parent_s}\n"
                        f"children: {children_s}")
            # Print the resulting tokens.
            g.printObj(tokens, tag='Tokens')
    #@+node:ekr.20200121025938.1: *4* TT.show_example_dump
    def show_example_dump(self): # pragma: no cover
        """Dump contents, tokens and tree for a small example."""
        # Will only be run when enabled explicitly.
        contents = """\
    print('line 1')
    print('line 2')
    print('line 3')
    """
        contents, tokens, tree = self.make_data(contents)
        dump_contents(contents)
        dump_tokens(tokens)
        dump_tree(tokens, tree)
    #@+node:ekr.20200110015014.6: *4* TT.test_bs_nl_tokens
    def test_bs_nl_tokens(self):
        """Tokens containing a backslash-newline must round-trip."""
        # Test https://bugs.python.org/issue38663.
        contents = """\
    print \
    ('abc')
    """
        self.check_roundtrip(contents)
    #@+node:ekr.20200110015014.8: *4* TT.test_continuation_1
    def test_continuation_1(self):
        """Implicit line continuations inside (), [] and {} must round-trip."""
        contents = """\
    a = (3,4,
        5,6)
    y = [3, 4,
        5]
    z = {'a': 5,
        'b':15, 'c':True}
    x = len(y) + 5 - a[
        3] - a[2] + len(z) - z[
        'b']
    """
        self.check_roundtrip(contents)
    #@+node:ekr.20200111085210.1: *4* TT.test_continuation_2
    def test_continuation_2(self):
        # Backslash means line continuation, except for comments
        contents = (
            'x=1+\\\n 2'
            '# This is a comment\\\n # This also'
        )
        self.check_roundtrip(contents)
    #@+node:ekr.20200111085211.1: *4* TT.test_continuation_3
    def test_continuation_3(self):
        """A backslash ending a comment is not a line continuation."""
        contents = """\
    # Comment \\\n
    x = 0
    """
        self.check_roundtrip(contents)
    #@+node:ekr.20200110015014.10: *4* TT.test_string_concatenation_1
    def test_string_concatentation_1(self):
        # Two *plain* string literals on the same line
        # NOTE(review): method name misspells "concatenation"; left unchanged so
        # name-based test selection keeps working — confirm before renaming.
        self.check_roundtrip("""'abc' 'xyz'""")
    #@+node:ekr.20200111081801.1: *4* TT.test_string_concatenation_2
    def test_string_concatentation_2(self):
        # f-string followed by plain string on the same line
        self.check_roundtrip("""f'abc' 'xyz'""")
    #@+node:ekr.20200111081832.1: *4* TT.test_string_concatenation_3
    def test_string_concatentation_3(self):
        # plain string followed by f-string on the same line
        self.check_roundtrip("""'abc' f'xyz'""")
    #@+node:ekr.20160521103254.1: *4* TT.test_visitors_exist
    def test_visitors_exist(self):
        """Ensure that visitors for all ast nodes exist."""
        import _ast
        # Compute all fields to BaseTest.
        aList = sorted(dir(_ast))
        remove = [
            'Interactive', 'Suite', # Not necessary.
            'AST', # The base class,
            # Constants...
            'PyCF_ALLOW_TOP_LEVEL_AWAIT',
            'PyCF_ONLY_AST',
            'PyCF_TYPE_COMMENTS',
            # New ast nodes for Python 3.8.
            # We can ignore these nodes because:
            # 1. ast.parse does not generate them by default.
            # 2. The type comments are ordinary comments.
            # They do not need to be specially synced.
            # 3. Tools such as black, orange, and fstringify will
            # only ever handle comments as comments.
            'FunctionType', 'NamedExpr', 'TypeIgnore',
        ]
        aList = [z for z in aList if not z[0].islower()]
        # Remove base classes.
        aList = [z for z in aList
            if not z.startswith('_') and not z in remove]
        # Now test them.
        table = (
            TokenOrderGenerator,
        )
        for class_ in table:
            traverser = class_()
            errors, nodes, ops = 0, 0, 0
            for z in aList:
                if hasattr(traverser, 'do_' + z):
                    nodes += 1
                elif _op_names.get(z):
                    ops += 1
                else: # pragma: no cover
                    errors += 1
                    print(
                        f"Missing visitor: "
                        f"{traverser.__class__.__name__}.{z}")
            msg = f"{nodes} node types, {ops} op types, {errors} errors"
            assert not errors, msg
    #@-others
2895#@+node:ekr.20200107144010.1: *3* class TestTopLevelFunctions (BaseTest)
class TestTopLevelFunctions(BaseTest):
    """Tests for the top-level functions in leoAst.py."""
    #@+others
    #@+node:ekr.20200107144227.1: *4* test_get_encoding_directive
    def test_get_encoding_directive(self):
        """get_encoding_directive must find this file's own utf-8 coding line."""
        filename = __file__
        assert os.path.exists(filename), repr(filename)
        with open(filename, 'rb') as f:
            bb = f.read()
        e = get_encoding_directive(bb)
        self.assertEqual(e.lower(), 'utf-8')
    #@+node:ekr.20200107150857.1: *4* test_strip_BOM
    def test_strip_BOM(self):
        """strip_BOM on this file must report utf-8 (or None when no BOM is present)."""
        filename = __file__
        assert os.path.exists(filename), repr(filename)
        with open(filename, 'rb') as f:
            bb = f.read()
        assert bb, filename
        e, s = strip_BOM(bb)
        assert e is None or e.lower() == 'utf-8', repr(e)
    #@-others
2919#@+node:ekr.20191227152538.1: *3* class TestTOT (BaseTest)
class TestTOT(BaseTest):
    """Tests for the TokenOrderTraverser class."""
    #@+others
    #@+node:ekr.20200111115318.1: *4* test_tot.test_traverse
    def test_traverse(self):
        """Traverse a tree and record node counts and elapsed time."""
        contents = """\
    f(1)
    b = 2 + 3
    """
        # print('%s = %s' % (2+3, 4*5))
        # NOTE(review): `if 1:` forces the leoApp.py path, so this test depends
        # on that file being present; the small in-line contents above is unused.
        if 1:
            contents, tokens, tree = self.make_file_data('leoApp.py')
        else:
            contents, tokens, tree = self.make_data(contents)
        tot = TokenOrderTraverser()
        t1 = get_time()
        n_nodes = tot.traverse(tree)
        t2 = get_time()
        self.update_counts('nodes', n_nodes)
        self.update_times('50: TOT.traverse', t2 - t1)
        # self.dump_stats()
    #@-others
2943#@-others
2944#@-leo