Coverage for core\test_leoAst.py: 99%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

936 statements  

1# -*- coding: utf-8 -*- 

2#@+leo-ver=5-thin 

3#@+node:ekr.20210902073413.1: * @file ../unittests/core/test_leoAst.py 

4#@@first 

5"""Tests of leoAst.py""" 

6#@+<< test_leoAst imports >> 

7#@+node:ekr.20210902074548.1: ** << test_leoAst imports >> 

8import ast 

9import os 

10import sys 

11import textwrap 

12import time 

13import token as token_module 

14from typing import Any, Callable, Dict, List, Tuple 

15import unittest 

16import warnings 

17warnings.simplefilter("ignore") 

18# pylint: disable=import-error 

19# Third-party. 

20try: 

21 import asttokens 

22except Exception: # pragma: no cover 

23 asttokens = None 

24try: 

25 # Suppress a warning about imp being deprecated. 

26 with warnings.catch_warnings(): 

27 import black 

28except Exception: # pragma: no cover 

29 black = None 

30 

31# pylint: disable=wrong-import-position 

32from leo.core import leoGlobals as g 

33 

34from leo.core.leoAst import AstNotEqual 

35from leo.core.leoAst import Fstringify, Orange 

36from leo.core.leoAst import IterativeTokenGenerator ### new 

37from leo.core.leoAst import Token, TokenOrderGenerator, TokenOrderTraverser 

38from leo.core.leoAst import get_encoding_directive, read_file, strip_BOM 

39from leo.core.leoAst import make_tokens, parse_ast, tokens_to_string 

40from leo.core.leoAst import dump_ast, dump_contents, dump_tokens, dump_tree, _op_names 

41 

42### from leo.core.iterative_ast import IterativeTokenGenerator 

43#@-<< test_leoAst imports >> 

# Cache the interpreter version as a (major, minor) tuple for easy comparison.
v1, v2, junk1, junk2, junk3 = sys.version_info
py_version = (v1, v2)
# Type alias: a list of (callable, argument) pairs.
ActionList = List[Tuple[Callable, Any]]

47#@+others 

48#@+node:ekr.20200107114620.1: ** functions: unit testing 

49#@+node:ekr.20191027072126.1: *3* function: compare_asts & helpers 

def compare_asts(ast1, ast2):  # pragma: no cover
    """Compare two ast trees. Return True if they are equal."""
    try:
        # Walk both trees in parallel; any mismatch raises AstNotEqual.
        _compare_asts(ast1, ast2)
        return True
    except AstNotEqual:
        # Expected failure mode: show both trees for diagnosis.
        dump_ast(ast1, tag='AST BEFORE')
        dump_ast(ast2, tag='AST AFTER')
    except Exception:
        # Anything else is a bug in the comparison itself.
        g.trace("Unexpected exception")
        g.es_exception()
    return False

64#@+node:ekr.20191027071653.2: *4* function._compare_asts 

def _compare_asts(node1, node2):  # pragma: no cover
    """
    Compare both nodes, and recursively compare their children.

    See also: http://stackoverflow.com/questions/3312989/
    """
    # Compare the nodes themselves.
    _compare_nodes(node1, node2)
    # The two nodes must declare the same fields.
    fields1 = getattr(node1, "_fields", [])  # type:ignore
    fields2 = getattr(node2, "_fields", [])  # type:ignore
    if fields1 != fields2:
        raise AstNotEqual(
            f"node1._fields: {fields1}\n" f"node2._fields: {fields2}")
    # Recursively compare each field, skipping position/context metadata.
    ignored_fields = ('lineno', 'col_offset', 'ctx')
    for field in fields1:
        if field in ignored_fields:
            continue
        attr1 = getattr(node1, field, None)
        attr2 = getattr(node2, field, None)
        if attr1.__class__.__name__ != attr2.__class__.__name__:
            raise AstNotEqual(f"attrs1: {attr1},\n" f"attrs2: {attr2}")
        _compare_asts(attr1, attr2)

87#@+node:ekr.20191027071653.3: *4* function._compare_nodes 

88def _compare_nodes(node1, node2): # pragma: no cover 

89 """ 

90 Compare node1 and node2. 

91 For lists and tuples, compare elements recursively. 

92 Raise AstNotEqual if not equal. 

93 """ 

94 # Class names must always match. 

95 if node1.__class__.__name__ != node2.__class__.__name__: 

96 raise AstNotEqual( 

97 f"node1.__class__.__name__: {node1.__class__.__name__}\n" 

98 f"node2.__class__.__name__: {node2.__class__.__name_}" 

99 ) 

100 # Special cases for strings and None 

101 if node1 is None: 

102 return 

103 if isinstance(node1, str): 

104 if node1 != node2: 

105 raise AstNotEqual(f"node1: {node1!r}\n" f"node2: {node2!r}") 

106 # Special cases for lists and tuples: 

107 if isinstance(node1, (tuple, list)): 

108 if len(node1) != len(node2): 

109 raise AstNotEqual(f"node1: {node1}\n" f"node2: {node2}") 

110 for i, item1 in enumerate(node1): 

111 item2 = node2[i] 

112 if item1.__class__.__name__ != item2.__class__.__name__: 

113 raise AstNotEqual( 

114 f"list item1: {i} {item1}\n" f"list item2: {i} {item2}" 

115 ) 

116 _compare_asts(item1, item2) 

117#@+node:ekr.20191121081439.1: *3* function: compare_lists 

def compare_lists(list1, list2):  # pragma: no cover
    """
    Compare two lists of strings, showing the first mismatch.

    Return the index of the first mismatched lines, or None if identical.
    """
    import itertools
    # zip_longest makes a length difference show up as a mismatch.
    pairs = itertools.zip_longest(list1, list2, fillvalue='Missing!')
    for index, (line1, line2) in enumerate(pairs):
        if line1 != line2:
            return index
    return None

130#@+node:ekr.20191226071135.1: *3* function: get_time 

def get_time():
    """Return the CPU time of the current process, in seconds."""
    now = time.process_time()
    return now

133#@+node:ekr.20220403080350.1: ** Base Test classes 

134#@+node:ekr.20191227154302.1: *3* class BaseTest (TestCase) 

class BaseTest(unittest.TestCase):
    """
    The base class of all tests of leoAst.py.

    This class contains only helpers.
    """

    # Statistics, accumulated class-wide across all tests in the run.
    counts: Dict[str, int] = {}
    times: Dict[str, float] = {}

    # Debugging traces & behavior.
    # create_links: 'full-traceback'
    # make_data: 'contents', 'tokens', 'tree',
    #            'post-tokens', 'post-tree',
    #            'unit-test'
    debug_list: List[str] = []
    # Set by create_links when linking raises; reported later by make_data.
    link_error: Exception = None

    #@+others
    #@+node:ekr.20200110103036.1: *4* BaseTest.adjust_expected
    def adjust_expected(self, s):
        """Adjust leading indentation in the expected string s."""
        # Strip a leading backslash-newline, dedent, and normalize to one
        # trailing newline — the same normalization make_data applies.
        return textwrap.dedent(s.lstrip('\\\n')).rstrip() + '\n'
    #@+node:ekr.20200110092217.1: *4* BaseTest.check_roundtrip
    def check_roundtrip(self, contents):
        """Check that the tokenizer round-trips the given contents."""
        contents, tokens, tree = self.make_data(contents)
        results = tokens_to_string(tokens)
        self.assertEqual(contents, results)
    #@+node:ekr.20191227054856.1: *4* BaseTest.make_data
    def make_data(self, contents, description=None):
        """
        Return (contents, tokens, tree) for the given contents.

        contents: the source text to tokenize, parse, and link.
        description: optional name used as tog.filename; defaults to the
                     calling test's name (via g.callers).
        """
        contents = contents.lstrip('\\\n')
        if not contents:
            return '', None, None  # pragma: no cover
        self.link_error = None
        t1 = get_time()
        self.update_counts('characters', len(contents))
        # Ensure all tests end in exactly one newline.
        contents = textwrap.dedent(contents).rstrip() + '\n'
        # Create the TOG instance.
        self.tog = TokenOrderGenerator()
        self.tog.filename = description or g.callers(2).split(',')[0]
        # Pass 0: create the tokens and parse tree
        tokens = self.make_tokens(contents)
        if not tokens:
            self.fail('make_tokens failed')  # pragma: no cover
        tree = self.make_tree(contents)
        if not tree:
            self.fail('make_tree failed')  # pragma: no cover
        # Optional debug dumps, controlled by self.debug_list.
        if 'contents' in self.debug_list:
            dump_contents(contents)  # pragma: no cover
        if 'ast' in self.debug_list:  # pragma: no cover
            if py_version >= (3, 9):
                # ast.dump's indent keyword is new in Python 3.9.
                # pylint: disable=unexpected-keyword-arg
                g.printObj(ast.dump(tree, indent=2), tag='ast.dump')
            else:
                g.printObj(ast.dump(tree), tag='ast.dump')
        if 'tree' in self.debug_list:  # Excellent traces for tracking down mysteries.
            dump_ast(tree)  # pragma: no cover
        if 'tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        self.balance_tokens(tokens)
        # Pass 1: create the links.
        self.create_links(tokens, tree)
        if 'post-tree' in self.debug_list:
            dump_tree(tokens, tree)  # pragma: no cover
        if 'post-tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - t1)
        # create_links defers its failure; report it here.
        if self.link_error:
            self.fail(self.link_error)  # pragma: no cover
        return contents, tokens, tree
    #@+node:ekr.20191227103533.1: *4* BaseTest.make_file_data
    def make_file_data(self, filename):
        """Return (contents, tokens, tree) from the given file."""
        # The file is resolved relative to leo/core, two levels up.
        directory = os.path.dirname(__file__)
        filename = g.os_path_finalize_join(directory, '..', '..', 'core', filename)
        assert os.path.exists(filename), repr(filename)
        contents = read_file(filename)
        contents, tokens, tree = self.make_data(contents, filename)
        return contents, tokens, tree
    #@+node:ekr.20191228101601.1: *4* BaseTest: passes...
    #@+node:ekr.20191228095945.11: *5* 0.1: BaseTest.make_tokens
    def make_tokens(self, contents):
        """
        BaseTest.make_tokens.

        Make tokens from contents.
        """
        t1 = get_time()
        # Tokenize.
        tokens = make_tokens(contents)
        t2 = get_time()
        self.update_counts('tokens', len(tokens))
        self.update_times('01: make-tokens', t2 - t1)
        return tokens
    #@+node:ekr.20191228102101.1: *5* 0.2: BaseTest.make_tree
    def make_tree(self, contents):
        """
        BaseTest.make_tree.

        Return the parse tree for the given contents string.
        """
        t1 = get_time()
        tree = parse_ast(contents)
        t2 = get_time()
        self.update_times('02: parse_ast', t2 - t1)
        return tree
    #@+node:ekr.20191228185201.1: *5* 0.3: BaseTest.balance_tokens
    def balance_tokens(self, tokens):
        """
        BastTest.balance_tokens.

        Insert links between corresponding paren tokens.
        """
        t1 = get_time()
        count = self.tog.balance_tokens(tokens)
        t2 = get_time()
        self.update_counts('paren-tokens', count)
        self.update_times('03: balance-tokens', t2 - t1)
        return count
    #@+node:ekr.20191228101437.1: *5* 1.1: BaseTest.create_links
    def create_links(self, tokens, tree, filename='unit test'):
        """
        BaseTest.create_links.

        Insert two-way links between the tokens and ast tree.
        """
        tog = self.tog
        try:
            t1 = get_time()
            tog.create_links(tokens, tree)
            t2 = get_time()
            self.update_counts('nodes', tog.n_nodes)
            self.update_times('11: create-links', t2 - t1)
        except Exception as e:  # pragma: no cover
            if 'full-traceback' in self.debug_list:
                g.es_exception()
            # Weird: calling self.fail creates ugly failures.
            # Record the error; make_data calls self.fail later.
            self.link_error = e
    #@+node:ekr.20191228095945.10: *5* 2.1: BaseTest.fstringify
    def fstringify(self, contents, tokens, tree, filename=None, silent=False):
        """
        BaseTest.fstringify.

        Run the Fstringify pass and return the resulting string.
        """
        t1 = get_time()
        if not filename:
            filename = g.callers(1)
        fs = Fstringify()
        if silent:
            fs.silent = True
        result_s = fs.fstringify(contents, filename, tokens, tree)
        t2 = get_time()
        self.update_times('21: fstringify', t2 - t1)
        return result_s
    #@+node:ekr.20200107175223.1: *5* 2.2: BaseTest.beautify
    def beautify(self, contents, tokens, tree, filename=None, max_join_line_length=None, max_split_line_length=None):
        """
        BaseTest.beautify.

        Run the Orange beautifier and return the resulting string.
        """
        t1 = get_time()
        if not contents:
            return ''  # pragma: no cover
        if not filename:
            filename = g.callers(2).split(',')[0]
        orange = Orange()
        result_s = orange.beautify(contents, filename, tokens, tree,
            max_join_line_length=max_join_line_length,
            max_split_line_length=max_split_line_length)
        t2 = get_time()
        self.update_times('22: beautify', t2 - t1)
        # Keep the code list so tests can inspect the beautifier's output tokens.
        self.code_list = orange.code_list
        return result_s
    #@+node:ekr.20191228095945.1: *4* BaseTest: stats...
    # Actions should fail by throwing an exception.
    #@+node:ekr.20191228095945.12: *5* BaseTest.dump_stats & helpers
    def dump_stats(self):  # pragma: no cover
        """Show all calculated statistics."""
        if self.counts or self.times:
            print('')
            self.dump_counts()
            self.dump_times()
            print('')
    #@+node:ekr.20191228154757.1: *6* BaseTest.dump_counts
    def dump_counts(self):  # pragma: no cover
        """Show all calculated counts."""
        for key, n in self.counts.items():
            print(f"{key:>16}: {n:>6}")
    #@+node:ekr.20191228154801.1: *6* BaseTest.dump_times
    def dump_times(self):  # pragma: no cover
        """
        Show all calculated times.

        Keys should start with a priority (sort order) of the form `[0-9][0-9]:`
        """
        for key in sorted(self.times):
            t = self.times.get(key)
            # Strip the two-digit priority prefix and colon for display.
            key2 = key[3:]
            print(f"{key2:>16}: {t:6.3f} sec.")
    #@+node:ekr.20191228181624.1: *5* BaseTest.update_counts & update_times
    def update_counts(self, key, n):  # pragma: no cover
        """Update the count statistic given by key, n."""
        old_n = self.counts.get(key, 0)
        self.counts[key] = old_n + n

    def update_times(self, key, t):  # pragma: no cover
        """Update the timing statistic given by key, t."""
        old_t = self.times.get(key, 0.0)
        self.times[key] = old_t + t
    #@-others

348#@+node:ekr.20191227051737.1: *3* class TestTOG (BaseTest) 

349class TestTOG(BaseTest): 

350 """ 

351 Tests for the TokenOrderGenerator class. 

352 

353 These tests call BaseTest.make_data, which creates the two-way links 

354 between tokens and the parse tree. 

355 

356 The asserts in tog.sync_tokens suffice to create strong unit tests. 

357 """ 

358 

359 debug_list = ['unit-test'] 

360 

361 #@+others 

362 #@+node:ekr.20210318213945.1: *4* TestTOG.Recent bugs & features 

363 #@+node:ekr.20210321172902.1: *5* test_bug_1851 

    def test_bug_1851(self):
        """Test for Leo issue #1851: sync a simple one-argument function def."""
        contents = r'''\
        def foo(a1):
            pass
        '''
        contents, tokens, tree = self.make_data(contents)

371 #@+node:ekr.20210914161519.1: *5* test_bug_2171 

    def test_bug_2171(self):
        """Test for Leo issue #2171: starred call argument inside a % expression."""
        if py_version < (3, 9):
            self.skipTest('Requires Python 3.9')  # pragma: no cover

        contents = "'HEAD:%s' % g.os_path_join( *(relative_path + [filename]) )"
        contents, tokens, tree = self.make_data(contents)

379 #@+node:ekr.20210318213133.1: *5* test_full_grammar 

    def test_full_grammar(self):
        """Verify that leoAst handles the full-grammar torture file."""
        # Load py3_test_grammar.py.
        dir_ = os.path.dirname(__file__)
        path = os.path.abspath(os.path.join(dir_, '..', 'py3_test_grammar.py'))
        assert os.path.exists(path), path
        if py_version < (3, 8):
            self.skipTest('Requires Python 3.8 or above')  # pragma: no cover
        # Verify that leoAst can parse the file.
        contents = read_file(path)
        self.make_data(contents)

390 #@+node:ekr.20210318214057.1: *5* test_line_315 

    def test_line_315(self):
        """Sync a call mixing keyword and starred arguments."""
        #
        # Known bug: position-only args exist in Python 3.8,
        # but there is no easy way of syncing them.
        # This bug will not be fixed.
        # The workaround is to require Python 3.9
        if py_version >= (3, 9):
            contents = '''\
            f(1, x=2,
                *[3, 4], y=5)
            '''
        elif 1:  # Expected order.
            contents = '''f(1, *[a, 3], x=2, y=5)'''  # pragma: no cover
        else:  # Legacy.
            contents = '''f(a, *args, **kwargs)'''
        contents, tokens, tree = self.make_data(contents)

408 #@+node:ekr.20210320095504.8: *5* test_line_337 

    def test_line_337(self):
        """Sync a fully-annotated function definition."""
        if py_version >= (3, 8):  # Requires neither line_no nor col_offset fields.
            contents = '''def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass'''
        else:
            contents = '''def f(a, b, d=4, *arg, **keys): pass'''  # pragma: no cover
        contents, tokens, tree = self.make_data(contents)

416 #@+node:ekr.20210320065202.1: *5* test_line_483 

    def test_line_483(self):
        """Sync a starred expression in a return statement."""
        if py_version < (3, 8):
            # Python 3.8: https://bugs.python.org/issue32117
            self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment")  # pragma: no cover
        contents = '''def g3(): return 1, *return_list'''
        contents, tokens, tree = self.make_data(contents)

424 #@+node:ekr.20210320065344.1: *5* test_line_494 

    def test_line_494(self):
        """
        https://docs.python.org/3/whatsnew/3.8.html#other-language-changes

        Generalized iterable unpacking in yield and return statements no longer
        requires enclosing parentheses. This brings the yield and return syntax
        into better agreement with normal assignment syntax.
        """
        if py_version < (3, 8):
            # Python 3.8: https://bugs.python.org/issue32117
            self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment")  # pragma: no cover
        contents = '''def g2(): yield 1, *yield_list'''
        contents, tokens, tree = self.make_data(contents)

439 #@+node:ekr.20210319130349.1: *5* test_line_875 

    def test_line_875(self):
        """Sync a generator expression with two for-clauses."""
        contents = '''list((x, y) for x in 'abcd' for y in 'abcd')'''
        contents, tokens, tree = self.make_data(contents)

444 #@+node:ekr.20210319130616.1: *5* test_line_898 

    def test_line_898(self):
        """Sync a generator expression with an if-clause between for-clauses."""
        contents = '''g = ((i,j) for i in range(x) if t for j in range(x))'''
        contents, tokens, tree = self.make_data(contents)

449 #@+node:ekr.20210320085705.1: *5* test_walrus_operator 

    def test_walrus_operator(self):
        """Sync an assignment expression (walrus operator, PEP 572)."""
        if py_version < (3, 8):
            self.skipTest(f"Python {v1}.{v2} does not support assignment expressions")  # pragma: no cover
        contents = '''if (n := len(a)) > 10: pass'''
        contents, tokens, tree = self.make_data(contents)

456 #@+node:ekr.20191227052446.10: *4* TestTOG.Contexts... 

457 #@+node:ekr.20191227052446.11: *5* test_ClassDef 

    def test_ClassDef(self):
        """Sync plain, decorated, and subclassing class definitions."""
        contents = """\
        class TestClass1:
            pass

        def decorator():
            pass

        @decorator
        class TestClass2:
            pass

        @decorator
        class TestClass(base1, base2):
            pass
        """
        self.make_data(contents)

475 #@+node:ekr.20191227052446.12: *5* test_ClassDef2 

    def test_ClassDef2(self):
        """Sync docstrings at module, class, and function level."""
        contents = r'''\
        """ds 1"""
        class TestClass:
            """ds 2"""
            def long_name(a, b=2):
                """ds 3"""
                print('done')
        '''
        self.make_data(contents)

486 #@+node:ekr.20191227052446.13: *5* test_FunctionDef 

    def test_FunctionDef(self):
        """Sync a function definition with keyword defaults."""
        contents = r"""\
        def run(fileName=None, pymacs=None):
            pass
        """
        self.make_data(contents)

493 #@+node:ekr.20200111171738.1: *5* test_FunctionDef_with_annotations 

    def test_FunctionDef_with_annotations(self):
        """Sync argument and return annotations, including expressions."""
        contents = r"""\
        def foo(a: 'x', b: 5 + 6, c: list) -> max(2, 9):
            pass
        """
        self.make_data(contents)
        # contents, tokens, tree = self.make_data(contents)
        # dump_ast(tree)

502 #@+node:ekr.20210802162650.1: *5* test_FunctionDef_with_posonly_args 

    def test_FunctionDef_with_posonly_args(self):
        """Sync positional-only and keyword-only arguments (PEP 570)."""
        if py_version < (3, 9):
            self.skipTest('Requires Python 3.9')  # pragma: no cover

        # From PEP 570
        contents = r"""\
        def pos_only_arg(arg, /):
            pass
        def kwd_only_arg(*, arg):
            pass
        def combined_example(pos_only, /, standard, *, kwd_only):
            pass
        """
        self.make_data(contents)

518 #@+node:ekr.20191227052446.14: *4* TestTOG.Expressions & operators... 

519 #@+node:ekr.20191227052446.15: *5* test_attribute 

    def test_attribute(self):
        """Sync an attribute access used as a call argument."""
        contents = r"""\
        open(os.devnull, "w")
        """
        self.make_data(contents)

525 #@+node:ekr.20191227052446.16: *5* test_CompareOp 

    def test_CompareOp(self):
        """Sync boolean operators (and/not) in an if-condition."""
        contents = r"""\
        if a and not b and c:
            pass
        """
        self.make_data(contents)

532 #@+node:ekr.20191227052446.17: *5* test_Dict_1 

    def test_Dict(self):
        """Sync a dict literal with a ternary key and a trailing comma."""
        contents = r"""\
        d = {'a' if x else 'b': True,}
        """
        self.make_data(contents)

538 #@+node:ekr.20200111191153.1: *5* test_Dict_2 

    def test_Dict_2(self):
        """Sync an empty dict literal."""
        contents = r"""\
        d = {}
        """
        self.make_data(contents)

544 #@+node:ekr.20191227052446.18: *5* test_DictComp 

    def test_DictComp(self):
        """Sync a dict comprehension."""
        # leoGlobals.py, line 3028.
        contents = r"""\
        d2 = {val: key for key, val in d}
        """
        self.make_data(contents)

551 #@+node:ekr.20200112042410.1: *5* test_ExtSlice 

    def test_ExtSlice(self):
        """Sync an extended (tuple) subscript with a slice component."""
        contents = r"""a [1, 2: 3]"""
        self.make_data(contents)

555 #@+node:ekr.20191227052446.19: *5* test_ListComp 

    def test_ListComp(self):
        """Sync a list comprehension inside a call."""
        # ListComp and comprehension.
        contents = r"""\
        any([p2.isDirty() for p2 in p.subtree()])
        """
        self.make_data(contents)

562 #@+node:ekr.20191227052446.20: *5* test_NameConstant 

    def test_NameConstant(self):
        """Sync None and builtin names as keyword argument values."""
        contents = r"""\
        run(a=None, b=str)
        """
        self.make_data(contents)

568 #@+node:ekr.20191227052446.21: *5* test_Operator: semicolon 

    def test_op_semicolon(self):
        """Sync a trailing semicolon."""
        contents = r"""\
        print('c');
        print('d')
        """
        self.make_data(contents)

575 #@+node:ekr.20191227052446.22: *5* test_Operator: semicolon between statements 

    def test_op_semicolon2(self):
        """Sync semicolons separating statements on one line."""
        contents = r"""\
        a = 1 ; b = 2
        print('a') ; print('b')
        """
        self.make_data(contents)

582 #@+node:ekr.20200111194454.1: *5* test_Set 

    def test_Set(self):
        """Sync a set literal."""
        contents = """{'a', 'b'}"""
        self.make_data(contents)

586 #@+node:ekr.20200111195654.1: *5* test_SetComp 

    def test_SetComp(self):
        """Sync a set comprehension with two for-clauses and an if-clause."""
        contents = """aSet = { (x, y) for x in r for y in r if x < y }"""
        self.make_data(contents)

590 #@+node:ekr.20191227052446.23: *5* test_UnaryOp 

    def test_UnaryOp(self):
        """Sync a unary minus applied to a parenthesized operand."""
        contents = r"""\
        print(-(2))
        """
        self.make_data(contents)

596 #@+node:ekr.20191227052446.65: *4* TestTOG.f-strings.... 

597 #@+node:ekr.20191227052446.66: *5* test_fstring01: complex Call 

    def test_fstring1(self):
        """Sync an f-string concatenated with a plain string in a complex call."""
        # Line 1177, leoApp.py
        contents = r"""\
        print(
            message = f"line 1: {old_id!r}\n" "line 2\n"
        )
        print('done')
        """
        self.make_data(contents)

607 #@+node:ekr.20191227052446.67: *5* test_fstring02: Ternary 

    def test_fstring2(self):
        """Sync a ternary expression inside an f-string."""
        contents = r"""\
        func(f"{b if not cond1 else ''}")
        """
        self.make_data(contents)

613 #@+node:ekr.20191227052446.68: *5* test_fstring03: single f-string 

    def test_fstring3(self):
        """Sync a single f-string."""
        contents = r"""\
        print(f'{7.1}')
        print('end')
        """
        self.make_data(contents)

620 #@+node:ekr.20191227052446.69: *5* test_fstring04: f-string + plain 

    def test_fstring4(self):
        """Sync an f-string concatenated with a following plain string."""
        contents = r"""\
        print(f'{7.1}' 'p7.2')
        print('end')
        """
        self.make_data(contents)

627 #@+node:ekr.20191227052446.70: *5* test_fstring05: plain + f-string 

    def test_fstring5(self):
        """Sync a plain string concatenated with a following f-string."""
        contents = r"""\
        print('p1' f'{f2}')
        'end'
        """
        self.make_data(contents)

634 #@+node:ekr.20191227052446.71: *5* test_fstring06: f-string + fstring 

    def test_fstring6(self):
        """Sync two concatenated f-strings."""
        contents = r"""\
        print(f'{f1}' f'{f2}')
        'end'
        """
        self.make_data(contents)

641 #@+node:ekr.20191227052446.72: *5* test_fstring07: many 

    def test_fstring7(self):
        """Sync many concatenated plain strings and f-strings."""
        contents = r"""\
        print('s1', f'{f2}' f'f3' f'{f4}' 's5')
        'end'
        """
        self.make_data(contents)

648 #@+node:ekr.20191227052446.73: *5* test_fstring08: ternary op 

    def test_fstring8(self):
        """Sync a ternary operator inside an f-string expression."""
        # leoFind.py line 856
        contents = r"""\
        a = f"{'a' if x else 'b'}"
        f()

        # Pass
        # print(f"{'a' if x else 'b'}")
        """
        self.make_data(contents)

659 #@+node:ekr.20191227052446.74: *5* test_fstring09: leoFind.py line 856 

    def test_fstring9(self):
        """Sync implicit concatenation of plain and f-strings across lines."""
        contents = r"""\
        func(
            "Isearch"
            f"{' Backward' if True else ''}"
        )
        print('done')
        """
        self.make_data(contents)

669 #@+node:ekr.20191227052446.75: *5* test_fstring10: leoFind.py: line 861 

    def test_fstring10(self):
        """Sync an f-string joined with a plain string in a call argument."""
        # leoFind.py: line 861
        contents = r"""\
        one(f"{'B'}" ": ")
        """
        self.make_data(contents)

676 #@+node:ekr.20191227052446.76: *5* test_fstring11: joins 

    def test_fstring11(self):
        """Sync two joined f-strings, each with embedded expressions."""
        contents = r"""\
        print(f'x3{e3+1}y3' f'x4{e4+2}y4')
        print('done')
        """
        self.make_data(contents)

683 #@+node:ekr.20191227052446.77: *6* more 

684 # Single f-strings. 

685 # 'p1' ; 

686 # f'f1' ; 

687 # f'x1{e1}y1' ; 

688 # f'x2{e2+1}y2{e2+2}z2' ; 

689 

690 # Concatentated strings... 

691 # 'p2', 'p3' ; 

692 # f'f2' 'f3' ; 

693 

694 # f'x5{e5+1}y5{e5+1}z5' f'x6{e6+1}y6{e6+1}z6' ; 

695 #@+node:ekr.20191227052446.78: *5* test_fstring12: joins + 1 f-expr 

    def test_fstring12(self):
        """Sync joined strings, each pair with at most one f-expression."""
        contents = r"""\
        print(f'x1{e1}y1', 'p1')
        print(f'x2{e2}y2', f'f2')
        print(f'x3{e3}y3', f'x4{e4}y4')
        print('end')
        """
        self.make_data(contents)

704 #@+node:ekr.20191227052446.79: *5* test_fstring13: joins + 2 f-exprs 

    def test_fstring13(self):
        """Sync joined strings with two f-expressions each."""
        contents = r"""\
        print(f'x1{e1}y1{e2}z1', 'p1')
        print(f'x2{e3}y2{e3}z2', f'f2')
        print(f'x3{e4}y3{e5}z3', f'x4{e6}y4{e7}z4')
        print('end')
        """
        self.make_data(contents)

713 #@+node:ekr.20191227052446.80: *5* test_fstring14: complex, with commas 

    def test_fstring14(self):
        """Sync a complex f-string containing a generator with commas."""
        contents = r"""\
        print(f"{list(z for z in ('a', 'b', 'c') if z != 'b')}")
        """
        self.make_data(contents)

719 #@+node:ekr.20191227052446.81: *5* test_fstring15 

    def test_fstring15(self):
        """Sync an f-string with adjacent expressions."""
        contents = r"""\
        print(f"test {a}={2}")
        print('done')
        """
        self.make_data(contents)

726 #@+node:ekr.20191227052446.83: *5* test_fstring16: simple 

    def test_fstring16(self):
        """Sync simple expression statements mixing plain and f-strings."""
        contents = r"""\
        'p1' ;
        f'f1' ;
        'done' ;
        """
        self.make_data(contents)

734 #@+node:ekr.20191227052446.82: *5* test_regex_fstring 

    def test_regex_fstring(self):
        """Sync a raw f-string containing regex metacharacters."""
        # Line 7709, leoGlobals.py
        contents = r'''\
        fr"""{kinds}://[^\s'"]+[\w=/]"""
        '''
        self.make_data(contents)

741 #@+node:ekr.20191227052446.32: *4* TestTOG.If... 

742 #@+node:ekr.20191227052446.33: *5* test_from leoTips.py 

    def test_if1(self):
        """Sync nested for-loops with a for-else clause and a trailing if."""
        # Line 93, leoTips.py
        contents = r"""\
        self.make_data(contents)
        unseen = [i for i in range(5) if i not in seen]
        for issue in data:
            for a in aList:
                print('a')
            else:
                print('b')
            if b:
                print('c')
        """
        self.make_data(contents)

757 #@+node:ekr.20191227052446.34: *5* test_if + tuple 

    def test_if2(self):
        """Sync tuple unpacking in a for target."""
        contents = r"""\
        for i, j in b:
            pass
        """
        self.make_data(contents)

764 #@+node:ekr.20191227052446.35: *5* test_if + unary op 

    def test_if3(self):
        """Sync a unary op in an if-condition."""
        contents = r"""\
        if -(2):
            pass
        """
        self.make_data(contents)

771 #@+node:ekr.20191227052446.36: *5* test_if, elif 

    def test_if4(self):
        """Sync if/elif chains."""
        contents = r"""\
        if 1:
            print('a')
        elif 2:
            print('b')
        elif 3:
            print('c')
            print('d')
        print('-')
        if 1:
            print('e')
        elif 2:
            print('f')
        print('g')
        """
        self.make_data(contents)

789 #@+node:ekr.20191227052446.37: *5* test_if, elif + 2 

    def test_if5(self):
        """Sync if/elif followed by a top-level statement."""
        contents = r"""\
        if 1:
            pass
        elif 2:
            pass
        pass
        """
        self.make_data(contents)

799 #@+node:ekr.20191227052446.38: *5* test_if, elif, else 

    def test_if6(self):
        """Sync if/elif/else, each branch with two statements."""
        contents = r"""\
        if (a):
            print('a1')
            print('a2')
        elif b:
            print('b1')
            print('b2')
        else:
            print('c1')
            print('c2')
        """
        self.make_data(contents)

813 #@+node:ekr.20191227052446.39: *5* test_if, else 

    def test_if7(self):
        """Sync a simple if/else."""
        contents = r"""\
        if 1:
            print('a')
        else:
            print('b')
        """
        self.make_data(contents)

822 #@+node:ekr.20191227052446.40: *5* test_if, else, if 

    def test_if8(self):
        """Sync an if nested inside an else clause."""
        contents = r"""\
        if 1:
            print('a')
        else:
            if 2:
                print('b')
        """
        self.make_data(contents)

832 #@+node:ekr.20191227052446.41: *5* test_Nested If's 

    def test_if9(self):
        """Sync nested ifs with an else on the outer if."""
        contents = r"""\
        if a:
            if b:
                print('b')
        else:
            if d:
                print('d')
        """
        self.make_data(contents)

843 #@+node:ekr.20191227052446.42: *5* test_ternary + if 

    def test_if10(self):
        """Sync a ternary expression inside an if branch."""
        contents = r"""\
        if 1:
            a = 'class' if cond else 'def'
            # find_pattern = prefix + ' ' + word
            print('1')
        else:
            print('2')
        """
        self.make_data(contents)

854 #@+node:ekr.20191227145620.1: *4* TestTOG.Miscellaneous... 

855 #@+node:ekr.20200206041753.1: *5* test_comment_in_set_links 

    def test_comment_in_set_links(self):
        """Sync a comment inside a function body."""
        contents = """
        def spam():
            # comment
            pass
        """
        self.make_data(contents)

863 #@+node:ekr.20200112065944.1: *5* test_ellipsis_1 

    def test_ellipsis_1(self):
        """Sync an ellipsis as a function body."""
        contents = """
        def spam():
            ...
        """
        self.make_data(contents)

870 #@+node:ekr.20200112070228.1: *5* test_ellipsis_2 

    def test_ellipsis_2(self):
        """Sync an ellipsis inside a Callable annotation."""
        contents = """
        def partial(func: Callable[..., str], *args):
            pass
        """
        self.make_data(contents)

877 #@+node:ekr.20191227075951.1: *5* test_end_of_line 

    def test_end_of_line(self):
        """Sync a file consisting of only a comment."""
        self.make_data("""# Only a comment.""")

880 #@+node:ekr.20191227052446.50: *4* TestTOG.Plain Strings... 

881 #@+node:ekr.20191227052446.52: *5* test_\x and \o escapes 

    def test_escapes(self):
        """Sync \\x and \\o escape sequences in strings."""
        # Line 4609, leoGlobals.py
        contents = r"""\
        print("\x7e" "\0777") # tilde.
        print('done')
        """
        self.make_data(contents)

889 #@+node:ekr.20191227052446.53: *5* test_backslashes in docstring 

    def test_backslashes(self):
        """Sync backslashes inside a docstring."""
        # leoGlobals.py.
        contents = r'''\
        class SherlockTracer:
            """before\\after"""
        '''
        self.make_data(contents)

897 #@+node:ekr.20191227052446.54: *5* test_bs/nl 

    def test_bs_nl(self):
        """Sync a backslash-newline continuation inside a string."""
        contents = r"""\
        print('hello\
        world')
        """
        self.make_data(contents)

904 #@+node:ekr.20191227052446.55: *5* test_bytes bs-x 

    def test_bytes(self):
        """Sync a bytes literal with a hex escape."""
        # Line 201, leoApp.py
        contents = r"""\
        print(b'\xfe')
        print('done')
        """
        self.make_data(contents)

912 #@+node:ekr.20191227052446.56: *5* test_empty string 

    # NOTE(review): method name contains a typo ('empyt' for 'empty');
    # kept as-is because renaming would change the discovered test id.
    def test_empyt_string(self):
        """Sync assignments of an empty string and zero."""
        contents = r"""\
        self.s = ''
        self.i = 0
        """
        self.make_data(contents)

919 #@+node:ekr.20191227052446.57: *5* test_escaped string delims 

    def test_escaped_delims(self):
        """Sync an escaped quote inside a string."""
        contents = r"""\
        print("a\"b")
        """
        self.make_data(contents)

925 #@+node:ekr.20191227052446.58: *5* test_escaped strings 

    def test_escaped_strings(self):
        """Sync the common backslash escape sequences."""
        contents = r"""\
        f1(a='\b', b='\n', t='\t')
        f2(f='\f', r='\r', v='\v')
        f3(bs='\\')
        """
        self.make_data(contents)

933 #@+node:ekr.20191227052446.59: *5* test_f-string join 

    def test_fstring_join(self):
        """Sync an f-string joined with a plain string, both containing newlines."""
        # The first newline causes the fail.
        contents = r"""\
        print(f"a {old_id!r}\n" "b\n")
        print('done')
        """
        self.make_data(contents)

941 #@+node:ekr.20191227052446.64: *5* test_potential_fstring 

    def test_potential_fstring(self):
        """Sync a %-format expression (a candidate for fstringify)."""
        contents = r"""\
        print('test %s=%s'%(a, 2))
        print('done')
        """
        self.make_data(contents)

948 #@+node:ekr.20191227052446.60: *5* test_raw docstring 

    def test_raw_docstring(self):
        """Sync a raw triple-quoted string inside a call."""
        contents = r'''\
        # Line 1619 leoFind.py
        print(r"""DS""")
        '''
        self.make_data(contents)

955 #@+node:ekr.20191227052446.61: *5* test_raw escaped strings 

956 def test_raw_escapes(self): 

957 contents = r"""\ 

958 r1(a=r'\b', b=r'\n', t=r'\t') 

959 r2(f=r'\f', r=r'\r', v=r'\v') 

960 r3(bs=r'\\') 

961 """ 

962 self.make_data(contents) 

963 #@+node:ekr.20191227052446.62: *5* test_single quote 

964 def test_single_quote(self): 

965 # leoGlobals.py line 806. 

966 contents = r"""\ 

967 print('"') 

968 """ 

969 self.make_data(contents) 

970 #@+node:ekr.20191227052446.63: *5* test_string concatenation_1 

971 def test_concatenation_1(self): 

972 contents = r"""\ 

973 print('a' 'b') 

974 print('c') 

975 """ 

976 self.make_data(contents) 

977 #@+node:ekr.20200111042825.1: *5* test_string_concatenation_2 

978 def test_string_concatenation_2(self): 

979 # Crash in leoCheck.py. 

980 contents = """return self.Type('error', 'no member %s' % ivar)""" 

981 self.make_data(contents) 

982 #@+node:ekr.20191227052446.43: *4* TestTOG.Statements... 

    #@+node:ekr.20200112075707.1: *5* test_AnnAssign
    def test_AnnAssign(self):
        """Round-trip an annotated assignment."""
        contents = """x: int = 0"""
        self.make_data(contents)
    #@+node:ekr.20200112071833.1: *5* test_AsyncFor
    def test_AsyncFor(self):
        """Round-trip an async-for statement with an else clause."""
        # This may require Python 3.7.
        contents = """\
async def commit(session, data):
    async for z in session.transaction():
        await z(data)
    else:
        print('oops')
"""
        self.make_data(contents)
    #@+node:ekr.20200111175043.1: *5* test_AsyncFunctionDef
    def test_AsyncFunctionDef(self):
        """Round-trip a decorated async def with a return annotation."""
        contents = """\
@my_decorator
async def count() -> 42:
    print("One")
    await asyncio.sleep(1)
"""
        self.make_data(contents)
    #@+node:ekr.20200112073151.1: *5* test_AsyncWith
    def test_AsyncWith(self):
        """Round-trip an async-with statement."""
        contents = """\
async def commit(session, data):
    async with session.transaction():
        await session.update(data)
"""
        self.make_data(contents)
    #@+node:ekr.20191227052446.44: *5* test_Call
    def test_Call(self):
        """Round-trip a call with positional, keyword, *args and **kwargs arguments."""
        contents = """func(a, b, one='one', two=2, three=4+5, *args, **kwargs)"""
        # contents = """func(*args, **kwargs)"""
        # f1(a,b=2)
        # f2(1 + 2)
        # f3(arg, *args, **kwargs)
        # f4(a='a', *args, **kwargs)
        self.make_data(contents)
    #@+node:ekr.20200206040732.1: *5* test_Delete
    def test_Delete(self):

        # Coverage test for spaces
        contents = """del x"""
        self.make_data(contents)
    #@+node:ekr.20200111175335.1: *5* test_For
    def test_For(self):
        """Round-trip a minimal for statement."""
        contents = r"""\
for a in b:
    pass
"""
        self.make_data(contents)
    #@+node:ekr.20191227052446.45: *5* test_Global
    def test_Global(self):
        """Round-trip a global statement."""
        # Line 1604, leoGlobals.py
        contents = r"""
def spam():
    global gg
    print('')
"""
        self.make_data(contents)
    #@+node:ekr.20200111200424.1: *5* test_ImportFrom
    def test_ImportFrom(self):
        """Round-trip a from-import with an alias."""
        contents = r"""from a import b as c"""
        self.make_data(contents)
    #@+node:ekr.20210318174705.1: *5* test_ImportFromStar
    def test_ImportFromStar(self):
        """Round-trip a star import."""
        contents = r"""from sys import *"""
        self.make_data(contents)
    #@+node:ekr.20200206040424.1: *5* test_Lambda
    def test_Lambda(self):

        # Coverage test for spaces
        contents = """f = lambda x: x"""
        self.make_data(contents)
    #@+node:ekr.20220329095904.1: *5* test_Match
    def test_Match(self):
        """Round-trip a match statement covering many pattern kinds (3.10+)."""
        if py_version < (3, 10):
            self.skipTest('Require python 3.10')
        contents = r"""\
match node:
    # Passed...
    case 1: pass
    case (2, 3): pass
    case BinOp("+", a, BinOp("*", b, c)): pass
    case {"text": message, "color": c}: pass
    case 401 | 403 | 404: pass
    case xyzzy if a > 1: pass
    case {"sound": _, "format": _}: pass
    case BinOp2("+", a, BinOp("*", d = 2)): pass
    case BinOp2("-", d, e = 2): pass
    case {"pat1": 2, **rest}: pass
    case _: pass
    case (4, 5, *rest): pass
    case [6, 5, *rest]: pass
    case ['a'|'b' as ab, c]: pass
    case True: pass
    case False: pass
    case None: pass
    case True | False | None: pass
    case True, False, None: pass # A tuple!
"""
        try:
            # self.debug_list.append('contents')
            # self.debug_list.append('tokens')
            # self.debug_list.append('tree')
            # self.debug_list.append('full-traceback')
            self.make_data(contents)
        finally:
            # Always restore the class-level default so later tests are unaffected.
            self.debug_list = []

    #@+node:ekr.20200111200640.1: *5* test_Nonlocal
    def test_Nonlocal(self):
        """Round-trip a nonlocal statement."""
        contents = r"""nonlocal name1, name2"""
        self.make_data(contents)
    #@+node:ekr.20220224120239.1: *5* test_Raise
    def test_Raise(self):
        """Round-trip a raise ... from ... statement."""
        contents = "raise ImportError from None"
        self.make_data(contents)
    #@+node:ekr.20191227052446.46: *5* test_Try
    def test_Try(self):
        """Round-trip try with multiple except clauses and finally."""
        contents = r"""\
try:
    print('a1')
    print('a2')
except ImportError:
    print('b1')
    print('b2')
except SyntaxError:
    print('c1')
    print('c2')
finally:
    print('d1')
    print('d2')
"""
        self.make_data(contents)
    #@+node:ekr.20191227052446.47: *5* test_TryExceptElse
    def test_Try2(self):
        """Round-trip try/except/else."""
        # Line 240: leoDebugger.py
        contents = r"""\
try:
    print('a')
except ValueError:
    print('b')
else:
    print('c')
"""
        self.make_data(contents)
    #@+node:ekr.20200206041336.1: *5* test_While
    def test_While(self):
        """Round-trip while with an else clause."""
        contents = r"""\
while f():
    print('continue')
else:
    print('done')
"""
        self.make_data(contents)
    #@+node:ekr.20191227052446.48: *5* test_With
    def test_With(self):
        """Round-trip a with statement."""
        # leoGlobals.py, line 1785.
        contents = r"""\
with open(fn) as f:
    pass
"""
        self.make_data(contents)
    #@+node:ekr.20200206041611.1: *5* test_Yield
    def test_Yield(self):
        """Round-trip a yield expression."""
        contents = r"""\
def gen_test():
    yield self.gen_token('newline', '\n')
"""
        self.make_data(contents)
    #@+node:ekr.20191227052446.49: *5* test_YieldFrom
    def test_YieldFrom(self):
        """Round-trip a yield-from expression."""
        # Line 1046, leoAst.py
        contents = r"""\
def gen_test():
    self.node = tree
    yield from self.gen_token('newline', '\n')
    print('done')
"""
        self.make_data(contents)
    #@+node:ekr.20191228193740.1: *4* TestTOG.test_aa && zz
    # NOTE(review): these rely on unittest's alphabetical method ordering
    # to run first and last within the class.
    def test_aaa(self):
        """The first test."""
        g.total_time = get_time()

    def test_zzz(self):
        """The last test."""
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - g.total_time)
        # self.dump_stats()

1177 #@-others 

1178#@+node:ekr.20210902074155.1: ** Test classes... 

1179#@+node:ekr.20200122161530.1: *3* class Optional_TestFiles (BaseTest) 

class Optional_TestFiles(BaseTest):
    """
    Tests for the TokenOrderGenerator class that act on files.

    These are optional tests. They take a long time and are not needed
    for 100% coverage.

    All of these tests failed at one time.
    """
    #@+others
    #@+node:ekr.20200726145235.2: *4* TestFiles.test_leoApp
    def test_leoApp(self):

        self.make_file_data('leoApp.py')
    #@+node:ekr.20200726145235.1: *4* TestFiles.test_leoAst
    def test_leoAst(self):

        self.make_file_data('leoAst.py')
    #@+node:ekr.20200726145333.1: *4* TestFiles.test_leoDebugger
    def test_leoDebugger(self):

        self.make_file_data('leoDebugger.py')
    #@+node:ekr.20200726145333.2: *4* TestFiles.test_leoFind
    def test_leoFind(self):

        self.make_file_data('leoFind.py')
    #@+node:ekr.20200726145333.3: *4* TestFiles.test_leoGlobals
    def test_leoGlobals(self):

        self.make_file_data('leoGlobals.py')
    #@+node:ekr.20200726145333.4: *4* TestFiles.test_leoTips
    def test_leoTips(self):

        self.make_file_data('leoTips.py')
    #@+node:ekr.20200726145735.1: *4* TestFiles.test_runLeo
    def test_runLeo(self):

        self.make_file_data('runLeo.py')
    #@+node:ekr.20200115162419.1: *4* TestFiles.compare_tog_vs_asttokens
    def compare_tog_vs_asttokens(self):  # pragma: no cover
        """Compare asttokens token lists with TOG token lists."""
        if not asttokens:
            self.skipTest('requires asttokens')
        # Define TestToken class and helper functions.
        stack: List[ast.AST] = []
        #@+others
        #@+node:ekr.20200124024159.2: *5* class TestToken (internal)
        class TestToken:
            """A patchable representation of the 5-tuples created by tokenize and used by asttokens."""

            def __init__(self, kind, value):
                self.kind = kind
                self.value = value
                self.node_list: List[ast.AST] = []

            def __str__(self):
                tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list])
                return f"{self.kind:14} {self.value:20} {tokens_s!s}"

            __repr__ = __str__
        #@+node:ekr.20200124024159.3: *5* function: atok_name
        def atok_name(token):
            """Return a good looking name for the given 5-tuple"""
            return token_module.tok_name[token[0]].lower()  # type:ignore
        #@+node:ekr.20200124024159.4: *5* function: atok_value
        def atok_value(token):
            """Print a good looking value for the given 5-tuple"""
            return token.string if atok_name(token) == 'string' else repr(token.string)
        #@+node:ekr.20200124024159.5: *5* function: dump_token
        def dump_token(token):
            # Summarize one asttokens token and the ast nodes attached to it.
            node_list = list(set(getattr(token, 'node_set', [])))
            node_list = sorted([z.__class__.__name__ for z in node_list])
            return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}"
        #@+node:ekr.20200124024159.6: *5* function: postvisit
        def postvisit(node, par_value, value):
            # Pop the parent stack when leaving a node.
            nonlocal stack
            stack.pop()
            return par_value or []
        #@+node:ekr.20200124024159.7: *5* function: previsit
        def previsit(node, par_value):
            # Inject parent/children links into each ast node.
            nonlocal stack
            if isinstance(node, ast.Module):
                stack = []
            if stack:
                parent = stack[-1]
                children: List[ast.AST] = getattr(parent, 'children', [])
                parent.children = children + [node]  # type:ignore
                node.parent = parent
            else:
                node.parent = None
                node.children = []
            stack.append(node)
            return par_value, []
        #@-others
        # NOTE(review): hard-coded local path — this helper only runs on EKR's machine.
        directory = r'c:\leo.repo\leo-editor\leo\core'
        filename = 'leoAst.py'
        filename = os.path.join(directory, filename)
        # A fair comparison omits the read time.
        t0 = get_time()
        contents = read_file(filename)
        t1 = get_time()
        # Part 1: TOG.
        tog = TokenOrderGenerator()
        tog.filename = filename
        tokens = make_tokens(contents)
        tree = parse_ast(contents)
        tog.create_links(tokens, tree)
        tog.balance_tokens(tokens)
        t2 = get_time()
        # Part 2: Create asttokens data.
        atok = asttokens.ASTTokens(contents, parse=True, filename=filename)
        t3 = get_time()
        # Create a patchable list of TestToken objects.
        tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens]  # type:ignore
        # Inject parent/child links into nodes.
        asttokens.util.visit_tree(atok.tree, previsit, postvisit)
        # Create token.token_list for each token.
        for node in asttokens.util.walk(atok.tree):
            # Inject node into token.node_list
            for ast_token in atok.get_tokens(node, include_extra=True):
                i = ast_token.index
                token = tokens[i]
                token.node_list.append(node)
        t4 = get_time()
        if 1:
            print(
                f"       read: {t1-t0:5.3f} sec.\n"
                f"        TOG: {t2-t1:5.3f} sec.\n"
                f"asttokens 1: {t3-t2:5.3f} sec.\n"
                f"asttokens 2: {t4-t3:5.3f} sec.\n")
        if 0:
            print('===== asttokens =====\n')
            for node in asttokens.util.walk(tree):
                print(f"{node.__class__.__name__:>10} {atok.get_text(node)!s}")
    #@-others

1315#@+node:ekr.20191229083512.1: *3* class TestFstringify (BaseTest) 

1316class TestFstringify(BaseTest): 

1317 """Tests for the TokenOrderGenerator class.""" 

1318 #@+others 

1319 #@+node:ekr.20200111043311.1: *4* Bugs... 

1320 #@+node:ekr.20210318054321.1: *5* TestFstringify.test_bug_1851 

    def test_bug_1851(self):
        """Fstringify must leave dataclass definitions unchanged (#1851)."""
        # leoCheck.py.
        contents = """\
from dataclasses import dataclass

@dataclass(frozen=True)
class TestClass:
    value: str
    start: int
    end: int

f = TestClass('abc', 0, 10)
"""
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(contents).rstrip() + '\n'
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200111043311.2: *5* TestFstringify.test_crash_1
    def test_crash_1(self):
        """Fstringify a %-expression inside a tuple (crashed in leoCheck.py)."""
        # leoCheck.py.
        contents = """return ('error', 'no member %s' % ivar)"""
        expected = """return ('error', f"no member {ivar}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200111075114.1: *5* TestFstringify.test_crash_2
    def test_crash_2(self):
        """Concatenated strings with no % operand must be left unchanged."""
        # leoCheck.py, line 1704.
        # format =
        #     'files: %s lines: %s chars: %s classes: %s\n'
        #     'defs: %s calls: %s undefined calls: %s returns: %s'
        # )
        contents = r"""'files: %s\n' 'defs: %s'"""
        expected = contents + '\n'
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)

1358 #@+node:ekr.20200214155156.1: *4* TestFstringify.show_message 

    def show_message(self):  # pragma: no cover
        """Separate test of fs.message."""
        # Not auto-discovered (no test_ prefix): run manually to eyeball
        # the alignment of every message Fstringify can emit.
        fs = Fstringify()
        fs.filename = 'test_file.py'
        fs.line_number = 42
        fs.line = 'The test line\n'
        fs.silent = False
        # Test message.
        fs.message(
            "Test:\n"
            "< Left align\n"
            ":Colon: align\n"
            "> Right align\n"
            " Default align")
        #
        # change_quotes...
        fs.message("can't create f-fstring: no lt_s!")
        lt_s = "lt_s"
        delim = 'Delim'
        token = Token('Kind', 'Value')
        fs.message(
            f"unexpected token: {token.kind} {token.value}\n"
            f" lt_s: {lt_s!r}")
        fs.message(
            f"can't create f-fstring: {lt_s!r}\n"
            f": conflicting delim: {delim!r}")
        fs.message(
            f"can't create f-fstring: {lt_s!r}\n"
            f":backslash in {{expr}}: {delim!r}")
        # Check newlines...
        fs.message(
            f" can't create f-fstring: {lt_s!r}\n"
            f":curly bracket underflow:")
        fs.message(
            f" can't create f-fstring: {lt_s!r}\n"
            f":string contains a backslash:")
        fs.message(
            f" can't create f-fstring: {lt_s!r}\n"
            f":unclosed curly bracket:")
        # Make fstring
        before, after = 'Before', 'After'
        fs.message(
            f"trace:\n"
            f":from: {before!s}\n"
            f": to: {after!s}")

1404 #@+node:ekr.20200106163535.1: *4* TestFstringify.test_braces 

    def test_braces(self):
        """Literal braces in the format string must be doubled in the f-string."""
        # From pr.construct_stylesheet in leoPrinting.py
        contents = """'h1 {font-family: %s}' % (family)"""
        expected = """f"h1 {{font-family: {family}}}"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200217171334.1: *4* TestFstringify.test_backslash_in_expr
    def test_backslash_in_expr(self):
        """A backslash in the substituted expression blocks fstringification."""
        # From get_flake8_config.
        contents = r"""print('aaa\n%s' % ('\n'.join(dir_table)))"""
        expected = contents.rstrip() + '\n'
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree, silent=True)
        self.assertEqual(results, expected)
    #@+node:ekr.20191230150653.1: *4* TestFstringify.test_call_in_rhs
    def test_call_in_rhs(self):
        """Fstringify when the %-operand is a call."""
        contents = """'%s' % d()"""
        expected = """f"{d()}"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200104045907.1: *4* TestFstringify.test_call_in_rhs_2
    def test_call_in_rhs_2(self):
        """Fstringify when the %-operand is a nested call."""
        # From LM.traceSettingsDict
        contents = """print('%s' % (len(d.keys())))"""
        expected = """print(f"{len(d.keys())}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200105073155.1: *4* TestFstringify.test_call_with_attribute
    def test_call_with_attribute(self):
        """Fstringify when the %-operand is an attribute call."""
        contents = """g.blue('wrote %s' % p.atShadowFileNodeName())"""
        expected = """g.blue(f"wrote {p.atShadowFileNodeName()}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200122035055.1: *4* TestFstringify.test_call_with_comments
    def test_call_with_comments(self):
        """Comments inside the %-expression collapse to one trailing comment."""
        contents = """\
print('%s in %5.2f sec' % (
    "done", # message
    2.9, # time
)) # trailing comment"""

        expected = """\
print(f'{"done"} in {2.9:5.2f} sec') # trailing comment
"""
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(expected).rstrip() + '\n'
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200206173126.1: *4* TestFstringify.test_change_quotes
    def test_change_quotes(self):
        """Switch the outer quotes when the expression contains single quotes."""
        contents = """ret = '[%s]' % ','.join([show(z) for z in arg])"""
        expected = """ret = f"[{','.join([show(z) for z in arg])}]"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200101060616.1: *4* TestFstringify.test_complex_rhs
    def test_complex_rhs(self):
        """Fstringify a three-part format with a width spec."""
        # From LM.mergeShortcutsDicts.
        contents = (
            """g.trace('--trace-binding: %20s binds %s to %s' % ("""
            """ c.shortFileName(), binding, d.get(binding) or []))""")
        expected = (
            """g.trace(f"--trace-binding: {c.shortFileName():20} """
            """binds {binding} to {d.get(binding) or []}")\n""")
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200206174208.1: *4* TestFstringify.test_function_call
    def test_function_call(self):
        """Fstringify inside a list comprehension."""
        contents = """mods = ''.join(['%s+' % z.capitalize() for z in self.mods])"""
        expected = """mods = ''.join([f"{z.capitalize()}+" for z in self.mods])\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200106085608.1: *4* TestFstringify.test_ImportFrom
    def test_ImportFrom(self):
        """Relative imports must pass through fstringify unchanged."""
        table = (
            """from .globals import a, b""",
            """from ..globals import x, y, z""",
            """from . import j""",
        )
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            results = self.fstringify(contents, tokens, tree)
            self.assertEqual(results, contents)
    #@+node:ekr.20200106042452.1: *4* TestFstringify.test_ListComp
    def test_ListComp(self):
        """List comprehensions without % must pass through unchanged."""
        table = (
            """replaces = [L + c + R[1:] for L, R in splits if R for c in letters]""",
            """[L for L in x for c in y]""",
            """[L for L in x for c in y if L if not c]""",
        )
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            results = self.fstringify(contents, tokens, tree)
            expected = contents
            self.assertEqual(results, expected)

1515 #@+node:ekr.20200112163031.1: *4* TestFstringify.test_munge_spec 

1516 def test_munge_spec(self): 

1517 

1518 # !head:tail or :tail 

1519 table = ( 

1520 ('+1s', '', '+1'), 

1521 ('-2s', '', '>2'), 

1522 ('3s', '', '3'), 

1523 ('4r', 'r', '4'), 

1524 ) 

1525 for spec, e_head, e_tail in table: 

1526 head, tail = Fstringify().munge_spec(spec) 

1527 assert(head, tail) == (e_head, e_tail), ( 

1528 f"\n" 

1529 f" spec: {spec}\n" 

1530 f"expected head: {e_head}\n" 

1531 f" got head: {head}\n" 

1532 f"expected tail: {e_tail}\n" 

1533 f" got tail: {tail}\n") 

1534 #@+node:ekr.20200104042705.1: *4* TestFstringify.test_newlines 

    def test_newlines(self):
        """Strings containing newline escapes must pass through unchanged."""
        contents = r"""\
print("hello\n")
print('world\n')
print("hello\r\n")
print('world\r\n')
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20191230183652.1: *4* TestFstringify.test_parens_in_rhs
    def test_parens_in_rhs(self):
        """A parenthesized single %-operand becomes a plain f-string field."""
        contents = """print('%20s' % (ivar), val)"""
        expected = """print(f"{ivar:20}", val)\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200106091740.1: *4* TestFstringify.test_single_quotes
    def test_single_quotes(self):
        """Keep or switch outer quotes so embedded quotes never conflict."""
        table = (
            # Case 0.
            ("""print('%r "default"' % style_name)""",
             """print(f'{style_name!r} "default"')\n"""),
            # Case 1.
            ("""print('%r' % "val")""",
             """print(f'{"val"!r}')\n"""),
            # Case 2.
            ("""print("%r" % "val")""",
             """print(f'{"val"!r}')\n"""),
        )
        for i, data in enumerate(table):
            contents, expected = data
            description = f"test_single_quotes: {i}"
            contents, tokens, tree = self.make_data(contents, description)
            results = self.fstringify(contents, tokens, tree, filename=description)
            self.assertEqual(results, expected, msg=i)

1575 #@+node:ekr.20200214094938.1: *4* TestFstringify.test_switch_quotes 

1576 def test_switch_quotes(self): 

1577 table = ( 

1578 ( 

1579 """print('%r' % 'style_name')""", 

1580 """print(f"{'style_name'!r}")\n""", 

1581 ), 

1582 ) 

1583 for i, data in enumerate(table): 

1584 contents, expected = data 

1585 description = f"test_single_quotes: {i}" 

1586 contents, tokens, tree = self.make_data(contents, description) 

1587 results = self.fstringify(contents, tokens, tree, filename=description) 

1588 self.assertEqual(results, expected, msg=i) 

1589 #@+node:ekr.20200206173725.1: *4* TestFstringify.test_switch_quotes_2 

    def test_switch_quotes_2(self):
        """Switch quotes when the operand is a call whose argument is quoted."""
        contents = """
g.es('%s blah blah' % (
    g.angleBrackets('*')))
"""
        expected = """g.es(f"{g.angleBrackets(\'*\')} blah blah")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200206173628.1: *4* TestFstringify.test_switch_quotes_3
    def test_switch_quotes_3(self):
        """Switch outer quotes for a single-quoted string operand."""
        contents = """print('Test %s' % 'one')"""
        expected = """print(f"Test {'one'}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200219125956.1: *4* TestFstringify.test_switch_quotes_fail
    def test_switch_quotes_fail(self):
        """Mixed quote styles in the operands must block fstringification."""
        contents = """print('Test %s %s' % ('one', "two"))"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)

1616 #@-others 

1617#@+node:ekr.20220402152331.1: *3* class TestIterative(TestTOG) 

1618class TestIterative(TestTOG): 

1619 """ 

1620 Tests for the IterativeTokenGenerator class. 

1621  

1622 This class inherits: 

1623 - all the tests from the TestTOG class. 

1624 - most of the support code from the BaseTest class. 

1625 """ 

1626 debug_list = [] # 'full-traceback', 'tokens', 'tree' 

1627 

1628 #@+others 

    #@+node:ekr.20220402150424.1: *4* TestIterative.make_data (override)
    def make_data(self, contents, description=None):  # pragma: no cover
        """Return (contents, tokens, tree) for the given contents."""
        # A near-verbatim copy of BaseTest.make_data; the only intended
        # difference is the IterativeTokenGenerator line marked below.
        contents = contents.lstrip('\\\n')
        if not contents:
            return '', None, None
        self.link_error = None
        t1 = get_time()
        self.update_counts('characters', len(contents))
        # Ensure all tests end in exactly one newline.
        contents = textwrap.dedent(contents).rstrip() + '\n'
        # Create the TOG instance.
        ### This next line is why we must copy this entire method.
        self.tog = IterativeTokenGenerator()  # Was TokenOrderGenerator().
        self.tog.filename = description or g.callers(2).split(',')[0]
        # Pass 0: create the tokens and parse tree
        tokens = self.make_tokens(contents)
        if not tokens:
            self.fail('make_tokens failed')
        tree = self.make_tree(contents)
        if not tree:
            self.fail('make_tree failed')
        if 'contents' in self.debug_list:
            dump_contents(contents)
        if 'ast' in self.debug_list:
            if py_version >= (3, 9):
                # ast.dump's indent keyword is new in Python 3.9.
                # pylint: disable=unexpected-keyword-arg
                g.printObj(ast.dump(tree, indent=2), tag='ast.dump')
            else:
                g.printObj(ast.dump(tree), tag='ast.dump')
        if 'tree' in self.debug_list:  # Excellent traces for tracking down mysteries.
            dump_ast(tree)  # pragma: no cover
        if 'tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        self.balance_tokens(tokens)
        # Pass 1: create the links.
        self.create_links(tokens, tree)
        if 'post-tree' in self.debug_list:
            dump_tree(tokens, tree)  # pragma: no cover
        if 'post-tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - t1)
        if self.link_error:
            self.fail(self.link_error)  # pragma: no cover
        return contents, tokens, tree

1675 #@+node:ekr.20220403063148.1: *4* Copies of TestOrange tests 

1676 # Required for full coverage. 

1677 # These might migrate to the TestTOG class. 

    #@+node:ekr.20220403063936.1: *5* TestIterative.test_relative_imports
    def test_relative_imports(self):
        """Beautify must normalize spacing in relative imports (#2533)."""
        # #2533.
        contents = """\
from .module1 import w
from . module2 import x
from ..module1 import y
from .. module2 import z
from . import a
from.import b
from leo.core import leoExternalFiles
import leo.core.leoGlobals as g
"""
        expected = textwrap.dedent("""\
from .module1 import w
from .module2 import x
from ..module1 import y
from ..module2 import z
from . import a
from . import b
from leo.core import leoExternalFiles
import leo.core.leoGlobals as g
""")
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(expected, results)

    #@+node:ekr.20220403062001.1: *5* TestIterative.test_one_line_pet_peeves
    def test_one_line_pet_peeves(self):
        """Check that orange agrees with black on one-line 'pet peeve' inputs."""
        # A copy of TestOrange.test_one_line_pet_peeves.
        # Necessary for coverage testings for slices.

        tag = 'test_one_line_pet_peeves'
        # Except where noted, all entries are expected values....
        if 0:
            # Test fails or recents...
            table = (
                # """a[: 1 if True else 2 :]""",
                """a[:-1]""",
            )
        else:
            table = (
                # Assignments...
                # Slices (colons)...
                """a[:-1]""",
                """a[: 1 if True else 2 :]""",
                """a[1 : 1 + 2]""",
                """a[lower:]""",
                """a[lower::]""",
                """a[:upper]""",
                """a[:upper:]""",
                """a[::step]""",
                """a[lower:upper:]""",
                """a[lower:upper:step]""",
                """a[lower + offset : upper + offset]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : step_fn(x)]""",
                """a[:: step_fn(x)]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : 2 + 1]""",
                """a[:]""",
                """a[::]""",
                """a[1:]""",
                """a[1::]""",
                """a[:2]""",
                """a[:2:]""",
                """a[::3]""",
                """a[1:2]""",
                """a[1:2:]""",
                """a[:2:3]""",
                """a[1:2:3]""",
                # * and **, inside and outside function calls.
                """a = b * c""",
                # Now done in test_star_star_operator
                # """a = b ** c""",  # Black has changed recently.
                """f(*args)""",
                """f(**kwargs)""",
                """f(*args, **kwargs)""",
                """f(a, *args)""",
                """f(a=2, *args)""",
                # Calls...
                """f(-1)""",
                """f(-1 < 2)""",
                """f(1)""",
                """f(2 * 3)""",
                """f(2 + name)""",
                """f(a)""",
                """f(a.b)""",
                """f(a=2 + 3, b=4 - 5, c= 6 * 7, d=8 / 9, e=10 // 11)""",
                """f(a[1 + 2])""",
                """f({key: 1})""",
                """t = (0,)""",
                """x, y = y, x""",
                # Dicts...
                """d = {key: 1}""",
                """d['key'] = a[i]""",
                # Trailing comments: expect two spaces.
                # NOTE(review): these three entries look identical here; the
                # originals likely varied in pre-comment spacing — confirm
                # against version control before relying on them.
                """whatever # comment""",
                """whatever # comment""",
                """whatever # comment""",
                # Word ops...
                """v1 = v2 and v3 if v3 not in v4 or v5 in v6 else v7""",
                """print(v7 for v8 in v9)""",
                # Unary ops...
                """v = -1 if a < b else -2""",
                # Returns...
                """return -1""",
            )
        fails = 0
        for i, contents in enumerate(table):
            description = f"{tag} part {i}"
            contents, tokens, tree = self.make_data(contents, description)
            expected = self.blacken(contents)
            results = self.beautify(contents, tokens, tree, filename=description)
            message = (
                f"\n"
                f"  contents: {contents.rstrip()}\n"
                f"     black: {expected.rstrip()}\n"
                f"    orange: {results.rstrip()}")
            if results != expected:  # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)

    #@+node:ekr.20220403062532.1: *5* TestIterative.blacken
    def blacken(self, contents, line_length=None):
        """Return the results of running black on contents"""
        # A copy of TestOrange.blacken
        # NOTE(review): duplicated rather than shared because this class
        # inherits from TestTOG, not TestOrange — consider moving to BaseTest.
        if not black:
            self.skipTest('Can not import black')  # pragma: no cover
        # Suppress string normalization!
        try:
            mode = black.FileMode()
            mode.string_normalization = False
            if line_length is not None:
                mode.line_length = line_length
        except TypeError:  # pragma: no cover
            # Very old black versions lack FileMode attributes.
            self.skipTest('old version of black')
        return black.format_str(contents, mode=mode)

1817 #@-others 

1818#@+node:ekr.20200107174645.1: *3* class TestOrange (BaseTest) 

1819class TestOrange(BaseTest): 

1820 """ 

1821 Tests for the Orange class. 

1822 

1823 **Important**: All unit tests assume that black_mode is False. 

1824 That is, unit tests assume that no blank lines 

1825 are ever inserted or deleted. 

1826 """ 

1827 #@+others 

1828 #@+node:ekr.20200115201823.1: *4* TestOrange.blacken 

    def blacken(self, contents, line_length=None):
        """
        Return the results of running black on contents.

        Used to compute 'expected' values: Orange aims to match black's
        output for most constructs.
        """
        if not black:
            self.skipTest('Can not import black')  # pragma: no cover
        # Suppress string normalization so single quotes survive.
        try:
            mode = black.FileMode()
            mode.string_normalization = False
            if line_length is not None:
                mode.line_length = line_length
        except TypeError:  # pragma: no cover
            # Very old versions of black lack these FileMode attributes.
            self.skipTest('old version of black')
        return black.format_str(contents, mode=mode)

1842 #@+node:ekr.20200219114415.1: *4* TestOrange.test_at_doc_part 

    def test_at_doc_part(self):
        """An @doc part (#@+at ... #@@c) must pass through unchanged."""
        line_length = 40  # For testing.
        contents = """\
#@+at Line 1
# Line 2
#@@c

print('hi')
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected)
    #@+node:ekr.20200116102345.1: *4* TestOrange.test_backslash_newline
    def test_backslash_newline(self):
        """
        This test is necessarily different from black, because orange doesn't
        delete semicolon tokens.
        """
        contents = r"""
print(a);\
print(b)
print(c); \
print(d)
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        # expected = self.blacken(contents).rstrip() + '\n'
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

1877 #@+node:ekr.20200219145639.1: *4* TestOrange.test_blank_lines_after_function 

    def test_blank_lines_after_function(self):
        """Comments and blank lines around a def must be preserved."""
        contents = """\
# Comment line 1.
# Comment line 2.

def spam():
    pass
    # Properly indented comment.

# Comment line3.
# Comment line4.
a = 2
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200220050758.1: *4* TestOrange.test_blank_lines_after_function_2
    def test_blank_lines_after_function_2(self):
        """The blank line between a def and a trailing comment must survive."""
        contents = """\
# Leading comment line 1.
# Leading comment lines 2.

def spam():
    pass

# Trailing comment line.
a = 2
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200220053212.1: *4* TestOrange.test_blank_lines_after_function_3
    def test_blank_lines_after_function_3(self):
        """Blank lines between nested defs must survive beautification."""
        # From leoAtFile.py.
        # NOTE(review): the inner indentation below is reconstructed — the
        # helper `put` appears to be nested inside writeAsisNode; confirm
        # against the original test_leoAst.py.
        contents = r"""\
def writeAsisNode(self, p):
    print('1')

    def put(s):
        print('2')

    # Trailing comment 1.
    # Trailing comment 2.
    print('3')
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

1932 #@+node:ekr.20200228074455.1: *4* TestOrange.test_bug_1429 

    def test_bug_1429(self):
        """Bug 1429: a multi-line string inside an except clause must round-trip."""
        contents = r'''\
def get_semver(tag):
    """bug 1429 docstring"""
    try:
        import semantic_version
        version = str(semantic_version.Version.coerce(tag, partial=True))
        # tuple of major, minor, build, pre-release, patch
        # 5.6b2 --> 5.6-b2
    except(ImportError, ValueError) as err:
        print('\n', err)
        print("""*** Failed to parse Semantic Version from git tag '{0}'.
Expecting tag name like '5.7b2', 'leo-4.9.12', 'v4.3' for releases.
This version can't be uploaded to PyPi.org.""".format(tag))
        version = tag
    return version
'''
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        # Disable both joining and splitting.
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=0, max_split_line_length=0)
        self.assertEqual(results, expected)
    #@+node:ekr.20210318055702.1: *4* TestOrange.test_bug_1851
    def test_bug_1851(self):
        """Bug 1851: a def with a single positional arg must round-trip."""
        contents = r'''\
def foo(a1):
    pass
'''
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        # Disable both joining and splitting.
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=0, max_split_line_length=0)
        self.assertEqual(results, expected)

1968 #@+node:ekr.20200210120455.1: *4* TestOrange.test_decorator 

    def test_decorator(self):
        """Decorated defs (top-level and nested) must pass through unchanged."""
        table = (
            # Case 0.
            """\
@my_decorator(1)
def func():
    pass
""",
            # Case 1.
            """\
if 1:
    @my_decorator
    def func():
        pass
""",
            # Case 2.
            '''\
@g.commander_command('promote')
def promote(self, event=None, undoFlag=True):
    """Make all children of the selected nodes siblings of the selected node."""
''',
        )
        for i, contents in enumerate(table):
            contents, tokens, tree = self.make_data(contents)
            expected = contents
            results = self.beautify(contents, tokens, tree)
            if results != expected:
                g.trace('Fail:', i)  # pragma: no cover
            self.assertEqual(results, expected)
    #@+node:ekr.20200211094614.1: *4* TestOrange.test_dont_delete_blank_lines
    def test_dont_delete_blank_lines(self):
        """Orange must never delete blank lines inside classes or functions."""
        line_length = 40  # For testing.
        contents = """\
class Test:

    def test_func():

        pass

    a = 2
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected)

2019 #@+node:ekr.20200116110652.1: *4* TestOrange.test_function_defs 

    def test_function_defs(self):
        """Function definitions must match black's output exactly."""
        table = (
            # Case 0.
            """\
def f1(a=2 + 5):
    pass
""",
            # Case 2
            """\
def f1():
    pass
""",
            # Case 3.
            """\
def f1():
    pass
""",
            # Case 4.
            '''\
def should_kill_beautify(p):
    """Return True if p.b contains @killbeautify"""
    return 'killbeautify' in g.get_directives_dict(p)
''',
        )
        for i, contents in enumerate(table):
            contents, tokens, tree = self.make_data(contents)
            expected = self.blacken(contents).rstrip() + '\n'
            results = self.beautify(contents, tokens, tree)
            self.assertEqual(results, expected)

2050 #@+node:ekr.20200209152745.1: *4* TestOrange.test_indented_comment 

2051 def test_indented_comment(self): 

2052 

2053 line_length = 40 # For testing. 

2054 table = ( 

2055 """\ 

2056 if 1: 

2057 pass 

2058 # An indented comment. 

2059 """, 

2060 """\ 

2061 table = ( 

2062 # Indented comment. 

2063 ) 

2064 """ 

2065 ) 

2066 

2067 fails = 0 

2068 for contents in table: 

2069 contents, tokens, tree = self.make_data(contents) 

2070 expected = contents 

2071 if 0: 

2072 dump_contents(contents) 

2073 dump_tokens(tokens) 

2074 # dump_tree(tokens, tree) 

2075 results = self.beautify(contents, tokens, tree, 

2076 max_join_line_length=line_length, 

2077 max_split_line_length=line_length, 

2078 ) 

2079 message = ( 

2080 f"\n" 

2081 f" contents: {contents!r}\n" 

2082 f" expected: {expected!r}\n" 

2083 f" got: {results!r}") 

2084 if results != expected: # pragma: no cover 

2085 fails += 1 

2086 print(f"Fail: {fails}\n{message}") 

2087 assert not fails, fails 

2088 #@+node:ekr.20200116104031.1: *4* TestOrange.test_join_and_strip_condition 

    def test_join_and_strip_condition(self):
        """A short multi-line condition must be joined onto one line."""
        contents = """\
if (
    a == b or
    c == d
):
    pass
"""
        expected = """\
if (a == b or c == d):
    pass
"""
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(expected)
        # Black also removes parens, which is beyond our scope at present.
        # expected = self.blacken(contents, line_length=40)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

2108 #@+node:ekr.20200208041446.1: *4* TestOrange.test_join_leading_whitespace 

2109 def test_join_leading_whitespace(self): 

2110 

2111 line_length = 40 # For testing. 

2112 table = ( 

2113 #1234567890x1234567890x1234567890x1234567890x 

2114 """\ 

2115 if 1: 

2116 print('4444', 

2117 '5555') 

2118 """, 

2119 """\ 

2120 if 1: 

2121 print('4444', '5555')\n""", 

2122 ) 

2123 fails = 0 

2124 for contents in table: 

2125 contents, tokens, tree = self.make_data(contents) 

2126 if 0: 

2127 dump_contents(contents) 

2128 dump_tokens(tokens) 

2129 # dump_tree(tokens, tree) 

2130 expected = contents 

2131 # expected = self.blacken(contents, line_length=line_length) 

2132 results = self.beautify(contents, tokens, tree, 

2133 max_join_line_length=line_length, 

2134 max_split_line_length=line_length, 

2135 ) 

2136 message = ( 

2137 f"\n" 

2138 f" contents: {contents!r}\n" 

2139 f" expected: {expected!r}\n" 

2140 f" got: {results!r}") 

2141 if results != expected: # pragma: no cover 

2142 fails += 1 

2143 print(f"Fail: {fails}\n{message}") 

2144 assert not fails, fails 

2145 #@+node:ekr.20200121093134.1: *4* TestOrange.test_join_lines 

    def test_join_lines(self):
        """Short continued lines must be joined when they fit."""
        # Except where noted, all entries are expected values....
        line_length = 40  # For testing.
        table = (
            #1234567890x1234567890x1234567890x1234567890x
            """print('4444',\n    '5555')""",
            """print('4444', '5555')\n""",
        )
        fails = 0
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            if 0:  # Debugging only.
                dump_contents(contents)
                dump_tokens(tokens)
                # dump_tree(tokens, tree)
            expected = contents
            results = self.beautify(contents, tokens, tree,
                max_join_line_length=line_length,
                max_split_line_length=line_length,
            )
            message = (
                f"\n"
                f"  contents: {contents!r}\n"
                f"  expected: {expected!r}\n"
                f"    orange: {results!r}")
            if results != expected:  # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
    #@+node:ekr.20200210051900.1: *4* TestOrange.test_join_suppression
    def test_join_suppression(self):
        """Joining a vertical call inside a class body."""
        contents = """\
class T:
    a = 1
    print(
        a
    )
"""
        expected = """\
class T:
    a = 1
    print(a)
"""
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(expected)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

2195 #@+node:ekr.20200207093606.1: *4* TestOrange.test_join_too_long_lines 

2196 def test_join_too_long_lines(self): 

2197 

2198 # Except where noted, all entries are expected values.... 

2199 line_length = 40 # For testing. 

2200 table = ( 

2201 #1234567890x1234567890x1234567890x1234567890x 

2202 ( 

2203 """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')""", 

2204 """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')\n""", 

2205 ), 

2206 ) 

2207 fails = 0 

2208 for contents, expected in table: 

2209 contents, tokens, tree = self.make_data(contents) 

2210 if 0: 

2211 dump_contents(contents) 

2212 dump_tokens(tokens) 

2213 # dump_tree(tokens, tree) 

2214 results = self.beautify(contents, tokens, tree, 

2215 max_join_line_length=line_length, 

2216 max_split_line_length=line_length, 

2217 ) 

2218 message = ( 

2219 f"\n" 

2220 f" contents: {contents!r}\n" 

2221 f" expected: {expected!r}\n" 

2222 f" got: {results!r}") 

2223 if results != expected: # pragma: no cover 

2224 fails += 1 

2225 print(f"Fail: {fails}\n{message}") 

2226 assert not fails, fails 

2227 #@+node:ekr.20220327131225.1: *4* TestOrange.test_leading_stars 

    def test_leading_stars(self):
        """#2533: *args and **kwargs must keep their stars when joined."""
        contents = """\
def f(
    arg1,
    *args,
    **kwargs
):
    pass
"""
        expected = textwrap.dedent("""\
def f(arg1, *args, **kwargs):
    pass
""")
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(expected, results)
    #@+node:ekr.20200108075541.1: *4* TestOrange.test_leo_sentinels
    def test_leo_sentinels_1(self):
        """A Leo sentinel before a def must pass through unchanged."""
        # Careful: don't put a sentinel into the file directly.
        # That would corrupt leoAst.py.
        sentinel = '#@+node:ekr.20200105143308.54: ** test'
        contents = f"""\
{sentinel}
def spam():
    pass
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200209155457.1: *4* TestOrange.test_leo_sentinels_2
    def test_leo_sentinels_2(self):
        """A Leo sentinel before a class must pass through unchanged."""
        # Careful: don't put a sentinel into the file directly.
        # That would corrupt leoAst.py.
        sentinel = '#@+node:ekr.20200105143308.54: ** test'
        contents = f"""\
{sentinel}
class TestClass:
    pass
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200108082833.1: *4* TestOrange.test_lines_before_class
    def test_lines_before_class(self):
        """Code immediately before a class statement must pass through unchanged."""
        contents = """\
a = 2
class aClass:
    pass
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

2288 #@+node:ekr.20200110014220.86: *4* TestOrange.test_multi_line_pet_peeves 

    def test_multi_line_pet_peeves(self):
        """Multi-statement PEP 8 pet peeves: spacing around ; : and call parens."""
        contents = """\
if x == 4: pass
if x == 4 : pass
print (x, y); x, y = y, x
print (x , y) ; x , y = y , x
if(1):
    pass
elif(2):
    pass
while(3):
    pass
"""
        # At present Orange doesn't split lines...
        expected = """\
if x == 4: pass
if x == 4: pass
print(x, y); x, y = y, x
print(x, y); x, y = y, x
if (1):
    pass
elif (2):
    pass
while (3):
    pass
"""
        contents, tokens, tree = self.make_data(contents)
        expected = self.adjust_expected(expected)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

2320 #@+node:ekr.20200110014220.95: *4* TestOrange.test_one_line_pet_peeves 

    def test_one_line_pet_peeves(self):
        """One-line pet peeves: Orange's output must match black's."""
        tag = 'test_one_line_pet_peeves'
        # Except where noted, all entries are expected values....
        if 0:
            # Debugging only: test fails or recents...
            table = (
                # """a[: 1 if True else 2 :]""",
                """a[:-1]""",
            )
        else:
            table = (
                # Assignments...
                # Slices (colons)...
                """a[:-1]""",
                """a[: 1 if True else 2 :]""",
                """a[1 : 1 + 2]""",
                """a[lower:]""",
                """a[lower::]""",
                """a[:upper]""",
                """a[:upper:]""",
                """a[::step]""",
                """a[lower:upper:]""",
                """a[lower:upper:step]""",
                """a[lower + offset : upper + offset]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : step_fn(x)]""",
                """a[:: step_fn(x)]""",
                """a[: upper_fn(x) :]""",
                """a[: upper_fn(x) : 2 + 1]""",
                """a[:]""",
                """a[::]""",
                """a[1:]""",
                """a[1::]""",
                """a[:2]""",
                """a[:2:]""",
                """a[::3]""",
                """a[1:2]""",
                """a[1:2:]""",
                """a[:2:3]""",
                """a[1:2:3]""",
                # * and **, inside and outside function calls.
                """a = b * c""",
                # Now done in test_star_star_operator
                # """a = b ** c""",  # Black has changed recently.
                """f(*args)""",
                """f(**kwargs)""",
                """f(*args, **kwargs)""",
                """f(a, *args)""",
                """f(a=2, *args)""",
                # Calls...
                """f(-1)""",
                """f(-1 < 2)""",
                """f(1)""",
                """f(2 * 3)""",
                """f(2 + name)""",
                """f(a)""",
                """f(a.b)""",
                """f(a=2 + 3, b=4 - 5, c= 6 * 7, d=8 / 9, e=10 // 11)""",
                """f(a[1 + 2])""",
                """f({key: 1})""",
                """t = (0,)""",
                """x, y = y, x""",
                # Dicts...
                """d = {key: 1}""",
                """d['key'] = a[i]""",
                # Trailing comments: expect two spaces.
                """whatever # comment""",
                """whatever # comment""",
                """whatever # comment""",
                # Word ops...
                """v1 = v2 and v3 if v3 not in v4 or v5 in v6 else v7""",
                """print(v7 for v8 in v9)""",
                # Unary ops...
                """v = -1 if a < b else -2""",
                # Returns...
                """return -1""",
            )
        fails = 0
        for i, contents in enumerate(table):
            description = f"{tag} part {i}"
            contents, tokens, tree = self.make_data(contents, description)
            expected = self.blacken(contents)
            results = self.beautify(contents, tokens, tree, filename=description)
            message = (
                f"\n"
                f"  contents: {contents.rstrip()}\n"
                f"     black: {expected.rstrip()}\n"
                f"    orange: {results.rstrip()}")
            if results != expected:  # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)

2414 #@+node:ekr.20220327135448.1: *4* TestOrange.test_relative_imports 

    def test_relative_imports(self):
        """#2533: normalize spacing in relative import statements."""
        contents = """\
from .module1 import w
from . module2 import x
from ..module1 import y
from .. module2 import z
from . import a
from.import b
from leo.core import leoExternalFiles
import leo.core.leoGlobals as g
"""
        expected = textwrap.dedent("""\
from .module1 import w
from .module2 import x
from ..module1 import y
from ..module2 import z
from . import a
from . import b
from leo.core import leoExternalFiles
import leo.core.leoGlobals as g
""")
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(expected, results)
    #@+node:ekr.20200210050646.1: *4* TestOrange.test_return
    def test_return(self):
        """A return statement must match black's output."""
        contents = """return []"""
        expected = self.blacken(contents)
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200107174742.1: *4* TestOrange.test_single_quoted_string
    def test_single_quoted_string(self):
        """Single-quoted strings must not be normalized to double quotes."""
        contents = """print('hi')"""
        # blacken suppresses string normalization.
        expected = self.blacken(contents)
        contents, tokens, tree = self.make_data(contents)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

2458 #@+node:ekr.20200117180956.1: *4* TestOrange.test_split_lines 

    def test_split_lines(self):
        """Lines longer than the split length must be split, like black."""
        line_length = 40  # For testing.
        table = (
            #1234567890x1234567890x1234567890x1234567890x
            """\
if 1:
    print('1111111111', '2222222222', '3333333333')
""",
            """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc')""",
            """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc', 'ddddddddddddddddd')""",
        )
        fails = 0
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            if 0:  # Debugging only.
                dump_tokens(tokens)
                # dump_tree(tokens, tree)
            expected = self.blacken(contents, line_length=line_length)
            results = self.beautify(contents, tokens, tree,
                max_join_line_length=line_length,
                max_split_line_length=line_length,
            )
            message = (
                f"\n"
                f"  contents: {contents!s}\n"
                f"     black: {expected!s}\n"
                f"    orange: {results!s}")
            if results != expected:  # pragma: no cover
                fails += 1
                print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
    #@+node:ekr.20200210073227.1: *4* TestOrange.test_split_lines_2
    def test_split_lines_2(self):
        """Splitting a long condition: different from how black handles things."""
        line_length = 40  # For testing.
        contents = """\
if not any([z.kind == 'lt' for z in line_tokens]):
    return False
"""
        expected = """\
if not any(
    [z.kind == 'lt' for z in line_tokens]):
    return False
"""
        fails = 0
        contents, tokens, tree = self.make_data(contents)
        # expected = self.blacken(contents, line_length=line_length)
        expected = textwrap.dedent(expected)
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        message = (
            f"\n"
            f"  contents: {contents!r}\n"
            f"  expected: {expected!r}\n"
            f"       got: {results!r}")
        if results != expected:  # pragma: no cover
            fails += 1
            print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)
    #@+node:ekr.20200219144837.1: *4* TestOrange.test_split_lines_3
    def test_split_lines_3(self):
        """Splitting a long call that contains a nested tuple argument."""
        line_length = 40  # For testing.
        contents = """print('eee', ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'), 'jjjjjjj', 'kkkkkk')"""
        # This is a bit different from black, but it's good enough for now.
        expected = """\
print(
    'eee',
    ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'),
    'jjjjjjj',
    'kkkkkk',
)
"""
        fails = 0
        contents, tokens, tree = self.make_data(contents)
        # expected = self.blacken(contents, line_length=line_length)
        expected = textwrap.dedent(expected)
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        message = (
            f"\n"
            f"  contents: {contents!r}\n"
            f"  expected: {expected!r}\n"
            f"       got: {results!r}")
        if results != expected:  # pragma: no cover
            fails += 1
            print(f"Fail: {fails}\n{message}")
        self.assertEqual(fails, 0)

2554 #@+node:ekr.20220401191253.1: *4* TestOrange.test_star_star_operator 

    def test_star_star_operator(self):
        """The ** operator: Orange is more permissive than black here."""
        # Was tested in pet peeves, but this is more permissive.
        contents = """a = b ** c"""
        contents, tokens, tree = self.make_data(contents)
        # Don't rely on black for this test.
        # expected = self.blacken(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200119155207.1: *4* TestOrange.test_sync_tokens
    def test_sync_tokens(self):
        """Token synchronization must not change a one-line statement."""
        contents = """if x == 4: pass"""
        # At present Orange doesn't split lines...
        expected = """if x == 4: pass"""
        contents, tokens, tree = self.make_data(contents)
        expected = self.adjust_expected(expected)
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200209161226.1: *4* TestOrange.test_ternary
    def test_ternary(self):
        """Ternary expressions must pass through unchanged."""
        contents = """print(2 if name == 'class' else 1)"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected)

2582 #@+node:ekr.20200211093359.1: *4* TestOrange.test_verbatim 

    def test_verbatim(self):
        """#@@nobeautify must suppress beautification until #@@beautify."""
        line_length = 40  # For testing.
        contents = """\
#@@nobeautify

def addOptionsToParser(self, parser, trace_m):

    add = parser.add_option

    def add_bool(option, help, dest=None):
        add(option, action='store_true', dest=dest, help=help)

    add_bool('--diff', 'use Leo as an external git diff')
    # add_bool('--dock', 'use a Qt dock')
    add_bool('--fullscreen', 'start fullscreen')
    add_bool('--init-docks', 'put docks in default positions')
    # Multiple bool values.
    add('-v', '--version', action='store_true',
        help='print version number and exit')

# From leoAtFile.py
noDirective = 1 # not an at-directive.
allDirective = 2 # at-all (4.2)
docDirective = 3 # @doc.

#@@beautify
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected, msg=contents)
    #@+node:ekr.20200211094209.1: *4* TestOrange.test_verbatim_with_pragma
    def test_verbatim_with_pragma(self):
        """'# pragma: no beautify' must suppress beautification until '# pragma: beautify'."""
        line_length = 40  # For testing.
        contents = """\
#pragma: no beautify

def addOptionsToParser(self, parser, trace_m):

    add = parser.add_option

    def add_bool(option, help, dest=None):
        add(option, action='store_true', dest=dest, help=help)

    add_bool('--diff', 'use Leo as an external git diff')
    # add_bool('--dock', 'use a Qt dock')
    add_bool('--fullscreen', 'start fullscreen')
    add_other('--window-size', 'initial window size (height x width)', m='SIZE')
    add_other('--window-spot', 'initial window position (top x left)', m='SPOT')
    # Multiple bool values.
    add('-v', '--version', action='store_true',
        help='print version number and exit')

# pragma: beautify
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=line_length,
            max_split_line_length=line_length,
        )
        self.assertEqual(results, expected, msg=contents)
    #@+node:ekr.20200729083027.1: *4* TestOrange.verbatim2
    def test_verbatim2(self):
        """@@nobeautify must still apply after an earlier @@beautify."""
        contents = """\
#@@beautify
#@@nobeautify
#@+at Starts doc part
# More doc part.
# The @c ends the doc part.
#@@c
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.beautify(contents, tokens, tree)
        self.assertEqual(results, expected, msg=contents)

2665 #@-others 

2666#@+node:ekr.20191231130208.1: *3* class TestReassignTokens (BaseTest) 

class TestReassignTokens(BaseTest):
    """Test cases for the ReassignTokens class."""
    #@+others
    #@+node:ekr.20191231130320.1: *4* test_reassign_tokens (to do)
    def test_reassign_tokens(self):
        # TODO: placeholder — no assertions yet.
        pass
    #@+node:ekr.20191231130334.1: *4* test_nearest_common_ancestor
    def test_nearest_common_ancestor(self):
        """Smoke test: make_data must handle a %-format call expression."""
        contents = """name='uninverted %s' % d.name()"""
        self.make_data(contents)
    #@-others

2679#@+node:ekr.20200110093802.1: *3* class TestTokens (BaseTest) 

2680class TestTokens(BaseTest): 

2681 """Unit tests for tokenizing.""" 

2682 #@+others 

2683 #@+node:ekr.20200122165910.1: *4* TT.show_asttokens_script 

    def show_asttokens_script(self):  # pragma: no cover
        """
        A script showing how asttokens can *easily* do the following:
        - Inject parent/child links into ast nodes.
        - Inject many-to-many links between tokens and ast nodes.

        Demo only: not named test_*, so unittest never runs it.
        """
        # pylint: disable=import-error,reimported
        import ast
        import asttokens
        import token as token_module
        stack: List[ast.AST] = []
        # Define TestToken class and helper functions.
        #@+others
        #@+node:ekr.20200122170101.3: *5* class TestToken
        class TestToken:
            """A patchable representation of the 5-tuples created by tokenize and used by asttokens."""

            def __init__(self, kind, value):
                self.kind = kind
                self.value = value
                self.node_list: List[Any] = []

            def __str__(self):
                tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list])
                return f"{self.kind:12} {self.value:20} {tokens_s!s}"

            __repr__ = __str__
        #@+node:ekr.20200122170101.1: *5* function: atok_name
        def atok_name(token):
            """Return a good looking name for the given 5-tuple"""
            return token_module.tok_name[token[0]].lower()  # type:ignore
        #@+node:ekr.20200122170101.2: *5* function: atok_value
        def atok_value(token):
            """Print a good looking value for the given 5-tuple"""
            return token.string if atok_name(token) == 'string' else repr(token.string)
        #@+node:ekr.20200122170057.1: *5* function: dump_token
        def dump_token(token):
            # Summarize one asttokens token and the ast nodes linked to it.
            node_list = list(set(getattr(token, 'node_set', [])))
            node_list = sorted([z.__class__.__name__ for z in node_list])
            return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}"
        #@+node:ekr.20200122170337.1: *5* function: postvisit
        def postvisit(node, par_value, value):
            nonlocal stack
            stack.pop()
            return par_value or []
        #@+node:ekr.20200122170101.4: *5* function: previsit
        def previsit(node, par_value):
            nonlocal stack
            if isinstance(node, ast.Module):
                stack = []
            if stack:
                parent = stack[-1]
                children: List[ast.AST] = getattr(parent, 'children', [])
                parent.children = children + [node]  # type:ignore
                node.parent = parent
            else:
                node.parent = None
                node.children = []
            stack.append(node)
            return par_value, []
        #@-others
        table = (
            # """print('%s in %5.2f sec' % ("done", 2.9))\n""",
            """print(a[1:2:3])\n""",
        )
        for source in table:
            print(f"Source...\n\n{source}")
            atok = asttokens.ASTTokens(source, parse=True)
            # Create a patchable list of Token objects.
            tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens]
            # Inject parent/child links into nodes.
            asttokens.util.visit_tree(atok.tree, previsit, postvisit)
            # Create token.token_list for each token.
            for node in asttokens.util.walk(atok.tree):
                # Inject node into token.node_list
                for ast_token in atok.get_tokens(node, include_extra=True):
                    i = ast_token.index
                    token = tokens[i]
                    token.node_list.append(node)
            # Print the resulting parent/child links.
            for node in ast.walk(atok.tree):
                if hasattr(node, 'first_token'):
                    parent = getattr(node, 'parent', None)
                    parent_s = parent.__class__.__name__ if parent else 'None'
                    children: List[ast.AST] = getattr(node, 'children', [])
                    if children:
                        children_s = ', '.join(z.__class__.__name__ for z in children)
                    else:
                        children_s = 'None'
                    print(
                        f"\n"
                        f"    node: {node.__class__.__name__}\n"
                        f"  parent: {parent_s}\n"
                        f"children: {children_s}")
            # Print the resulting tokens.
            g.printObj(tokens, tag='Tokens')

2780 #@+node:ekr.20200121025938.1: *4* TT.show_example_dump 

2781 def show_example_dump(self): # pragma: no cover 

2782 

2783 # Will only be run when enabled explicitly. 

2784 

2785 contents = """\ 

2786 print('line 1') 

2787 print('line 2') 

2788 print('line 3') 

2789 """ 

2790 contents, tokens, tree = self.make_data(contents) 

2791 dump_contents(contents) 

2792 dump_tokens(tokens) 

2793 dump_tree(tokens, tree) 

2794 #@+node:ekr.20200110015014.6: *4* TT.test_bs_nl_tokens 

    def test_bs_nl_tokens(self):
        # Test https://bugs.python.org/issue38663:
        # tokenize once mishandled backslash-newline; the roundtrip
        # must reproduce the source exactly.

        contents = """\
print \
('abc')
"""
        self.check_roundtrip(contents)

2803 #@+node:ekr.20200110015014.8: *4* TT.test_continuation_1 

    def test_continuation_1(self):
        """Round-trip implicit line continuations inside (), [] and {}."""
        contents = """\
a = (3,4,
5,6)
y = [3, 4,
5]
z = {'a': 5,
'b':15, 'c':True}
x = len(y) + 5 - a[
3] - a[2] + len(z) - z[
'b']
"""
        self.check_roundtrip(contents)

2818 #@+node:ekr.20200111085210.1: *4* TT.test_continuation_2 

    def test_continuation_2(self):
        # Backslash means line continuation, except for comments,
        # where a trailing backslash is just text.
        contents = (
            'x=1+\\\n 2'
            '# This is a comment\\\n # This also'
        )
        self.check_roundtrip(contents)

2826 #@+node:ekr.20200111085211.1: *4* TT.test_continuation_3 

    def test_continuation_3(self):
        """Round-trip a comment containing a backslash (no continuation)."""
        contents = """\
# Comment \\\n
x = 0
"""
        self.check_roundtrip(contents)

2834 #@+node:ekr.20200110015014.10: *4* TT.test_string_concatenation_1 

2835 def test_string_concatentation_1(self): 

2836 # Two *plain* string literals on the same line 

2837 self.check_roundtrip("""'abc' 'xyz'""") 

2838 #@+node:ekr.20200111081801.1: *4* TT.test_string_concatenation_2 

2839 def test_string_concatentation_2(self): 

2840 # f-string followed by plain string on the same line 

2841 self.check_roundtrip("""f'abc' 'xyz'""") 

2842 #@+node:ekr.20200111081832.1: *4* TT.test_string_concatenation_3 

2843 def test_string_concatentation_3(self): 

2844 # plain string followed by f-string on the same line 

2845 self.check_roundtrip("""'abc' f'xyz'""") 

2846 #@+node:ekr.20160521103254.1: *4* TT.test_visitors_exist 

2847 def test_visitors_exist(self): 

2848 """Ensure that visitors for all ast nodes exist.""" 

2849 import _ast 

2850 # Compute all fields to BaseTest. 

2851 aList = sorted(dir(_ast)) 

2852 remove = [ 

2853 'Interactive', 'Suite', # Not necessary. 

2854 'AST', # The base class, 

2855 # Constants... 

2856 'PyCF_ALLOW_TOP_LEVEL_AWAIT', 

2857 'PyCF_ONLY_AST', 

2858 'PyCF_TYPE_COMMENTS', 

2859 # New ast nodes for Python 3.8. 

2860 # We can ignore these nodes because: 

2861 # 1. ast.parse does not generate them by default. 

2862 # 2. The type comments are ordinary comments. 

2863 # They do not need to be specially synced. 

2864 # 3. Tools such as black, orange, and fstringify will 

2865 # only ever handle comments as comments. 

2866 'FunctionType', 'NamedExpr', 'TypeIgnore', 

2867 ] 

2868 aList = [z for z in aList if not z[0].islower()] 

2869 # Remove base classes. 

2870 aList = [z for z in aList 

2871 if not z.startswith('_') and not z in remove] 

2872 # Now test them. 

2873 table = ( 

2874 TokenOrderGenerator, 

2875 ) 

2876 for class_ in table: 

2877 traverser = class_() 

2878 errors, nodes, ops = 0, 0, 0 

2879 for z in aList: 

2880 if hasattr(traverser, 'do_' + z): 

2881 nodes += 1 

2882 elif _op_names.get(z): 

2883 ops += 1 

2884 else: # pragma: no cover 

2885 errors += 1 

2886 print( 

2887 f"Missing visitor: " 

2888 f"{traverser.__class__.__name__}.{z}") 

2889 msg = f"{nodes} node types, {ops} op types, {errors} errors" 

2890 assert not errors, msg 

2891 #@-others 

2892#@+node:ekr.20200107144010.1: *3* class TestTopLevelFunctions (BaseTest) 

class TestTopLevelFunctions(BaseTest):
    """Tests for the top-level functions in leoAst.py."""
    #@+others
    #@+node:ekr.20200107144227.1: *4* test_get_encoding_directive
    def test_get_encoding_directive(self):
        """get_encoding_directive finds this file's utf-8 coding line."""
        path = __file__
        assert os.path.exists(path), repr(path)
        with open(path, 'rb') as stream:
            raw = stream.read()
        encoding = get_encoding_directive(raw)
        self.assertEqual(encoding.lower(), 'utf-8')
    #@+node:ekr.20200107150857.1: *4* test_strip_BOM
    def test_strip_BOM(self):
        """strip_BOM reports no encoding, or utf-8, for this file."""
        path = __file__
        assert os.path.exists(path), repr(path)
        with open(path, 'rb') as stream:
            raw = stream.read()
        assert raw, path
        encoding, junk_s = strip_BOM(raw)
        assert encoding is None or encoding.lower() == 'utf-8', repr(encoding)
    #@-others

2916#@+node:ekr.20191227152538.1: *3* class TestTOT (BaseTest) 

class TestTOT(BaseTest):
    """Tests for the TokenOrderTraverser class."""
    #@+others
    #@+node:ekr.20200111115318.1: *4* test_tot.test_traverse
    def test_traverse(self):
        """Traverse a tree with TokenOrderTraverser and record timing stats."""
        contents = """\
f(1)
b = 2 + 3
"""
        # print('%s = %s' % (2+3, 4*5))
        # Debug toggle: `if 1` traverses all of leoApp.py; flip to 0 to
        # traverse only the small `contents` example above.
        if 1:
            contents, tokens, tree = self.make_file_data('leoApp.py')
        else:
            contents, tokens, tree = self.make_data(contents)
        tot = TokenOrderTraverser()
        t1 = get_time()  # get_time is defined elsewhere in this file.
        n_nodes = tot.traverse(tree)
        t2 = get_time()
        # Accumulate counts/times in BaseTest's statistics.
        self.update_counts('nodes', n_nodes)
        self.update_times('50: TOT.traverse', t2 - t1)
        # self.dump_stats()
    #@-others

2940#@-others 

2941#@-leo