Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# -*- coding: utf-8 -*- 

2#@+leo-ver=5-thin 

3#@+node:ekr.20210902073413.1: * @file ../unittests/core/test_leoAst.py 

4#@@first 

5"""Tests of leoAst.py""" 

6#@+<< leoAst imports >> 

7#@+node:ekr.20210902074548.1: ** << leoAst imports >> 

8import ast 

9import os 

10import sys 

11import textwrap 

12import time 

13import token as token_module 

14from typing import Any, Dict, List 

15import unittest 

16import warnings 

17warnings.simplefilter("ignore") 

# pylint: disable=import-error
# Third-party imports, guarded: the tests degrade gracefully (skip) when
# the optional packages are missing.
try:
    import asttokens
except Exception:  # pragma: no cover
    asttokens = None
try:
    # Suppress a warning about imp being deprecated.
    # NOTE: catch_warnings() only saves/restores the filter state; it does
    # not filter anything by itself, so an explicit simplefilter inside the
    # context is required for the suppression the comment promises.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        import black
except Exception:  # pragma: no cover
    black = None

30 

31# pylint: disable=wrong-import-position 

32from leo.core import leoGlobals as g 

33from leo.core.leoAst import AstNotEqual 

34from leo.core.leoAst import Fstringify, Orange 

35from leo.core.leoAst import Token, TokenOrderGenerator, TokenOrderTraverser 

36from leo.core.leoAst import get_encoding_directive, read_file, strip_BOM 

37from leo.core.leoAst import make_tokens, parse_ast, tokens_to_string 

38from leo.core.leoAst import dump_ast, dump_contents, dump_tokens, dump_tree, _op_names 

39#@-<< leoAst imports >> 

# Cache the interpreter's (major, minor) version as a tuple.
# BaseTest.make_data uses it to decide whether ast.dump supports
# the indent keyword (Python 3.9+).
v1, v2, junk1, junk2, junk3 = sys.version_info
py_version = (v1, v2)

42#@+others 

43#@+node:ekr.20200107114620.1: ** functions: unit testing 

44#@+node:ekr.20191027072126.1: *3* function: compare_asts & helpers 

def compare_asts(ast1, ast2):  # pragma: no cover
    """
    Compare two ast trees. Return True if they are equal.

    On a structural mismatch, dump both trees and return False.
    Any unexpected exception is traced and also treated as a mismatch.
    """
    try:
        _compare_asts(ast1, ast2)
        return True
    except AstNotEqual:
        # Show both trees so the first difference can be located by eye.
        dump_ast(ast1, tag='AST BEFORE')
        dump_ast(ast2, tag='AST AFTER')
    except Exception:
        g.trace("Unexpected exception")
        g.es_exception()
    return False

59#@+node:ekr.20191027071653.2: *4* function._compare_asts 

def _compare_asts(node1, node2):  # pragma: no cover
    """
    Compare both nodes, and recursively compare their children.

    See also: http://stackoverflow.com/questions/3312989/
    """
    # The nodes themselves must match.
    _compare_nodes(node1, node2)
    # Their field lists must match.
    fields1 = getattr(node1, "_fields", [])  # type:ignore
    fields2 = getattr(node2, "_fields", [])  # type:ignore
    if fields1 != fields2:
        raise AstNotEqual(
            f"node1._fields: {fields1}\n" f"node2._fields: {fields2}")
    # Recursively compare each field, ignoring position/context attributes.
    ignored_fields = ('lineno', 'col_offset', 'ctx')
    for field in fields1:
        if field in ignored_fields:
            continue
        attr1 = getattr(node1, field, None)
        attr2 = getattr(node2, field, None)
        if attr1.__class__.__name__ != attr2.__class__.__name__:
            raise AstNotEqual(f"attrs1: {attr1},\n" f"attrs2: {attr2}")
        _compare_asts(attr1, attr2)

82#@+node:ekr.20191027071653.3: *4* function._compare_nodes 

def _compare_nodes(node1, node2):  # pragma: no cover
    """
    Compare node1 and node2.
    For lists and tuples, compare elements recursively.
    Raise AstNotEqual if not equal.
    """
    # Class names must always match.
    if node1.__class__.__name__ != node2.__class__.__name__:
        raise AstNotEqual(
            f"node1.__class__.__name__: {node1.__class__.__name__}\n"
            # Bug fix: was `__name_` (missing underscore), which would raise
            # AttributeError instead of reporting node2's class name.
            f"node2.__class__.__name__: {node2.__class__.__name__}"
        )
    # Special cases for strings and None
    if node1 is None:
        return
    if isinstance(node1, str):
        if node1 != node2:
            raise AstNotEqual(f"node1: {node1!r}\n" f"node2: {node2!r}")
    # Special cases for lists and tuples:
    if isinstance(node1, (tuple, list)):
        # Same class (checked above), so node2 is also a sequence.
        if len(node1) != len(node2):
            raise AstNotEqual(f"node1: {node1}\n" f"node2: {node2}")
        for i, item1 in enumerate(node1):
            item2 = node2[i]
            if item1.__class__.__name__ != item2.__class__.__name__:
                raise AstNotEqual(
                    f"list item1: {i} {item1}\n" f"list item2: {i} {item2}"
                )
            _compare_asts(item1, item2)

112#@+node:ekr.20191121081439.1: *3* function: compare_lists 

def compare_lists(list1, list2):  # pragma: no cover
    """
    Compare two lists of strings, showing the first mismatch.

    Return the index of the first mismatched lines, or None if identical.
    """
    import itertools
    # zip_longest with a sentinel makes a length difference show up
    # as an ordinary element mismatch.
    pairs = itertools.zip_longest(list1, list2, fillvalue='Missing!')
    return next(
        (i for i, (s1, s2) in enumerate(pairs) if s1 != s2),
        None)

125#@+node:ekr.20191226071135.1: *3* function: get_time 

def get_time():
    """Return the process-wide CPU time, in seconds, for timing statistics."""
    return time.process_time()

128#@+node:ekr.20210902074155.1: ** Test classes... 

129#@+node:ekr.20191227154302.1: *3* class BaseTest (TestCase) 

class BaseTest(unittest.TestCase):
    """
    The base class of all tests of leoAst.py.

    This class contains only helpers.
    """

    # Statistics, shared (class-level) and accumulated across the whole run.
    counts: Dict[str, int] = {}
    times: Dict[str, float] = {}

    # Debugging traces & behavior.
    # create_links: 'full-traceback'
    # make_data: 'contents', 'tokens', 'tree',
    #            'post-tokens', 'post-tree',
    #            'unit-test'
    debug_list: List[str] = []
    # Set by create_links when linking fails; make_data reports it via self.fail.
    link_error: Exception = None

    #@+others
    #@+node:ekr.20200110103036.1: *4* BaseTest.adjust_expected
    def adjust_expected(self, s):
        """Adjust leading indentation in the expected string s."""
        return textwrap.dedent(s.lstrip('\\\n')).rstrip() + '\n'
    #@+node:ekr.20200110092217.1: *4* BaseTest.check_roundtrip
    def check_roundtrip(self, contents):
        """Check that the tokenizer round-trips the given contents."""
        contents, tokens, tree = self.make_data(contents)
        results = tokens_to_string(tokens)
        self.assertEqual(contents, results)
    #@+node:ekr.20191227054856.1: *4* BaseTest.make_data
    def make_data(self, contents, description=None):
        """Return (contents, tokens, tree) for the given contents."""
        contents = contents.lstrip('\\\n')
        if not contents:
            return '', None, None  # pragma: no cover
        self.link_error = None
        t1 = get_time()
        self.update_counts('characters', len(contents))
        # Ensure all tests end in exactly one newline.
        contents = textwrap.dedent(contents).rstrip() + '\n'
        # Create the TOG instance.
        self.tog = TokenOrderGenerator()
        self.tog.filename = description or g.callers(2).split(',')[0]
        # Pass 0: create the tokens and parse tree
        tokens = self.make_tokens(contents)
        if not tokens:
            self.fail('make_tokens failed')  # pragma: no cover
        tree = self.make_tree(contents)
        if not tree:
            self.fail('make_tree failed')  # pragma: no cover
        if 'contents' in self.debug_list:
            dump_contents(contents)  # pragma: no cover
        if 'ast' in self.debug_list:  # pragma: no cover
            if py_version >= (3, 9):
                # ast.dump supports indent only on Python 3.9+.
                # pylint: disable=unexpected-keyword-arg
                g.printObj(ast.dump(tree, indent=2), tag='ast.dump')
            else:
                g.printObj(ast.dump(tree), tag='ast.dump')
        if 'tree' in self.debug_list:  # Excellent traces for tracking down mysteries.
            dump_ast(tree)  # pragma: no cover
        if 'tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        self.balance_tokens(tokens)
        # Pass 1: create the links.
        self.create_links(tokens, tree)
        if 'post-tree' in self.debug_list:
            dump_tree(tokens, tree)  # pragma: no cover
        if 'post-tokens' in self.debug_list:
            dump_tokens(tokens)  # pragma: no cover
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - t1)
        if self.link_error:
            self.fail(self.link_error)  # pragma: no cover
        return contents, tokens, tree
    #@+node:ekr.20191227103533.1: *4* BaseTest.make_file_data
    def make_file_data(self, filename):
        """Return (contents, tokens, tree) from the given file."""
        # Resolve filename relative to leo/core.
        directory = os.path.dirname(__file__)
        filename = g.os_path_finalize_join(directory, '..', '..', 'core', filename)
        assert os.path.exists(filename), repr(filename)
        contents = read_file(filename)
        contents, tokens, tree = self.make_data(contents, filename)
        return contents, tokens, tree
    #@+node:ekr.20191228101601.1: *4* BaseTest: passes...
    #@+node:ekr.20191228095945.11: *5* 0.1: BaseTest.make_tokens
    def make_tokens(self, contents):
        """
        BaseTest.make_tokens.

        Make tokens from contents.
        """
        t1 = get_time()
        # Tokenize.
        tokens = make_tokens(contents)
        t2 = get_time()
        self.update_counts('tokens', len(tokens))
        self.update_times('01: make-tokens', t2 - t1)
        return tokens
    #@+node:ekr.20191228102101.1: *5* 0.2: BaseTest.make_tree
    def make_tree(self, contents):
        """
        BaseTest.make_tree.

        Return the parse tree for the given contents string.
        """
        t1 = get_time()
        tree = parse_ast(contents)
        t2 = get_time()
        self.update_times('02: parse_ast', t2 - t1)
        return tree
    #@+node:ekr.20191228185201.1: *5* 0.3: BaseTest.balance_tokens
    def balance_tokens(self, tokens):
        """
        BastTest.balance_tokens.

        Insert links between corresponding paren tokens.
        """
        t1 = get_time()
        count = self.tog.balance_tokens(tokens)
        t2 = get_time()
        self.update_counts('paren-tokens', count)
        self.update_times('03: balance-tokens', t2 - t1)
        return count
    #@+node:ekr.20191228101437.1: *5* 1.1: BaseTest.create_links
    def create_links(self, tokens, tree, filename='unit test'):
        """
        BaseTest.create_links.

        Insert two-way links between the tokens and ast tree.

        On failure, record the exception in self.link_error instead of
        failing immediately; make_data reports it at the end.
        """
        tog = self.tog
        try:
            t1 = get_time()
            # Yes, list *is* required here.
            list(tog.create_links(tokens, tree))
            t2 = get_time()
            self.update_counts('nodes', tog.n_nodes)
            self.update_times('11: create-links', t2 - t1)
        except Exception as e:  # pragma: no cover
            # print('\n')
            # g.trace(g.callers(), '\n')
            if 'full-traceback' in self.debug_list:
                g.es_exception()
            # Weird: calling self.fail creates ugly failures.
            self.link_error = e
    #@+node:ekr.20191228095945.10: *5* 2.1: BaseTest.fstringify
    def fstringify(self, contents, tokens, tree, filename=None, silent=False):
        """
        BaseTest.fstringify.

        Run Fstringify over the given data and return the resulting string.
        """
        t1 = get_time()
        if not filename:
            filename = g.callers(1)
        fs = Fstringify()
        if silent:
            fs.silent = True
        result_s = fs.fstringify(contents, filename, tokens, tree)
        t2 = get_time()
        self.update_times('21: fstringify', t2 - t1)
        return result_s
    #@+node:ekr.20200107175223.1: *5* 2.2: BaseTest.beautify
    def beautify(self, contents, tokens, tree, filename=None, max_join_line_length=None, max_split_line_length=None):
        """
        BaseTest.beautify.

        Run the Orange beautifier over the given data and return the result.
        Also saves orange.code_list in self.code_list for inspection.
        """
        t1 = get_time()
        if not contents:
            return ''  # pragma: no cover
        if not filename:
            filename = g.callers(2).split(',')[0]
        orange = Orange()
        result_s = orange.beautify(contents, filename, tokens, tree,
            max_join_line_length=max_join_line_length,
            max_split_line_length=max_split_line_length)
        t2 = get_time()
        self.update_times('22: beautify', t2 - t1)
        self.code_list = orange.code_list
        return result_s
    #@+node:ekr.20191228095945.1: *4* BaseTest: stats...
    # Actions should fail by throwing an exception.
    #@+node:ekr.20191228095945.12: *5* BaseTest.dump_stats & helpers
    def dump_stats(self):  # pragma: no cover
        """Show all calculated statistics."""
        if self.counts or self.times:
            print('')
            self.dump_counts()
            self.dump_times()
            print('')
    #@+node:ekr.20191228154757.1: *6* BaseTest.dump_counts
    def dump_counts(self):  # pragma: no cover
        """Show all calculated counts."""
        for key, n in self.counts.items():
            print(f"{key:>16}: {n:>6}")
    #@+node:ekr.20191228154801.1: *6* BaseTest.dump_times
    def dump_times(self):  # pragma: no cover
        """
        Show all calculated times.

        Keys should start with a priority (sort order) of the form `[0-9][0-9]:`
        """
        for key in sorted(self.times):
            t = self.times.get(key)
            key2 = key[3:]  # Strip the 'NN: ' sort prefix for display.
            print(f"{key2:>16}: {t:6.3f} sec.")
    #@+node:ekr.20191228181624.1: *5* BaseTest.update_counts & update_times
    def update_counts(self, key, n):  # pragma: no cover
        """Update the count statistic given by key, n."""
        old_n = self.counts.get(key, 0)
        self.counts[key] = old_n + n

    def update_times(self, key, t):  # pragma: no cover
        """Update the timing statistic given by key, t."""
        old_t = self.times.get(key, 0.0)
        self.times[key] = old_t + t
    #@-others

346#@+node:ekr.20200122161530.1: *3* class Optional_TestFiles (BaseTest) 

class Optional_TestFiles(BaseTest):
    """
    Tests for the TokenOrderGenerator class that act on files.

    These are optional tests. They take a long time and are not needed
    for 100% coverage.

    All of these tests failed at one time.
    """
    #@+others
    #@+node:ekr.20200726145235.2: *4* TestFiles.test_leoApp
    def test_leoApp(self):

        self.make_file_data('leoApp.py')
    #@+node:ekr.20200726145235.1: *4* TestFiles.test_leoAst
    def test_leoAst(self):

        self.make_file_data('leoAst.py')
    #@+node:ekr.20200726145333.1: *4* TestFiles.test_leoDebugger
    def test_leoDebugger(self):

        self.make_file_data('leoDebugger.py')
    #@+node:ekr.20200726145333.2: *4* TestFiles.test_leoFind
    def test_leoFind(self):

        self.make_file_data('leoFind.py')
    #@+node:ekr.20200726145333.3: *4* TestFiles.test_leoGlobals
    def test_leoGlobals(self):

        self.make_file_data('leoGlobals.py')
    #@+node:ekr.20200726145333.4: *4* TestFiles.test_leoTips
    def test_leoTips(self):

        self.make_file_data('leoTips.py')
    #@+node:ekr.20200726145735.1: *4* TestFiles.test_runLeo
    def test_runLeo(self):

        self.make_file_data('runLeo.py')
    #@+node:ekr.20200115162419.1: *4* TestFiles.compare_tog_vs_asttokens
    def compare_tog_vs_asttokens(self):  # pragma: no cover
        """Compare asttokens token lists with TOG token lists."""
        if not asttokens:
            self.skipTest('requires asttokens')
        # Define TestToken class and helper functions.
        stack: List[ast.AST] = []
        #@+others
        #@+node:ekr.20200124024159.2: *5* class TestToken (internal)
        class TestToken:
            """A patchable representation of the 5-tuples created by tokenize and used by asttokens."""

            def __init__(self, kind, value):
                self.kind = kind
                self.value = value
                self.node_list: List[ast.AST] = []

            def __str__(self):
                tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list])
                return f"{self.kind:14} {self.value:20} {tokens_s!s}"

            __repr__ = __str__
        #@+node:ekr.20200124024159.3: *5* function: atok_name
        def atok_name(token):
            """Return a good looking name for the given 5-tuple"""
            return token_module.tok_name[token[0]].lower()  # type:ignore
        #@+node:ekr.20200124024159.4: *5* function: atok_value
        def atok_value(token):
            """Print a good looking value for the given 5-tuple"""
            return token.string if atok_name(token) == 'string' else repr(token.string)
        #@+node:ekr.20200124024159.5: *5* function: dump_token
        def dump_token(token):
            # One-line summary of a token and the ast nodes attached to it.
            node_list = list(set(getattr(token, 'node_set', [])))
            node_list = sorted([z.__class__.__name__ for z in node_list])
            return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}"
        #@+node:ekr.20200124024159.6: *5* function: postvisit
        def postvisit(node, par_value, value):
            # asttokens visit_tree callback: pop the parent stack on exit.
            nonlocal stack
            stack.pop()
            return par_value or []
        #@+node:ekr.20200124024159.7: *5* function: previsit
        def previsit(node, par_value):
            # asttokens visit_tree callback: link node to its parent and
            # push it on the parent stack.
            nonlocal stack
            if isinstance(node, ast.Module):
                stack = []
            if stack:
                parent = stack[-1]
                children: List[ast.AST] = getattr(parent, 'children', [])
                parent.children = children + [node]  # type:ignore
                node.parent = parent
            else:
                node.parent = None
                node.children = []
            stack.append(node)
            return par_value, []
        #@-others
        # NOTE(review): machine-specific hard-coded path — this optional
        # helper only runs on the original author's machine as written.
        directory = r'c:\leo.repo\leo-editor\leo\core'
        filename = 'leoAst.py'
        filename = os.path.join(directory, filename)
        # A fair comparison omits the read time.
        t0 = get_time()
        contents = read_file(filename)
        t1 = get_time()
        # Part 1: TOG.
        tog = TokenOrderGenerator()
        tog.filename = filename
        tokens = make_tokens(contents)
        tree = parse_ast(contents)
        tog.create_links(tokens, tree)
        tog.balance_tokens(tokens)
        t2 = get_time()
        # Part 2: Create asttokens data.
        atok = asttokens.ASTTokens(contents, parse=True, filename=filename)
        t3 = get_time()
        # Create a patchable list of TestToken objects.
        tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens]  # type:ignore
        # Inject parent/child links into nodes.
        asttokens.util.visit_tree(atok.tree, previsit, postvisit)
        # Create token.token_list for each token.
        for node in asttokens.util.walk(atok.tree):
            # Inject node into token.node_list
            for ast_token in atok.get_tokens(node, include_extra=True):
                i = ast_token.index
                token = tokens[i]
                token.node_list.append(node)
        t4 = get_time()
        if 1:
            print(
                f" read: {t1-t0:5.3f} sec.\n"
                f" TOG: {t2-t1:5.3f} sec.\n"
                f"asttokens 1: {t3-t2:5.3f} sec.\n"
                f"asttokens 2: {t4-t3:5.3f} sec.\n")
        if 0:
            print('===== asttokens =====\n')
            for node in asttokens.util.walk(tree):
                print(f"{node.__class__.__name__:>10} {atok.get_text(node)!s}")
    #@-others

482#@+node:ekr.20191229083512.1: *3* class TestFstringify (BaseTest) 

class TestFstringify(BaseTest):
    """Tests for the TokenOrderGenerator class."""
    #@+others
    #@+node:ekr.20200111043311.1: *4* Bugs...
    #@+node:ekr.20210318054321.1: *5* TestFstringify.test_bug_1851
    def test_bug_1851(self):
        # leoCheck.py.
        # Fstringify must leave code with no % operators unchanged.
        contents = """\
from dataclasses import dataclass

@dataclass(frozen=True)
class TestClass:
    value: str
    start: int
    end: int

f = TestClass('abc', 0, 10)
"""
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(contents).rstrip() + '\n'
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200111043311.2: *5* TestFstringify.test_crash_1
    def test_crash_1(self):
        # leoCheck.py.
        contents = """return ('error', 'no member %s' % ivar)"""
        expected = """return ('error', f"no member {ivar}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200111075114.1: *5* TestFstringify.test_crash_2
    def test_crash_2(self):
        # leoCheck.py, line 1704.
        # format =
        # 'files: %s lines: %s chars: %s classes: %s\n'
        # 'defs: %s calls: %s undefined calls: %s returns: %s'
        # )
        # Adjacent string literals with no % operator: must be unchanged.
        contents = r"""'files: %s\n' 'defs: %s'"""
        expected = contents + '\n'
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200214155156.1: *4* TestFstringify.show_message
    def show_message(self):  # pragma: no cover
        """Separate test of fs.message."""
        fs = Fstringify()
        fs.filename = 'test_file.py'
        fs.line_number = 42
        fs.line = 'The test line\n'
        fs.silent = False
        # Test message.
        fs.message(
            "Test:\n"
            "< Left align\n"
            ":Colon: align\n"
            "> Right align\n"
            " Default align")
        #
        # change_quotes...
        fs.message("can't create f-fstring: no lt_s!")
        lt_s = "lt_s"
        delim = 'Delim'
        token = Token('Kind', 'Value')
        fs.message(
            f"unexpected token: {token.kind} {token.value}\n"
            f" lt_s: {lt_s!r}")
        fs.message(
            f"can't create f-fstring: {lt_s!r}\n"
            f": conflicting delim: {delim!r}")
        fs.message(
            f"can't create f-fstring: {lt_s!r}\n"
            f":backslash in {{expr}}: {delim!r}")
        # Check newlines...
        fs.message(
            f" can't create f-fstring: {lt_s!r}\n"
            f":curly bracket underflow:")
        fs.message(
            f" can't create f-fstring: {lt_s!r}\n"
            f":string contains a backslash:")
        fs.message(
            f" can't create f-fstring: {lt_s!r}\n"
            f":unclosed curly bracket:")
        # Make fstring
        before, after = 'Before', 'After'
        fs.message(
            f"trace:\n"
            f":from: {before!s}\n"
            f": to: {after!s}")
    #@+node:ekr.20200106163535.1: *4* TestFstringify.test_braces
    def test_braces(self):

        # From pr.construct_stylesheet in leoPrinting.py
        # Literal braces must be doubled in the resulting f-string.
        contents = """'h1 {font-family: %s}' % (family)"""
        expected = """f"h1 {{font-family: {family}}}"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200217171334.1: *4* TestFstringify.test_backslash_in_expr
    def test_backslash_in_expr(self):
        # From get_flake8_config.
        # A backslash in the interpolated expression is illegal in an
        # f-string, so the source must be left unchanged.
        contents = r"""print('aaa\n%s' % ('\n'.join(dir_table)))"""
        expected = contents.rstrip() + '\n'
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree, silent=True)
        self.assertEqual(results, expected)
    #@+node:ekr.20191230150653.1: *4* TestFstringify.test_call_in_rhs
    def test_call_in_rhs(self):

        contents = """'%s' % d()"""
        expected = """f"{d()}"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200104045907.1: *4* TestFstringify.test_call_in_rhs_2
    def test_call_in_rhs_2(self):

        # From LM.traceSettingsDict
        contents = """print('%s' % (len(d.keys())))"""
        expected = """print(f"{len(d.keys())}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200105073155.1: *4* TestFstringify.test_call_with_attribute
    def test_call_with_attribute(self):

        contents = """g.blue('wrote %s' % p.atShadowFileNodeName())"""
        expected = """g.blue(f"wrote {p.atShadowFileNodeName()}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200122035055.1: *4* TestFstringify.test_call_with_comments
    def test_call_with_comments(self):

        # Interior comments are dropped; the trailing comment survives.
        contents = """\
print('%s in %5.2f sec' % (
    "done", # message
    2.9, # time
)) # trailing comment"""

        expected = """\
print(f'{"done"} in {2.9:5.2f} sec') # trailing comment
"""
        contents, tokens, tree = self.make_data(contents)
        expected = textwrap.dedent(expected).rstrip() + '\n'
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200206173126.1: *4* TestFstringify.test_change_quotes
    def test_change_quotes(self):

        contents = """ret = '[%s]' % ','.join([show(z) for z in arg])"""
        expected = """ret = f"[{','.join([show(z) for z in arg])}]"\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200101060616.1: *4* TestFstringify.test_complex_rhs
    def test_complex_rhs(self):
        # From LM.mergeShortcutsDicts.
        contents = (
            """g.trace('--trace-binding: %20s binds %s to %s' % ("""
            """ c.shortFileName(), binding, d.get(binding) or []))""")
        expected = (
            """g.trace(f"--trace-binding: {c.shortFileName():20} """
            """binds {binding} to {d.get(binding) or []}")\n""")
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200206174208.1: *4* TestFstringify.test_function_call
    def test_function_call(self):

        contents = """mods = ''.join(['%s+' % z.capitalize() for z in self.mods])"""
        expected = """mods = ''.join([f"{z.capitalize()}+" for z in self.mods])\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200106085608.1: *4* TestFstringify.test_ImportFrom
    def test_ImportFrom(self):

        # Relative imports must round-trip unchanged.
        table = (
            """from .globals import a, b""",
            """from ..globals import x, y, z""",
            """from . import j""",
        )
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            results = self.fstringify(contents, tokens, tree)
            self.assertEqual(results, contents)
    #@+node:ekr.20200106042452.1: *4* TestFstringify.test_ListComp
    def test_ListComp(self):

        # Comprehensions with multiple for/if clauses must round-trip unchanged.
        table = (
            """replaces = [L + c + R[1:] for L, R in splits if R for c in letters]""",
            """[L for L in x for c in y]""",
            """[L for L in x for c in y if L if not c]""",
        )
        for contents in table:
            contents, tokens, tree = self.make_data(contents)
            results = self.fstringify(contents, tokens, tree)
            expected = contents
            self.assertEqual(results, expected)
    #@+node:ekr.20200112163031.1: *4* TestFstringify.test_munge_spec
    def test_munge_spec(self):

        # !head:tail or :tail
        table = (
            ('+1s', '', '+1'),
            ('-2s', '', '>2'),
            ('3s', '', '3'),
            ('4r', 'r', '4'),
        )
        for spec, e_head, e_tail in table:
            head, tail = Fstringify().munge_spec(spec)
            # Note: the parens bind to the comparison, so this asserts the
            # tuple equality with the f-string as the failure message.
            assert(head, tail) == (e_head, e_tail), (
                f"\n"
                f" spec: {spec}\n"
                f"expected head: {e_head}\n"
                f" got head: {head}\n"
                f"expected tail: {e_tail}\n"
                f" got tail: {tail}\n")
    #@+node:ekr.20200104042705.1: *4* TestFstringify.test_newlines
    def test_newlines(self):

        # Escaped newlines in plain strings must round-trip unchanged.
        contents = r"""\
print("hello\n")
print('world\n')
print("hello\r\n")
print('world\r\n')
"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20191230183652.1: *4* TestFstringify.test_parens_in_rhs
    def test_parens_in_rhs(self):

        contents = """print('%20s' % (ivar), val)"""
        expected = """print(f"{ivar:20}", val)\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200106091740.1: *4* TestFstringify.test_single_quotes
    def test_single_quotes(self):

        table = (
            # Case 0.
            ("""print('%r "default"' % style_name)""",
             """print(f'{style_name!r} "default"')\n"""),
            # Case 1.
            ("""print('%r' % "val")""",
             """print(f'{"val"!r}')\n"""),
            # Case 2.
            ("""print("%r" % "val")""",
             """print(f'{"val"!r}')\n"""),
        )
        for i, data in enumerate(table):
            contents, expected = data
            description = f"test_single_quotes: {i}"
            contents, tokens, tree = self.make_data(contents, description)
            results = self.fstringify(contents, tokens, tree, filename=description)
            self.assertEqual(results, expected, msg=i)
    #@+node:ekr.20200214094938.1: *4* TestFstringify.test_switch_quotes
    def test_switch_quotes(self):
        table = (
            (
                """print('%r' % 'style_name')""",
                """print(f"{'style_name'!r}")\n""",
            ),
        )
        for i, data in enumerate(table):
            contents, expected = data
            description = f"test_single_quotes: {i}"
            contents, tokens, tree = self.make_data(contents, description)
            results = self.fstringify(contents, tokens, tree, filename=description)
            self.assertEqual(results, expected, msg=i)
    #@+node:ekr.20200206173725.1: *4* TestFstringify.test_switch_quotes_2
    def test_switch_quotes_2(self):

        contents = """
g.es('%s blah blah' % (
    g.angleBrackets('*')))
"""
        expected = """g.es(f"{g.angleBrackets(\'*\')} blah blah")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200206173628.1: *4* TestFstringify.test_switch_quotes_3
    def test_switch_quotes_3(self):

        contents = """print('Test %s' % 'one')"""
        expected = """print(f"Test {'one'}")\n"""
        contents, tokens, tree = self.make_data(contents)
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@+node:ekr.20200219125956.1: *4* TestFstringify.test_switch_quotes_fail
    def test_switch_quotes_fail(self):

        # Both quote styles already in use: no safe conversion exists.
        contents = """print('Test %s %s' % ('one', "two"))"""
        contents, tokens, tree = self.make_data(contents)
        expected = contents
        results = self.fstringify(contents, tokens, tree)
        self.assertEqual(results, expected)
    #@-others

784#@+node:ekr.20200107174645.1: *3* class TestOrange (BaseTest) 

785class TestOrange(BaseTest): 

786 """ 

787 Tests for the Orange class. 

788 

789 **Important**: All unit tests assume that black_mode is False. 

790 That is, unit tests assume that no blank lines 

791 are ever inserted or deleted. 

792 """ 

793 #@+others 

794 #@+node:ekr.20200115201823.1: *4* TestOrange.blacken 

    def blacken(self, contents, line_length=None):
        """Return the results of running black on contents"""
        if not black:
            self.skipTest('Can not import black')  # pragma: no cover
        # Suppress string normalization!
        try:
            mode = black.FileMode()
            mode.string_normalization = False
            if line_length is not None:
                mode.line_length = line_length
        except TypeError:  # pragma: no cover
            # FileMode signature differs in very old black releases.
            self.skipTest('old version of black')
        return black.format_str(contents, mode=mode)

808 #@+node:ekr.20200228074455.1: *4* TestOrange.test_bug_1429 

    def test_bug_1429(self):

        # A docstring plus nested triple-quoted string must round-trip
        # through the beautifier unchanged (join/split disabled).
        contents = r'''\
def get_semver(tag):
    """bug 1429 docstring"""
    try:
        import semantic_version
        version = str(semantic_version.Version.coerce(tag, partial=True))
        # tuple of major, minor, build, pre-release, patch
        # 5.6b2 --> 5.6-b2
    except(ImportError, ValueError) as err:
        print('\n', err)
        print("""*** Failed to parse Semantic Version from git tag '{0}'.
        Expecting tag name like '5.7b2', 'leo-4.9.12', 'v4.3' for releases.
        This version can't be uploaded to PyPi.org.""".format(tag))
        version = tag
    return version
'''
        contents, tokens, tree = self.make_data(contents)
        expected = contents.rstrip() + '\n'
        results = self.beautify(contents, tokens, tree,
            max_join_line_length=0, max_split_line_length=0)
        self.assertEqual(results, expected)

832 #@+node:ekr.20210318055702.1: *4* TestOrange.test_bug_1851 

833 def test_bug_1851(self): 

834 

835 contents = r'''\ 

836 def foo(a1): 

837 pass 

838 ''' 

839 contents, tokens, tree = self.make_data(contents) 

840 expected = contents.rstrip() + '\n' 

841 results = self.beautify(contents, tokens, tree, 

842 max_join_line_length=0, max_split_line_length=0) 

843 self.assertEqual(results, expected) 

844 #@+node:ekr.20200219114415.1: *4* TestOrange.test_at_doc_part 

845 def test_at_doc_part(self): 

846 

847 line_length = 40 # For testing. 

848 contents = """\ 

849 #@+at Line 1 

850 # Line 2 

851 #@@c 

852 

853 print('hi') 

854 """ 

855 contents, tokens, tree = self.make_data(contents) 

856 expected = contents.rstrip() + '\n' 

857 results = self.beautify(contents, tokens, tree, 

858 max_join_line_length=line_length, 

859 max_split_line_length=line_length, 

860 ) 

861 self.assertEqual(results, expected) 

862 #@+node:ekr.20200116102345.1: *4* TestOrange.test_backslash_newline 

863 def test_backslash_newline(self): 

864 """ 

865 This test is necessarily different from black, because orange doesn't 

866 delete semicolon tokens. 

867 """ 

868 contents = r""" 

869 print(a);\ 

870 print(b) 

871 print(c); \ 

872 print(d) 

873 """ 

874 contents, tokens, tree = self.make_data(contents) 

875 expected = contents.rstrip() + '\n' 

876 # expected = self.blacken(contents).rstrip() + '\n' 

877 results = self.beautify(contents, tokens, tree) 

878 self.assertEqual(results, expected) 

879 #@+node:ekr.20200219145639.1: *4* TestOrange.test_blank_lines_after_function 

880 def test_blank_lines_after_function(self): 

881 

882 contents = """\ 

883 # Comment line 1. 

884 # Comment line 2. 

885 

886 def spam(): 

887 pass 

888 # Properly indented comment. 

889 

890 # Comment line3. 

891 # Comment line4. 

892 a = 2 

893 """ 

894 contents, tokens, tree = self.make_data(contents) 

895 expected = contents 

896 results = self.beautify(contents, tokens, tree) 

897 self.assertEqual(results, expected) 

898 #@+node:ekr.20200220050758.1: *4* TestOrange.test_blank_lines_after_function_2 

899 def test_blank_lines_after_function_2(self): 

900 

901 contents = """\ 

902 # Leading comment line 1. 

903 # Leading comment lines 2. 

904 

905 def spam(): 

906 pass 

907 

908 # Trailing comment line. 

909 a = 2 

910 """ 

911 contents, tokens, tree = self.make_data(contents) 

912 expected = contents 

913 results = self.beautify(contents, tokens, tree) 

914 self.assertEqual(results, expected) 

915 #@+node:ekr.20200220053212.1: *4* TestOrange.test_blank_lines_after_function_3 

916 def test_blank_lines_after_function_3(self): 

917 

918 # From leoAtFile.py. 

919 contents = r"""\ 

920 def writeAsisNode(self, p): 

921 print('1') 

922 

923 def put(s): 

924 print('2') 

925 

926 # Trailing comment 1. 

927 # Trailing comment 2. 

928 print('3') 

929 """ 

930 contents, tokens, tree = self.make_data(contents) 

931 expected = contents 

932 results = self.beautify(contents, tokens, tree) 

933 self.assertEqual(results, expected) 

934 #@+node:ekr.20200210120455.1: *4* TestOrange.test_decorator 

935 def test_decorator(self): 

936 

937 table = ( 

938 # Case 0. 

939 """\ 

940 @my_decorator(1) 

941 def func(): 

942 pass 

943 """, 

944 # Case 1. 

945 """\ 

946 if 1: 

947 @my_decorator 

948 def func(): 

949 pass 

950 """, 

951 # Case 2. 

952 '''\ 

953 @g.commander_command('promote') 

954 def promote(self, event=None, undoFlag=True): 

955 """Make all children of the selected nodes siblings of the selected node.""" 

956 ''', 

957 ) 

958 for i, contents in enumerate(table): 

959 contents, tokens, tree = self.make_data(contents) 

960 expected = contents 

961 results = self.beautify(contents, tokens, tree) 

962 if results != expected: 

963 g.trace('Fail:', i) # pragma: no cover 

964 self.assertEqual(results, expected) 

965 #@+node:ekr.20200211094614.1: *4* TestOrange.test_dont_delete_blank_lines 

966 def test_dont_delete_blank_lines(self): 

967 

968 line_length = 40 # For testing. 

969 contents = """\ 

970 class Test: 

971 

972 def test_func(): 

973 

974 pass 

975 

976 a = 2 

977 """ 

978 contents, tokens, tree = self.make_data(contents) 

979 expected = contents.rstrip() + '\n' 

980 results = self.beautify(contents, tokens, tree, 

981 max_join_line_length=line_length, 

982 max_split_line_length=line_length, 

983 ) 

984 self.assertEqual(results, expected) 

985 #@+node:ekr.20200116110652.1: *4* TestOrange.test_function_defs 

986 def test_function_defs(self): 

987 

988 table = ( 

989 # Case 0. 

990 """\ 

991 def f1(a=2 + 5): 

992 pass 

993 """, 

994 # Case 2 

995 """\ 

996 def f1(): 

997 pass 

998 """, 

999 # Case 3. 

1000 """\ 

1001 def f1(): 

1002 pass 

1003 """, 

1004 # Case 4. 

1005 '''\ 

1006 def should_kill_beautify(p): 

1007 """Return True if p.b contains @killbeautify""" 

1008 return 'killbeautify' in g.get_directives_dict(p) 

1009 ''', 

1010 ) 

1011 for i, contents in enumerate(table): 

1012 contents, tokens, tree = self.make_data(contents) 

1013 expected = self.blacken(contents).rstrip() + '\n' 

1014 results = self.beautify(contents, tokens, tree) 

1015 self.assertEqual(results, expected) 

1016 #@+node:ekr.20200209152745.1: *4* TestOrange.test_indented_comment 

1017 def test_indented_comment(self): 

1018 

1019 line_length = 40 # For testing. 

1020 table = ( 

1021 """\ 

1022 if 1: 

1023 pass 

1024 # An indented comment. 

1025 """, 

1026 """\ 

1027 table = ( 

1028 # Indented comment. 

1029 ) 

1030 """ 

1031 ) 

1032 

1033 fails = 0 

1034 for contents in table: 

1035 contents, tokens, tree = self.make_data(contents) 

1036 expected = contents 

1037 if 0: 

1038 dump_contents(contents) 

1039 dump_tokens(tokens) 

1040 # dump_tree(tokens, tree) 

1041 results = self.beautify(contents, tokens, tree, 

1042 max_join_line_length=line_length, 

1043 max_split_line_length=line_length, 

1044 ) 

1045 message = ( 

1046 f"\n" 

1047 f" contents: {contents!r}\n" 

1048 f" expected: {expected!r}\n" 

1049 f" got: {results!r}") 

1050 if results != expected: # pragma: no cover 

1051 fails += 1 

1052 print(f"Fail: {fails}\n{message}") 

1053 assert not fails, fails 

1054 #@+node:ekr.20200116104031.1: *4* TestOrange.test_join_and_strip_condition 

1055 def test_join_and_strip_condition(self): 

1056 

1057 contents = """\ 

1058 if ( 

1059 a == b or 

1060 c == d 

1061 ): 

1062 pass 

1063 """ 

1064 expected = """\ 

1065 if (a == b or c == d): 

1066 pass 

1067 """ 

1068 contents, tokens, tree = self.make_data(contents) 

1069 expected = textwrap.dedent(expected) 

1070 # Black also removes parens, which is beyond our scope at present. 

1071 # expected = self.blacken(contents, line_length=40) 

1072 results = self.beautify(contents, tokens, tree) 

1073 self.assertEqual(results, expected) 

1074 #@+node:ekr.20200208041446.1: *4* TestOrange.test_join_leading_whitespace 

1075 def test_join_leading_whitespace(self): 

1076 

1077 line_length = 40 # For testing. 

1078 table = ( 

1079 #1234567890x1234567890x1234567890x1234567890x 

1080 """\ 

1081 if 1: 

1082 print('4444', 

1083 '5555') 

1084 """, 

1085 """\ 

1086 if 1: 

1087 print('4444', '5555')\n""", 

1088 ) 

1089 fails = 0 

1090 for contents in table: 

1091 contents, tokens, tree = self.make_data(contents) 

1092 if 0: 

1093 dump_contents(contents) 

1094 dump_tokens(tokens) 

1095 # dump_tree(tokens, tree) 

1096 expected = contents 

1097 # expected = self.blacken(contents, line_length=line_length) 

1098 results = self.beautify(contents, tokens, tree, 

1099 max_join_line_length=line_length, 

1100 max_split_line_length=line_length, 

1101 ) 

1102 message = ( 

1103 f"\n" 

1104 f" contents: {contents!r}\n" 

1105 f" expected: {expected!r}\n" 

1106 f" got: {results!r}") 

1107 if results != expected: # pragma: no cover 

1108 fails += 1 

1109 print(f"Fail: {fails}\n{message}") 

1110 assert not fails, fails 

1111 #@+node:ekr.20200121093134.1: *4* TestOrange.test_join_lines 

1112 def test_join_lines(self): 

1113 

1114 # Except where noted, all entries are expected values.... 

1115 line_length = 40 # For testing. 

1116 table = ( 

1117 #1234567890x1234567890x1234567890x1234567890x 

1118 """print('4444',\n '5555')""", 

1119 """print('4444', '5555')\n""", 

1120 ) 

1121 fails = 0 

1122 for contents in table: 

1123 contents, tokens, tree = self.make_data(contents) 

1124 if 0: 

1125 dump_contents(contents) 

1126 dump_tokens(tokens) 

1127 # dump_tree(tokens, tree) 

1128 expected = contents 

1129 results = self.beautify(contents, tokens, tree, 

1130 max_join_line_length=line_length, 

1131 max_split_line_length=line_length, 

1132 ) 

1133 message = ( 

1134 f"\n" 

1135 f" contents: {contents!r}\n" 

1136 f" expected: {expected!r}\n" 

1137 f" orange: {results!r}") 

1138 if results != expected: # pragma: no cover 

1139 fails += 1 

1140 print(f"Fail: {fails}\n{message}") 

1141 self.assertEqual(fails, 0) 

1142 #@+node:ekr.20200210051900.1: *4* TestOrange.test_join_suppression 

1143 def test_join_suppression(self): 

1144 

1145 contents = """\ 

1146 class T: 

1147 a = 1 

1148 print( 

1149 a 

1150 ) 

1151 """ 

1152 expected = """\ 

1153 class T: 

1154 a = 1 

1155 print(a) 

1156 """ 

1157 contents, tokens, tree = self.make_data(contents) 

1158 expected = textwrap.dedent(expected) 

1159 results = self.beautify(contents, tokens, tree) 

1160 self.assertEqual(results, expected) 

1161 #@+node:ekr.20200207093606.1: *4* TestOrange.test_join_too_long_lines 

1162 def test_join_too_long_lines(self): 

1163 

1164 # Except where noted, all entries are expected values.... 

1165 line_length = 40 # For testing. 

1166 table = ( 

1167 #1234567890x1234567890x1234567890x1234567890x 

1168 ( 

1169 """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')""", 

1170 """print('aaaaaaaaaaaa',\n 'bbbbbbbbbbbb', 'cccccccccccccccc')\n""", 

1171 ), 

1172 ) 

1173 fails = 0 

1174 for contents, expected in table: 

1175 contents, tokens, tree = self.make_data(contents) 

1176 if 0: 

1177 dump_contents(contents) 

1178 dump_tokens(tokens) 

1179 # dump_tree(tokens, tree) 

1180 results = self.beautify(contents, tokens, tree, 

1181 max_join_line_length=line_length, 

1182 max_split_line_length=line_length, 

1183 ) 

1184 message = ( 

1185 f"\n" 

1186 f" contents: {contents!r}\n" 

1187 f" expected: {expected!r}\n" 

1188 f" got: {results!r}") 

1189 if results != expected: # pragma: no cover 

1190 fails += 1 

1191 print(f"Fail: {fails}\n{message}") 

1192 assert not fails, fails 

1193 #@+node:ekr.20200108075541.1: *4* TestOrange.test_leo_sentinels 

1194 def test_leo_sentinels_1(self): 

1195 

1196 # Careful: don't put a sentinel into the file directly. 

1197 # That would corrupt leoAst.py. 

1198 sentinel = '#@+node:ekr.20200105143308.54: ** test' 

1199 contents = f"""\ 

1200 {sentinel} 

1201 def spam(): 

1202 pass 

1203 """ 

1204 contents, tokens, tree = self.make_data(contents) 

1205 expected = contents.rstrip() + '\n' 

1206 results = self.beautify(contents, tokens, tree) 

1207 self.assertEqual(results, expected) 

1208 #@+node:ekr.20200209155457.1: *4* TestOrange.test_leo_sentinels_2 

1209 def test_leo_sentinels_2(self): 

1210 

1211 # Careful: don't put a sentinel into the file directly. 

1212 # That would corrupt leoAst.py. 

1213 sentinel = '#@+node:ekr.20200105143308.54: ** test' 

1214 contents = f"""\ 

1215 {sentinel} 

1216 class TestClass: 

1217 pass 

1218 """ 

1219 contents, tokens, tree = self.make_data(contents) 

1220 expected = contents.rstrip() + '\n' 

1221 results = self.beautify(contents, tokens, tree) 

1222 self.assertEqual(results, expected) 

1223 #@+node:ekr.20200108082833.1: *4* TestOrange.test_lines_before_class 

1224 def test_lines_before_class(self): 

1225 

1226 contents = """\ 

1227 a = 2 

1228 class aClass: 

1229 pass 

1230 """ 

1231 contents, tokens, tree = self.make_data(contents) 

1232 expected = contents 

1233 results = self.beautify(contents, tokens, tree) 

1234 self.assertEqual(results, expected) 

1235 #@+node:ekr.20200110014220.86: *4* TestOrange.test_multi_line_pet_peeves 

1236 def test_multi_line_pet_peeves(self): 

1237 

1238 contents = """\ 

1239 if x == 4: pass 

1240 if x == 4 : pass 

1241 print (x, y); x, y = y, x 

1242 print (x , y) ; x , y = y , x 

1243 if(1): 

1244 pass 

1245 elif(2): 

1246 pass 

1247 while(3): 

1248 pass 

1249 """ 

1250 # At present Orange doesn't split lines... 

1251 expected = """\ 

1252 if x == 4: pass 

1253 if x == 4: pass 

1254 print(x, y); x, y = y, x 

1255 print(x, y); x, y = y, x 

1256 if (1): 

1257 pass 

1258 elif (2): 

1259 pass 

1260 while (3): 

1261 pass 

1262 """ 

1263 contents, tokens, tree = self.make_data(contents) 

1264 expected = self.adjust_expected(expected) 

1265 results = self.beautify(contents, tokens, tree) 

1266 self.assertEqual(results, expected) 

1267 #@+node:ekr.20200110014220.95: *4* TestOrange.test_one_line_pet_peeves 

1268 def test_one_line_pet_peeves(self): 

1269 

1270 tag = 'test_one_line_pet_peeves' 

1271 # Except where noted, all entries are expected values.... 

1272 if 0: 

1273 # Test fails or recents... 

1274 table = ( 

1275 # """a[: 1 if True else 2 :]""", 

1276 """a[:-1]""", 

1277 ) 

1278 else: 

1279 table = ( 

1280 # Assignments... 

1281 # Slices (colons)... 

1282 """a[:-1]""", 

1283 """a[: 1 if True else 2 :]""", 

1284 """a[1 : 1 + 2]""", 

1285 """a[lower:]""", 

1286 """a[lower::]""", 

1287 """a[:upper]""", 

1288 """a[:upper:]""", 

1289 """a[::step]""", 

1290 """a[lower:upper:]""", 

1291 """a[lower:upper:step]""", 

1292 """a[lower + offset : upper + offset]""", 

1293 """a[: upper_fn(x) :]""", 

1294 """a[: upper_fn(x) : step_fn(x)]""", 

1295 """a[:: step_fn(x)]""", 

1296 """a[: upper_fn(x) :]""", 

1297 """a[: upper_fn(x) : 2 + 1]""", 

1298 """a[:]""", 

1299 """a[::]""", 

1300 """a[1:]""", 

1301 """a[1::]""", 

1302 """a[:2]""", 

1303 """a[:2:]""", 

1304 """a[::3]""", 

1305 """a[1:2]""", 

1306 """a[1:2:]""", 

1307 """a[:2:3]""", 

1308 """a[1:2:3]""", 

1309 # * and **, inside and outside function calls. 

1310 """a = b * c""", 

1311 """a = b ** c""", 

1312 """f(*args)""", 

1313 """f(**kwargs)""", 

1314 """f(*args, **kwargs)""", 

1315 """f(a, *args)""", 

1316 """f(a=2, *args)""", 

1317 # Calls... 

1318 """f(-1)""", 

1319 """f(-1 < 2)""", 

1320 """f(1)""", 

1321 """f(2 * 3)""", 

1322 """f(2 + name)""", 

1323 """f(a)""", 

1324 """f(a.b)""", 

1325 """f(a=2 + 3, b=4 - 5, c= 6 * 7, d=8 / 9, e=10 // 11)""", 

1326 """f(a[1 + 2])""", 

1327 """f({key: 1})""", 

1328 """t = (0,)""", 

1329 """x, y = y, x""", 

1330 # Dicts... 

1331 """d = {key: 1}""", 

1332 """d['key'] = a[i]""", 

1333 # Trailing comments: expect two spaces. 

1334 """whatever # comment""", 

1335 """whatever # comment""", 

1336 """whatever # comment""", 

1337 # Word ops... 

1338 """v1 = v2 and v3 if v3 not in v4 or v5 in v6 else v7""", 

1339 """print(v7 for v8 in v9)""", 

1340 # Unary ops... 

1341 """v = -1 if a < b else -2""", 

1342 # Returns... 

1343 """return -1""", 

1344 ) 

1345 fails = 0 

1346 for i, contents in enumerate(table): 

1347 description = f"{tag} part {i}" 

1348 contents, tokens, tree = self.make_data(contents, description) 

1349 expected = self.blacken(contents) 

1350 results = self.beautify(contents, tokens, tree, filename=description) 

1351 message = ( 

1352 f"\n" 

1353 f" contents: {contents.rstrip()}\n" 

1354 f" black: {expected.rstrip()}\n" 

1355 f" orange: {results.rstrip()}") 

1356 if results != expected: # pragma: no cover 

1357 fails += 1 

1358 print(f"Fail: {fails}\n{message}") 

1359 self.assertEqual(fails, 0) 

1360 #@+node:ekr.20200210050646.1: *4* TestOrange.test_return 

1361 def test_return(self): 

1362 

1363 contents = """return []""" 

1364 expected = self.blacken(contents) 

1365 contents, tokens, tree = self.make_data(contents) 

1366 results = self.beautify(contents, tokens, tree) 

1367 self.assertEqual(results, expected) 

1368 #@+node:ekr.20200107174742.1: *4* TestOrange.test_single_quoted_string 

1369 def test_single_quoted_string(self): 

1370 

1371 contents = """print('hi')""" 

1372 # blacken suppresses string normalization. 

1373 expected = self.blacken(contents) 

1374 contents, tokens, tree = self.make_data(contents) 

1375 results = self.beautify(contents, tokens, tree) 

1376 self.assertEqual(results, expected) 

1377 #@+node:ekr.20200117180956.1: *4* TestOrange.test_split_lines 

1378 def test_split_lines(self): 

1379 

1380 line_length = 40 # For testing. 

1381 table = ( 

1382 #1234567890x1234567890x1234567890x1234567890x 

1383 """\ 

1384 if 1: 

1385 print('1111111111', '2222222222', '3333333333') 

1386 """, 

1387 """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc')""", 

1388 """print('aaaaaaaaaaaaa', 'bbbbbbbbbbbbbb', 'cccccc', 'ddddddddddddddddd')""", 

1389 ) 

1390 fails = 0 

1391 for contents in table: 

1392 contents, tokens, tree = self.make_data(contents) 

1393 if 0: 

1394 dump_tokens(tokens) 

1395 # dump_tree(tokens, tree) 

1396 expected = self.blacken(contents, line_length=line_length) 

1397 results = self.beautify(contents, tokens, tree, 

1398 max_join_line_length=line_length, 

1399 max_split_line_length=line_length, 

1400 ) 

1401 message = ( 

1402 f"\n" 

1403 f" contents: {contents!s}\n" 

1404 f" black: {expected!s}\n" 

1405 f" orange: {results!s}") 

1406 if results != expected: # pragma: no cover 

1407 fails += 1 

1408 print(f"Fail: {fails}\n{message}") 

1409 self.assertEqual(fails, 0) 

1410 #@+node:ekr.20200210073227.1: *4* TestOrange.test_split_lines_2 

1411 def test_split_lines_2(self): 

1412 

1413 line_length = 40 # For testing. 

1414 # Different from how black handles things. 

1415 contents = """\ 

1416 if not any([z.kind == 'lt' for z in line_tokens]): 

1417 return False 

1418 """ 

1419 expected = """\ 

1420 if not any( 

1421 [z.kind == 'lt' for z in line_tokens]): 

1422 return False 

1423 """ 

1424 fails = 0 

1425 contents, tokens, tree = self.make_data(contents) 

1426 # expected = self.blacken(contents, line_length=line_length) 

1427 expected = textwrap.dedent(expected) 

1428 results = self.beautify(contents, tokens, tree, 

1429 max_join_line_length=line_length, 

1430 max_split_line_length=line_length, 

1431 ) 

1432 message = ( 

1433 f"\n" 

1434 f" contents: {contents!r}\n" 

1435 f" expected: {expected!r}\n" 

1436 f" got: {results!r}") 

1437 if results != expected: # pragma: no cover 

1438 fails += 1 

1439 print(f"Fail: {fails}\n{message}") 

1440 self.assertEqual(fails, 0) 

1441 #@+node:ekr.20200219144837.1: *4* TestOrange.test_split_lines_3 

1442 def test_split_lines_3(self): 

1443 

1444 line_length = 40 # For testing. 

1445 # Different from how black handles things. 

1446 contents = """print('eee', ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'), 'jjjjjjj', 'kkkkkk')""" 

1447 # This is a bit different from black, but it's good enough for now. 

1448 expected = """\ 

1449 print( 

1450 'eee', 

1451 ('fffffff, ggggggg', 'hhhhhhhh', 'iiiiiii'), 

1452 'jjjjjjj', 

1453 'kkkkkk', 

1454 ) 

1455 """ 

1456 fails = 0 

1457 contents, tokens, tree = self.make_data(contents) 

1458 # expected = self.blacken(contents, line_length=line_length) 

1459 expected = textwrap.dedent(expected) 

1460 results = self.beautify(contents, tokens, tree, 

1461 max_join_line_length=line_length, 

1462 max_split_line_length=line_length, 

1463 ) 

1464 message = ( 

1465 f"\n" 

1466 f" contents: {contents!r}\n" 

1467 f" expected: {expected!r}\n" 

1468 f" got: {results!r}") 

1469 if results != expected: # pragma: no cover 

1470 fails += 1 

1471 print(f"Fail: {fails}\n{message}") 

1472 self.assertEqual(fails, 0) 

1473 #@+node:ekr.20200119155207.1: *4* TestOrange.test_sync_tokens 

1474 def test_sync_tokens(self): 

1475 

1476 contents = """if x == 4: pass""" 

1477 # At present Orange doesn't split lines... 

1478 expected = """if x == 4: pass""" 

1479 contents, tokens, tree = self.make_data(contents) 

1480 expected = self.adjust_expected(expected) 

1481 results = self.beautify(contents, tokens, tree) 

1482 self.assertEqual(results, expected) 

1483 #@+node:ekr.20200209161226.1: *4* TestOrange.test_ternary 

1484 def test_ternary(self): 

1485 

1486 contents = """print(2 if name == 'class' else 1)""" 

1487 contents, tokens, tree = self.make_data(contents) 

1488 expected = contents 

1489 results = self.beautify(contents, tokens, tree) 

1490 self.assertEqual(results, expected) 

1491 #@+node:ekr.20200211093359.1: *4* TestOrange.test_verbatim 

1492 def test_verbatim(self): 

1493 

1494 line_length = 40 # For testing. 

1495 contents = """\ 

1496 #@@nobeautify 

1497 

1498 def addOptionsToParser(self, parser, trace_m): 

1499 

1500 add = parser.add_option 

1501 

1502 def add_bool(option, help, dest=None): 

1503 add(option, action='store_true', dest=dest, help=help) 

1504 

1505 add_bool('--diff', 'use Leo as an external git diff') 

1506 # add_bool('--dock', 'use a Qt dock') 

1507 add_bool('--fullscreen', 'start fullscreen') 

1508 add_bool('--init-docks', 'put docks in default positions') 

1509 # Multiple bool values. 

1510 add('-v', '--version', action='store_true', 

1511 help='print version number and exit') 

1512 

1513 # From leoAtFile.py 

1514 noDirective = 1 # not an at-directive. 

1515 allDirective = 2 # at-all (4.2) 

1516 docDirective = 3 # @doc. 

1517 

1518 #@@beautify 

1519 """ 

1520 contents, tokens, tree = self.make_data(contents) 

1521 expected = contents 

1522 results = self.beautify(contents, tokens, tree, 

1523 max_join_line_length=line_length, 

1524 max_split_line_length=line_length, 

1525 ) 

1526 self.assertEqual(results, expected, msg=contents) 

1527 #@+node:ekr.20200729083027.1: *4* TestOrange.verbatim2 

1528 def test_verbatim2(self): 

1529 

1530 contents = """\ 

1531 #@@beautify 

1532 #@@nobeautify 

1533 #@+at Starts doc part 

1534 # More doc part. 

1535 # The @c ends the doc part. 

1536 #@@c 

1537 """ 

1538 contents, tokens, tree = self.make_data(contents) 

1539 expected = contents 

1540 results = self.beautify(contents, tokens, tree) 

1541 self.assertEqual(results, expected, msg=contents) 

1542 #@+node:ekr.20200211094209.1: *4* TestOrange.test_verbatim_with_pragma 

1543 def test_verbatim_with_pragma(self): 

1544 

1545 line_length = 40 # For testing. 

1546 contents = """\ 

1547 #pragma: no beautify 

1548 

1549 def addOptionsToParser(self, parser, trace_m): 

1550 

1551 add = parser.add_option 

1552 

1553 def add_bool(option, help, dest=None): 

1554 add(option, action='store_true', dest=dest, help=help) 

1555 

1556 add_bool('--diff', 'use Leo as an external git diff') 

1557 # add_bool('--dock', 'use a Qt dock') 

1558 add_bool('--fullscreen', 'start fullscreen') 

1559 add_other('--window-size', 'initial window size (height x width)', m='SIZE') 

1560 add_other('--window-spot', 'initial window position (top x left)', m='SPOT') 

1561 # Multiple bool values. 

1562 add('-v', '--version', action='store_true', 

1563 help='print version number and exit') 

1564 

1565 # pragma: beautify 

1566 """ 

1567 contents, tokens, tree = self.make_data(contents) 

1568 expected = contents 

1569 results = self.beautify(contents, tokens, tree, 

1570 max_join_line_length=line_length, 

1571 max_split_line_length=line_length, 

1572 ) 

1573 self.assertEqual(results, expected, msg=contents) 

1574 #@-others 

#@+node:ekr.20191231130208.1: *3* class TestReassignTokens (BaseTest)
class TestReassignTokens(BaseTest):
    """Test cases for the ReassignTokens class."""
    #@+others
    #@+node:ekr.20191231130320.1: *4* test_reassign_tokens (to do)
    def test_reassign_tokens(self):
        """Placeholder test; no assertions yet (headline says "to do")."""
        pass
    #@+node:ekr.20191231130334.1: *4* test_nearest_common_ancestor
    def test_nearest_common_ancestor(self):
        """Exercise make_data on an expression whose tokens span several nodes."""
        # make_data builds the token/tree links; presumably its internal
        # checks are the real assertions here — confirm against BaseTest.
        source = """name='uninverted %s' % d.name()"""
        self.make_data(source)
    #@-others

1588#@+node:ekr.20191227051737.1: *3* class TestTOG (BaseTest) 

1589class TestTOG(BaseTest): 

1590 """ 

1591 Tests for the TokenOrderGenerator class. 

1592 

1593 These tests call BaseTest.make_data, which creates the two-way links 

1594 between tokens and the parse tree. 

1595 

1596 The asserts in tog.sync_tokens suffice to create strong unit tests. 

1597 """ 

1598 

1599 debug_list = ['unit-test'] 

1600 

1601 #@+others 

1602 #@+node:ekr.20210318213945.1: *4* TestTOG.Recent bugs & features 

1603 #@+node:ekr.20210321172902.1: *5* test_bug_1851 

1604 def test_bug_1851(self): 

1605 

1606 contents = r'''\ 

1607 def foo(a1): 

1608 pass 

1609 ''' 

1610 contents, tokens, tree = self.make_data(contents) 

1611 #@+node:ekr.20210914161519.1: *5* test_bug_2171 

1612 def test_bug_2171(self): 

1613 

1614 import sys 

1615 if sys.version_info < (3, 9, 0): 

1616 self.skipTest('Requires Python 3.9') # pragma: no cover 

1617 

1618 contents = "'HEAD:%s' % g.os_path_join( *(relative_path + [filename]) )" 

1619 contents, tokens, tree = self.make_data(contents) 

1620 #@+node:ekr.20210318213133.1: *5* test_full_grammar 

1621 def test_full_grammar(self): 

1622 # Load py3_test_grammar.py. 

1623 dir_ = os.path.dirname(__file__) 

1624 path = os.path.abspath(os.path.join(dir_, '..', 'py3_test_grammar.py')) 

1625 assert os.path.exists(path), path 

1626 if py_version < (3, 8): 

1627 self.skipTest('Requires Python 3.8 or above') # pragma: no cover 

1628 # Verify that leoAst can parse the file. 

1629 contents = read_file(path) 

1630 self.make_data(contents) 

1631 #@+node:ekr.20210318214057.1: *5* test_line_315 

1632 def test_line_315(self): 

1633 

1634 # 

1635 # Known bug: position-only args exist in Python 3.8, 

1636 # but there is no easy way of syncing them. 

1637 # This bug will not be fixed. 

1638 # The workaround is to require Python 3.9 

1639 if py_version >= (3, 9): 

1640 contents = '''\ 

1641 f(1, x=2, 

1642 *[3, 4], y=5) 

1643 ''' 

1644 elif 1: # Expected order. 

1645 contents = '''f(1, *[a, 3], x=2, y=5)''' # pragma: no cover 

1646 else: # Legacy. 

1647 contents = '''f(a, *args, **kwargs)''' 

1648 contents, tokens, tree = self.make_data(contents) 

1649 #@+node:ekr.20210320095504.8: *5* test_line_337 

1650 def test_line_337(self): 

1651 

1652 if py_version >= (3, 8): # Requires neither line_no nor col_offset fields. 

1653 contents = '''def f(a, b:1, c:2, d, e:3=4, f=5, *g:6, h:7, i=8, j:9=10, **k:11) -> 12: pass''' 

1654 else: 

1655 contents = '''def f(a, b, d=4, *arg, **keys): pass''' # pragma: no cover 

1656 contents, tokens, tree = self.make_data(contents) 

1657 #@+node:ekr.20210320065202.1: *5* test_line_483 

1658 def test_line_483(self): 

1659 

1660 if py_version < (3, 8): 

1661 # Python 3.8: https://bugs.python.org/issue32117 

1662 self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment") # pragma: no cover 

1663 contents = '''def g3(): return 1, *return_list''' 

1664 contents, tokens, tree = self.make_data(contents) 

1665 #@+node:ekr.20210320065344.1: *5* test_line_494 

1666 def test_line_494(self): 

1667 

1668 """ 

1669 https://docs.python.org/3/whatsnew/3.8.html#other-language-changes 

1670 

1671 Generalized iterable unpacking in yield and return statements no longer 

1672 requires enclosing parentheses. This brings the yield and return syntax 

1673 into better agreement with normal assignment syntax. 

1674 """ 

1675 if py_version < (3, 8): 

1676 # Python 3.8: https://bugs.python.org/issue32117 

1677 self.skipTest(f"Python {v1}.{v2} does not support generalized iterable assignment") # pragma: no cover 

1678 contents = '''def g2(): yield 1, *yield_list''' 

1679 contents, tokens, tree = self.make_data(contents) 

1680 #@+node:ekr.20210319130349.1: *5* test_line_875 

1681 def test_line_875(self): 

1682 

1683 contents = '''list((x, y) for x in 'abcd' for y in 'abcd')''' 

1684 contents, tokens, tree = self.make_data(contents) 

1685 #@+node:ekr.20210319130616.1: *5* test_line_898 

1686 def test_line_898(self): 

1687 

1688 contents = '''g = ((i,j) for i in range(x) if t for j in range(x))''' 

1689 contents, tokens, tree = self.make_data(contents) 

1690 #@+node:ekr.20210320085705.1: *5* test_walrus_operator 

1691 def test_walrus_operator(self): 

1692 

1693 if py_version < (3, 8): 

1694 self.skipTest(f"Python {v1}.{v2} does not support assignment expressions") # pragma: no cover 

1695 contents = '''if (n := len(a)) > 10: pass''' 

1696 contents, tokens, tree = self.make_data(contents) 

1697 #@+node:ekr.20191227052446.10: *4* TestTOG.Contexts... 

1698 #@+node:ekr.20191227052446.11: *5* test_ClassDef 

1699 def test_ClassDef(self): 

1700 contents = """\ 

1701 class TestClass1: 

1702 pass 

1703 

1704 def decorator(): 

1705 pass 

1706 

1707 @decorator 

1708 class TestClass2: 

1709 pass 

1710 

1711 @decorator 

1712 class TestClass(base1, base2): 

1713 pass 

1714 """ 

1715 self.make_data(contents) 

1716 #@+node:ekr.20191227052446.12: *5* test_ClassDef2 

1717 def test_ClassDef2(self): 

1718 contents = r'''\ 

1719 """ds 1""" 

1720 class TestClass: 

1721 """ds 2""" 

1722 def long_name(a, b=2): 

1723 """ds 3""" 

1724 print('done') 

1725 ''' 

1726 self.make_data(contents) 

1727 #@+node:ekr.20191227052446.13: *5* test_FunctionDef 

1728 def test_FunctionDef(self): 

1729 contents = r"""\ 

1730 def run(fileName=None, pymacs=None): 

1731 pass 

1732 """ 

1733 self.make_data(contents) 

1734 #@+node:ekr.20200111171738.1: *5* test_FunctionDef_with_annotations 

1735 def test_FunctionDef_with_annotations(self): 

1736 contents = r"""\ 

1737 def foo(a: 'x', b: 5 + 6, c: list) -> max(2, 9): 

1738 pass 

1739 """ 

1740 self.make_data(contents) 

1741 # contents, tokens, tree = self.make_data(contents) 

1742 # dump_ast(tree) 

1743 #@+node:ekr.20210802162650.1: *5* test_FunctionDef_with_posonly_args 

1744 def test_FunctionDef_with_posonly_args(self): 

1745 

1746 import sys 

1747 if sys.version_info < (3, 9, 0): 

1748 self.skipTest('Requires Python 3.9') # pragma: no cover 

1749 

1750 # From PEP 570 

1751 contents = r"""\ 

1752 def pos_only_arg(arg, /): 

1753 pass 

1754 def kwd_only_arg(*, arg): 

1755 pass 

1756 def combined_example(pos_only, /, standard, *, kwd_only): 

1757 pass 

1758 """ 

1759 self.make_data(contents) 

1760 #@+node:ekr.20191227052446.14: *4* TestTOG.Expressions & operators... 

1761 #@+node:ekr.20191227052446.15: *5* test_attribute 

1762 def test_attribute(self): 

1763 contents = r"""\ 

1764 open(os.devnull, "w") 

1765 """ 

1766 self.make_data(contents) 

1767 #@+node:ekr.20191227052446.16: *5* test_CompareOp 

1768 def test_CompareOp(self): 

1769 contents = r"""\ 

1770 if a and not b and c: 

1771 pass 

1772 """ 

1773 self.make_data(contents) 

1774 #@+node:ekr.20191227052446.17: *5* test_Dict_1 

1775 def test_Dict(self): 

1776 contents = r"""\ 

1777 d = {'a' if x else 'b': True,} 

1778 """ 

1779 self.make_data(contents) 

1780 #@+node:ekr.20200111191153.1: *5* test_Dict_2 

1781 def test_Dict_2(self): 

1782 contents = r"""\ 

1783 d = {} 

1784 """ 

1785 self.make_data(contents) 

1786 #@+node:ekr.20191227052446.18: *5* test_DictComp 

1787 def test_DictComp(self): 

1788 # leoGlobals.py, line 3028. 

1789 contents = r"""\ 

1790 d2 = {val: key for key, val in d} 

1791 """ 

1792 self.make_data(contents) 

1793 #@+node:ekr.20200112042410.1: *5* test_ExtSlice 

1794 def test_ExtSlice(self): 

1795 contents = r"""a [1, 2: 3]""" 

1796 self.make_data(contents) 

1797 #@+node:ekr.20191227052446.19: *5* test_ListComp 

    def test_ListComp(self):
        """TOG round-trip test: list comprehension inside a call."""
        # ListComp and comprehension.
        contents = r"""\
any([p2.isDirty() for p2 in p.subtree()])
"""
        self.make_data(contents)

1804 #@+node:ekr.20191227052446.20: *5* test_NameConstant 

    def test_NameConstant(self):
        """TOG round-trip test: None and a builtin name as keyword values."""
        contents = r"""\
run(a=None, b=str)
"""
        self.make_data(contents)

1810 #@+node:ekr.20191227052446.21: *5* test_Operator: semicolon 

    def test_op_semicolon(self):
        """TOG round-trip test: trailing semicolon on a statement."""
        contents = r"""\
print('c');
print('d')
"""
        self.make_data(contents)

1817 #@+node:ekr.20191227052446.22: *5* test_Operator: semicolon between statements 

    def test_op_semicolon2(self):
        """TOG round-trip test: semicolons separating statements on one line."""
        contents = r"""\
a = 1 ; b = 2
print('a') ; print('b')
"""
        self.make_data(contents)

1824 #@+node:ekr.20200111194454.1: *5* test_Set 

    def test_Set(self):
        """TOG round-trip test: set literal."""
        contents = """{'a', 'b'}"""
        self.make_data(contents)

1828 #@+node:ekr.20200111195654.1: *5* test_SetComp 

    def test_SetComp(self):
        """TOG round-trip test: set comprehension with two for-clauses and an if."""
        contents = """aSet = { (x, y) for x in r for y in r if x < y }"""
        self.make_data(contents)

1832 #@+node:ekr.20191227052446.23: *5* test_UnaryOp 

    def test_UnaryOp(self):
        """TOG round-trip test: unary minus on a parenthesized operand."""
        contents = r"""\
print(-(2))
"""
        self.make_data(contents)

1838 #@+node:ekr.20191227052446.65: *4* TestTOG.f-strings.... 

1839 #@+node:ekr.20191227052446.66: *5* test_fstring01: complex Call 

    def test_fstring1(self):
        """TOG round-trip test: f-string joined to a plain string inside a call."""
        # Line 1177, leoApp.py
        contents = r"""\
print(
    message = f"line 1: {old_id!r}\n" "line 2\n"
)
print('done')
"""
        self.make_data(contents)

1849 #@+node:ekr.20191227052446.67: *5* test_fstring02: Ternary 

    def test_fstring2(self):
        """TOG round-trip test: ternary expression inside an f-string."""
        contents = r"""\
func(f"{b if not cond1 else ''}")
"""
        self.make_data(contents)

1855 #@+node:ekr.20191227052446.68: *5* test_fstring03: single f-string 

    def test_fstring3(self):
        """TOG round-trip test: a single simple f-string."""
        contents = r"""\
print(f'{7.1}')
print('end')
"""
        self.make_data(contents)

1862 #@+node:ekr.20191227052446.69: *5* test_fstring04: f-string + plain 

    def test_fstring4(self):
        """TOG round-trip test: f-string concatenated with a plain string."""
        contents = r"""\
print(f'{7.1}' 'p7.2')
print('end')
"""
        self.make_data(contents)

1869 #@+node:ekr.20191227052446.70: *5* test_fstring05: plain + f-string 

    def test_fstring5(self):
        """TOG round-trip test: plain string concatenated with an f-string."""
        contents = r"""\
print('p1' f'{f2}')
'end'
"""
        self.make_data(contents)

1876 #@+node:ekr.20191227052446.71: *5* test_fstring06: f-string + fstring 

    def test_fstring6(self):
        """TOG round-trip test: two concatenated f-strings."""
        contents = r"""\
print(f'{f1}' f'{f2}')
'end'
"""
        self.make_data(contents)

1883 #@+node:ekr.20191227052446.72: *5* test_fstring07: many 

    def test_fstring7(self):
        """TOG round-trip test: many mixed plain/f-string concatenations."""
        contents = r"""\
print('s1', f'{f2}' f'f3' f'{f4}' 's5')
'end'
"""
        self.make_data(contents)

1890 #@+node:ekr.20191227052446.73: *5* test_fstring08: ternary op 

    def test_fstring8(self):
        """TOG round-trip test: ternary operator inside an f-string expression."""
        # leoFind.py line 856
        contents = r"""\
a = f"{'a' if x else 'b'}"
f()

# Pass
# print(f"{'a' if x else 'b'}")
"""
        self.make_data(contents)

1901 #@+node:ekr.20191227052446.74: *5* test_fstring09: leoFind.py line 856 

    def test_fstring9(self):
        """TOG round-trip test: multi-line string/f-string concatenation in a call."""
        contents = r"""\
func(
    "Isearch"
    f"{' Backward' if True else ''}"
)
print('done')
"""
        self.make_data(contents)

1911 #@+node:ekr.20191227052446.75: *5* test_fstring10: leoFind.py: line 861 

    def test_fstring10(self):
        """TOG round-trip test: f-string joined to a plain string argument."""
        # leoFind.py: line 861
        contents = r"""\
one(f"{'B'}" ": ")
"""
        self.make_data(contents)

1918 #@+node:ekr.20191227052446.76: *5* test_fstring11: joins 

    def test_fstring11(self):
        """TOG round-trip test: two joined f-strings, each with an expression."""
        contents = r"""\
print(f'x3{e3+1}y3' f'x4{e4+2}y4')
print('done')
"""
        self.make_data(contents)

1925 #@+node:ekr.20191227052446.77: *6* more 

1926 # Single f-strings. 

1927 # 'p1' ; 

1928 # f'f1' ; 

1929 # f'x1{e1}y1' ; 

1930 # f'x2{e2+1}y2{e2+2}z2' ; 

1931 

 1932	 # Concatenated strings... 

1933 # 'p2', 'p3' ; 

1934 # f'f2' 'f3' ; 

1935 

1936 # f'x5{e5+1}y5{e5+1}z5' f'x6{e6+1}y6{e6+1}z6' ; 

1937 #@+node:ekr.20191227052446.78: *5* test_fstring12: joins + 1 f-expr 

    def test_fstring12(self):
        """TOG round-trip test: joins with one f-expression per f-string."""
        contents = r"""\
print(f'x1{e1}y1', 'p1')
print(f'x2{e2}y2', f'f2')
print(f'x3{e3}y3', f'x4{e4}y4')
print('end')
"""
        self.make_data(contents)

1946 #@+node:ekr.20191227052446.79: *5* test_fstring13: joins + 2 f-exprs 

    def test_fstring13(self):
        """TOG round-trip test: joins with two f-expressions per f-string."""
        contents = r"""\
print(f'x1{e1}y1{e2}z1', 'p1')
print(f'x2{e3}y2{e3}z2', f'f2')
print(f'x3{e4}y3{e5}z3', f'x4{e6}y4{e7}z4')
print('end')
"""
        self.make_data(contents)

1955 #@+node:ekr.20191227052446.80: *5* test_fstring14: complex, with commas 

    def test_fstring14(self):
        """TOG round-trip test: generator expression with commas inside an f-string."""
        contents = r"""\
print(f"{list(z for z in ('a', 'b', 'c') if z != 'b')}")
"""
        self.make_data(contents)

1961 #@+node:ekr.20191227052446.81: *5* test_fstring15 

    def test_fstring15(self):
        """TOG round-trip test: f-string mixing a name and a constant expression."""
        contents = r"""\
print(f"test {a}={2}")
print('done')
"""
        self.make_data(contents)

1968 #@+node:ekr.20191227052446.83: *5* test_fstring16: simple 

    def test_fstring16(self):
        """TOG round-trip test: bare string/f-string expression statements with semicolons."""
        contents = r"""\
'p1' ;
f'f1' ;
'done' ;
"""
        self.make_data(contents)

1976 #@+node:ekr.20191227052446.82: *5* test_regex_fstring 

    def test_regex_fstring(self):
        """TOG round-trip test: raw triple-quoted f-string used as a regex."""
        # Line 7709, leoGlobals.py
        contents = r'''\
fr"""{kinds}://[^\s'"]+[\w=/]"""
'''
        self.make_data(contents)

1983 #@+node:ekr.20191227052446.32: *4* TestTOG.If... 

1984 #@+node:ekr.20191227052446.33: *5* test_from leoTips.py 

    def test_if1(self):
        """TOG round-trip test: for/else with nested for, then a plain if."""
        # Line 93, leoTips.py
        contents = r"""\
self.make_data(contents)
unseen = [i for i in range(5) if i not in seen]
for issue in data:
    for a in aList:
        print('a')
    else:
        print('b')
if b:
    print('c')
"""
        self.make_data(contents)

1999 #@+node:ekr.20191227052446.34: *5* test_if + tuple 

    def test_if2(self):
        """TOG round-trip test: tuple unpacking in a for target."""
        contents = r"""\
for i, j in b:
    pass
"""
        self.make_data(contents)

2006 #@+node:ekr.20191227052446.35: *5* test_if + unary op 

    def test_if3(self):
        """TOG round-trip test: unary op in an `if` condition."""
        contents = r"""\
if -(2):
    pass
"""
        self.make_data(contents)

2013 #@+node:ekr.20191227052446.36: *5* test_if, elif 

    def test_if4(self):
        """TOG round-trip test: if/elif chains without else clauses."""
        contents = r"""\
if 1:
    print('a')
elif 2:
    print('b')
elif 3:
    print('c')
    print('d')
print('-')
if 1:
    print('e')
elif 2:
    print('f')
    print('g')
"""
        self.make_data(contents)

2031 #@+node:ekr.20191227052446.37: *5* test_if, elif + 2 

    def test_if5(self):
        """TOG round-trip test: if/elif followed by a trailing statement."""
        contents = r"""\
if 1:
    pass
elif 2:
    pass
pass
"""
        self.make_data(contents)

2041 #@+node:ekr.20191227052446.38: *5* test_if, elif, else 

    def test_if6(self):
        """TOG round-trip test: if/elif/else, two statements per branch."""
        contents = r"""\
if (a):
    print('a1')
    print('a2')
elif b:
    print('b1')
    print('b2')
else:
    print('c1')
    print('c2')
"""
        self.make_data(contents)

2055 #@+node:ekr.20191227052446.39: *5* test_if, else 

    def test_if7(self):
        """TOG round-trip test: simple if/else."""
        contents = r"""\
if 1:
    print('a')
else:
    print('b')
"""
        self.make_data(contents)

2064 #@+node:ekr.20191227052446.40: *5* test_if, else, if 

    def test_if8(self):
        """TOG round-trip test: a nested `if` inside an else clause."""
        contents = r"""\
if 1:
    print('a')
else:
    if 2:
        print('b')
"""
        self.make_data(contents)

2074 #@+node:ekr.20191227052446.41: *5* test_Nested If's 

    def test_if9(self):
        """TOG round-trip test: nested ifs under both branches."""
        contents = r"""\
if a:
    if b:
        print('b')
else:
    if d:
        print('d')
"""
        self.make_data(contents)

2085 #@+node:ekr.20191227052446.42: *5* test_ternary + if 

    def test_if10(self):
        """TOG round-trip test: ternary assignment inside an if branch."""
        contents = r"""\
if 1:
    a = 'class' if cond else 'def'
    # find_pattern = prefix + ' ' + word
    print('1')
else:
    print('2')
"""
        self.make_data(contents)

2096 #@+node:ekr.20191227145620.1: *4* TestTOG.Miscellaneous... 

2097 #@+node:ekr.20200206041753.1: *5* test_comment_in_set_links 

    def test_comment_in_set_links(self):
        """TOG round-trip test: a comment inside a function body."""
        contents = """
def spam():
    # comment
    pass
"""
        self.make_data(contents)

2105 #@+node:ekr.20200112065944.1: *5* test_ellipsis_1 

    def test_ellipsis_1(self):
        """TOG round-trip test: ellipsis as a function body."""
        contents = """
def spam():
    ...
"""
        self.make_data(contents)

2112 #@+node:ekr.20200112070228.1: *5* test_ellipsis_2 

    def test_ellipsis_2(self):
        """TOG round-trip test: ellipsis inside a type annotation."""
        contents = """
def partial(func: Callable[..., str], *args):
    pass
"""
        self.make_data(contents)

2119 #@+node:ekr.20191227075951.1: *5* test_end_of_line 

    def test_end_of_line(self):
        """TOG round-trip test: a file consisting only of a comment."""
        self.make_data("""# Only a comment.""")

2122 #@+node:ekr.20191227052446.50: *4* TestTOG.Plain Strings... 

2123 #@+node:ekr.20191227052446.52: *5* test_\x and \o escapes 

    def test_escapes(self):
        """TOG round-trip test: hex and octal escape sequences."""
        # Line 4609, leoGlobals.py
        contents = r"""\
print("\x7e" "\0777") # tilde.
print('done')
"""
        self.make_data(contents)

2131 #@+node:ekr.20191227052446.53: *5* test_backslashes in docstring 

    def test_backslashes(self):
        """TOG round-trip test: doubled backslashes inside a docstring."""
        # leoGlobals.py.
        contents = r'''\
class SherlockTracer:
    """before\\after"""
'''
        self.make_data(contents)

2139 #@+node:ekr.20191227052446.54: *5* test_bs/nl 

    def test_bs_nl(self):
        """TOG round-trip test: backslash-newline inside a string literal."""
        contents = r"""\
print('hello\
world')
"""
        self.make_data(contents)

2146 #@+node:ekr.20191227052446.55: *5* test_bytes bs-x 

    def test_bytes(self):
        """TOG round-trip test: bytes literal with a hex escape."""
        # Line 201, leoApp.py
        contents = r"""\
print(b'\xfe')
print('done')
"""
        self.make_data(contents)

2154 #@+node:ekr.20191227052446.56: *5* test_empty string 

    def test_empyt_string(self):
        """TOG round-trip test: empty string literal.

        NOTE(review): method name has a typo ('empyt'); kept to preserve
        the public test name.
        """
        contents = r"""\
self.s = ''
self.i = 0
"""
        self.make_data(contents)

2161 #@+node:ekr.20191227052446.57: *5* test_escaped string delims 

    def test_escaped_delims(self):
        """TOG round-trip test: escaped quote inside a string."""
        contents = r"""\
print("a\"b")
"""
        self.make_data(contents)

2167 #@+node:ekr.20191227052446.58: *5* test_escaped strings 

    def test_escaped_strings(self):
        """TOG round-trip test: all simple backslash escapes."""
        contents = r"""\
f1(a='\b', b='\n', t='\t')
f2(f='\f', r='\r', v='\v')
f3(bs='\\')
"""
        self.make_data(contents)

2175 #@+node:ekr.20191227052446.59: *5* test_f-string join 

    def test_fstring_join(self):
        """TOG round-trip test: f-string with \\n joined to a plain string."""
        # The first newline causes the fail.
        contents = r"""\
print(f"a {old_id!r}\n" "b\n")
print('done')
"""
        self.make_data(contents)

2183 #@+node:ekr.20191227052446.64: *5* test_potential_fstring 

    def test_potential_fstring(self):
        """TOG round-trip test: %-format string that fstringify could convert."""
        contents = r"""\
print('test %s=%s'%(a, 2))
print('done')
"""
        self.make_data(contents)

2190 #@+node:ekr.20191227052446.60: *5* test_raw docstring 

    def test_raw_docstring(self):
        """TOG round-trip test: raw triple-quoted string as an argument."""
        contents = r'''\
# Line 1619 leoFind.py
print(r"""DS""")
'''
        self.make_data(contents)

2197 #@+node:ekr.20191227052446.61: *5* test_raw escaped strings 

    def test_raw_escapes(self):
        """TOG round-trip test: backslashes inside raw strings."""
        contents = r"""\
r1(a=r'\b', b=r'\n', t=r'\t')
r2(f=r'\f', r=r'\r', v=r'\v')
r3(bs=r'\\')
"""
        self.make_data(contents)

2205 #@+node:ekr.20191227052446.62: *5* test_single quote 

    def test_single_quote(self):
        """TOG round-trip test: double quote inside a single-quoted string."""
        # leoGlobals.py line 806.
        contents = r"""\
print('"')
"""
        self.make_data(contents)

2212 #@+node:ekr.20191227052446.63: *5* test_string concatenation_1 

    def test_concatenation_1(self):
        """TOG round-trip test: adjacent plain string literals."""
        contents = r"""\
print('a' 'b')
print('c')
"""
        self.make_data(contents)

2219 #@+node:ekr.20200111042825.1: *5* test_string_concatenation_2 

    def test_string_concatenation_2(self):
        """TOG round-trip test: %-format string inside a return (crash in leoCheck.py)."""
        # Crash in leoCheck.py.
        contents = """return self.Type('error', 'no member %s' % ivar)"""
        self.make_data(contents)

2224 #@+node:ekr.20191227052446.43: *4* TestTOG.Statements... 

2225 #@+node:ekr.20200112075707.1: *5* test_AnnAssign 

    def test_AnnAssign(self):
        """TOG round-trip test: annotated assignment."""
        contents = """x: int = 0"""
        self.make_data(contents)

2229 #@+node:ekr.20200112071833.1: *5* test_AsyncFor 

    def test_AsyncFor(self):
        """TOG round-trip test: async for with an else clause."""
        # This may require Python 3.7.
        contents = """\
async def commit(session, data):
    async for z in session.transaction():
        await z(data)
    else:
        print('oops')
"""
        self.make_data(contents)

2240 #@+node:ekr.20200111175043.1: *5* test_AsyncFunctionDef 

    def test_AsyncFunctionDef(self):
        """TOG round-trip test: decorated async def with a return annotation."""
        contents = """\
@my_decorator
async def count() -> 42:
    print("One")
    await asyncio.sleep(1)
"""
        self.make_data(contents)

2249 #@+node:ekr.20200112073151.1: *5* test_AsyncWith 

    def test_AsyncWith(self):
        """TOG round-trip test: async with statement."""
        contents = """\
async def commit(session, data):
    async with session.transaction():
        await session.update(data)
"""
        self.make_data(contents)

2257 #@+node:ekr.20191227052446.44: *5* test_Call 

    def test_Call(self):
        """TOG round-trip test: call mixing positional, keyword, *args and **kwargs."""
        contents = """func(a, b, one='one', two=2, three=4+5, *args, **kwargs)"""
        # contents = """func(*args, **kwargs)"""
        # f1(a,b=2)
        # f2(1 + 2)
        # f3(arg, *args, **kwargs)
        # f4(a='a', *args, **kwargs)
        self.make_data(contents)

2266 #@+node:ekr.20200206040732.1: *5* test_Delete 

    def test_Delete(self):
        """TOG round-trip test: `del` statement."""
        # Coverage test for spaces
        contents = """del x"""
        self.make_data(contents)

2272 #@+node:ekr.20200111175335.1: *5* test_For 

    def test_For(self):
        """TOG round-trip test: simple for loop."""
        contents = r"""\
for a in b:
    pass
"""
        self.make_data(contents)

2279 #@+node:ekr.20191227052446.45: *5* test_Global 

    def test_Global(self):
        """TOG round-trip test: `global` statement inside a function."""
        # Line 1604, leoGlobals.py
        contents = r"""
def spam():
    global gg
    print('')
"""
        self.make_data(contents)

2288 #@+node:ekr.20200111200424.1: *5* test_ImportFrom 

    def test_ImportFrom(self):
        """TOG round-trip test: from-import with an alias."""
        contents = r"""from a import b as c"""
        self.make_data(contents)

2292 #@+node:ekr.20210318174705.1: *5* test_ImportFromStar 

    def test_ImportFromStar(self):
        """TOG round-trip test: star import."""
        contents = r"""from sys import *"""
        self.make_data(contents)

2296 #@+node:ekr.20200206040424.1: *5* test_Lambda 

    def test_Lambda(self):
        """TOG round-trip test: lambda expression."""
        # Coverage test for spaces
        contents = """f = lambda x: x"""
        self.make_data(contents)

2302 #@+node:ekr.20200111200640.1: *5* test_Nonlocal 

    def test_Nonlocal(self):
        """TOG round-trip test: `nonlocal` statement."""
        contents = r"""nonlocal name1, name2"""
        self.make_data(contents)

2306 #@+node:ekr.20220224120239.1: *5* test_Raise 

    def test_Raise(self):
        """TOG round-trip test: raise ... from None."""
        contents = "raise ImportError from None"
        self.make_data(contents)

2310 #@+node:ekr.20191227052446.46: *5* test_Try 

    def test_Try(self):
        """TOG round-trip test: try with two except clauses and finally."""
        contents = r"""\
try:
    print('a1')
    print('a2')
except ImportError:
    print('b1')
    print('b2')
except SyntaxError:
    print('c1')
    print('c2')
finally:
    print('d1')
    print('d2')
"""
        self.make_data(contents)

2327 #@+node:ekr.20191227052446.47: *5* test_TryExceptElse 

    def test_Try2(self):
        """TOG round-trip test: try/except/else."""
        # Line 240: leoDebugger.py
        contents = r"""\
try:
    print('a')
except ValueError:
    print('b')
else:
    print('c')
"""
        self.make_data(contents)

2339 #@+node:ekr.20200206041336.1: *5* test_While 

    def test_While(self):
        """TOG round-trip test: while with an else clause."""
        contents = r"""\
while f():
    print('continue')
else:
    print('done')
"""
        self.make_data(contents)

2348 #@+node:ekr.20191227052446.48: *5* test_With 

    def test_With(self):
        """TOG round-trip test: with statement."""
        # leoGlobals.py, line 1785.
        contents = r"""\
with open(fn) as f:
    pass
"""
        self.make_data(contents)

2356 #@+node:ekr.20200206041611.1: *5* test_Yield 

    def test_Yield(self):
        """TOG round-trip test: yield expression."""
        contents = r"""\
def gen_test():
    yield self.gen_token('newline', '\n')
"""
        self.make_data(contents)

2363 #@+node:ekr.20191227052446.49: *5* test_YieldFrom 

    def test_YieldFrom(self):
        """TOG round-trip test: yield from expression."""
        # Line 1046, leoAst.py
        contents = r"""\
def gen_test():
    self.node = tree
    yield from self.gen_token('newline', '\n')
print('done')
"""
        self.make_data(contents)

2373 #@+node:ekr.20191228193740.1: *4* TestTOG.test_aa && zz 

    def test_aaa(self):
        """The first test."""
        # Record the suite start time; test_zzz reports the total elapsed time.
        g.total_time = get_time()

2377 

    def test_zzz(self):
        """The last test."""
        # Report total suite time, measured from g.total_time set in test_aaa.
        t2 = get_time()
        self.update_times('90: TOTAL', t2 - g.total_time)
        # self.dump_stats()

2383 #@-others 

2384#@+node:ekr.20200110093802.1: *3* class TestTokens (BaseTest) 

class TestTokens(BaseTest):
    """Unit tests for tokenizing."""
    #@+others
    #@+node:ekr.20200122165910.1: *4* TT.show_asttokens_script
    def show_asttokens_script(self):  # pragma: no cover
        """
        A script showing how asttokens can *easily* do the following:
        - Inject parent/child links into ast nodes.
        - Inject many-to-many links between tokens and ast nodes.
        """
        # pylint: disable=import-error,reimported
        import ast
        import asttokens
        import token as token_module
        stack: List[ast.AST] = []
        # Define TestToken class and helper functions.
        #@+others
        #@+node:ekr.20200122170101.3: *5* class TestToken
        class TestToken:
            """A patchable representation of the 5-tuples created by tokenize and used by asttokens."""

            def __init__(self, kind, value):
                self.kind = kind
                self.value = value
                self.node_list: List[Any] = []

            def __str__(self):
                tokens_s = ', '.join([z.__class__.__name__ for z in self.node_list])
                return f"{self.kind:12} {self.value:20} {tokens_s!s}"

            __repr__ = __str__
        #@+node:ekr.20200122170101.1: *5* function: atok_name
        def atok_name(token):
            """Return a good looking name for the given 5-tuple"""
            return token_module.tok_name[token[0]].lower()  # type:ignore
        #@+node:ekr.20200122170101.2: *5* function: atok_value
        def atok_value(token):
            """Print a good looking value for the given 5-tuple"""
            return token.string if atok_name(token) == 'string' else repr(token.string)
        #@+node:ekr.20200122170057.1: *5* function: dump_token
        def dump_token(token):
            """Return a one-line summary of the token and its linked ast nodes."""
            node_list = list(set(getattr(token, 'node_set', [])))
            node_list = sorted([z.__class__.__name__ for z in node_list])
            return f"{token.index:2} {atok_name(token):12} {atok_value(token):20} {node_list}"
        #@+node:ekr.20200122170337.1: *5* function: postvisit
        def postvisit(node, par_value, value):
            """asttokens post-visit callback: pop the parent stack."""
            nonlocal stack
            stack.pop()
            return par_value or []
        #@+node:ekr.20200122170101.4: *5* function: previsit
        def previsit(node, par_value):
            """asttokens pre-visit callback: inject parent/children links."""
            nonlocal stack
            if isinstance(node, ast.Module):
                stack = []
            if stack:
                parent = stack[-1]
                children: List[ast.AST] = getattr(parent, 'children', [])
                parent.children = children + [node]  # type:ignore
                node.parent = parent
            else:
                node.parent = None
                node.children = []
            stack.append(node)
            return par_value, []
        #@-others
        table = (
            # """print('%s in %5.2f sec' % ("done", 2.9))\n""",
            """print(a[1:2:3])\n""",
        )
        for source in table:
            print(f"Source...\n\n{source}")
            atok = asttokens.ASTTokens(source, parse=True)
            # Create a patchable list of Token objects.
            tokens = [TestToken(atok_name(z), atok_value(z)) for z in atok.tokens]
            # Inject parent/child links into nodes.
            asttokens.util.visit_tree(atok.tree, previsit, postvisit)
            # Create token.token_list for each token.
            for node in asttokens.util.walk(atok.tree):
                # Inject node into token.node_list
                for ast_token in atok.get_tokens(node, include_extra=True):
                    i = ast_token.index
                    token = tokens[i]
                    token.node_list.append(node)
            # Print the resulting parent/child links.
            for node in ast.walk(atok.tree):
                if hasattr(node, 'first_token'):
                    parent = getattr(node, 'parent', None)
                    parent_s = parent.__class__.__name__ if parent else 'None'
                    children: List[ast.AST] = getattr(node, 'children', [])
                    if children:
                        children_s = ', '.join(z.__class__.__name__ for z in children)
                    else:
                        children_s = 'None'
                    print(
                        f"\n"
                        f"    node: {node.__class__.__name__}\n"
                        f"  parent: {parent_s}\n"
                        f"children: {children_s}")
            # Print the resulting tokens.
            g.printObj(tokens, tag='Tokens')
    #@+node:ekr.20200121025938.1: *4* TT.show_example_dump
    def show_example_dump(self):  # pragma: no cover
        """Demonstrate the dump_* helpers on a three-line program."""
        # Will only be run when enabled explicitly.
        contents = """\
print('line 1')
print('line 2')
print('line 3')
"""
        contents, tokens, tree = self.make_data(contents)
        dump_contents(contents)
        dump_tokens(tokens)
        dump_tree(tokens, tree)
    #@+node:ekr.20200110015014.6: *4* TT.test_bs_nl_tokens
    def test_bs_nl_tokens(self):
        """Round-trip a backslash-newline continuation."""
        # Test https://bugs.python.org/issue38663.
        contents = """\
print \
('abc')
"""
        self.check_roundtrip(contents)
    #@+node:ekr.20200110015014.8: *4* TT.test_continuation_1
    def test_continuation_1(self):
        """Round-trip implicit line continuations inside (), [] and {}."""
        contents = """\
a = (3,4,
    5,6)
y = [3, 4,
    5]
z = {'a': 5,
    'b':15, 'c':True}
x = len(y) + 5 - a[
    3] - a[2] + len(z) - z[
    'b']
"""
        self.check_roundtrip(contents)
    #@+node:ekr.20200111085210.1: *4* TT.test_continuation_2
    def test_continuation_2(self):
        """Round-trip backslash continuations mixed with comments."""
        # Backslash means line continuation, except for comments
        contents = (
            'x=1+\\\n    2'
            '# This is a comment\\\n    # This also'
        )
        self.check_roundtrip(contents)
    #@+node:ekr.20200111085211.1: *4* TT.test_continuation_3
    def test_continuation_3(self):
        """Round-trip a backslash at the end of a comment."""
        contents = """\
# Comment \\\n
x = 0
"""
        self.check_roundtrip(contents)
    #@+node:ekr.20200110015014.10: *4* TT.test_string_concatenation_1
    def test_string_concatentation_1(self):
        """Round-trip two adjacent plain string literals."""
        # Two *plain* string literals on the same line
        self.check_roundtrip("""'abc' 'xyz'""")
    #@+node:ekr.20200111081801.1: *4* TT.test_string_concatenation_2
    def test_string_concatentation_2(self):
        """Round-trip an f-string joined to a plain string."""
        # f-string followed by plain string on the same line
        self.check_roundtrip("""f'abc' 'xyz'""")
    #@+node:ekr.20200111081832.1: *4* TT.test_string_concatenation_3
    def test_string_concatentation_3(self):
        """Round-trip a plain string joined to an f-string."""
        # plain string followed by f-string on the same line
        self.check_roundtrip("""'abc' f'xyz'""")
    #@+node:ekr.20160521103254.1: *4* TT.test_visitors_exist
    def test_visitors_exist(self):
        """Ensure that visitors for all ast nodes exist."""
        import _ast
        # Compute all fields to BaseTest.
        aList = sorted(dir(_ast))
        remove = [
            'Interactive', 'Suite',  # Not necessary.
            'AST',  # The base class,
            # Constants...
            'PyCF_ALLOW_TOP_LEVEL_AWAIT',
            'PyCF_ONLY_AST',
            'PyCF_TYPE_COMMENTS',
            # New ast nodes for Python 3.8.
            # We can ignore these nodes because:
            # 1. ast.parse does not generate them by default.
            # 2. The type comments are ordinary comments.
            #    They do not need to be specially synced.
            # 3. Tools such as black, orange, and fstringify will
            #    only ever handle comments as comments.
            'FunctionType', 'NamedExpr', 'TypeIgnore',
        ]
        aList = [z for z in aList if not z[0].islower()]
        # Remove base classes.
        # Fixed: `z not in remove` is the idiomatic form of `not z in remove`.
        aList = [z for z in aList
            if not z.startswith('_') and z not in remove]
        # Now test them.
        table = (
            TokenOrderGenerator,
        )
        for class_ in table:
            traverser = class_()
            errors, nodes, ops = 0, 0, 0
            for z in aList:
                if hasattr(traverser, 'do_' + z):
                    nodes += 1
                elif _op_names.get(z):
                    ops += 1
                else:  # pragma: no cover
                    errors += 1
                    print(
                        f"Missing visitor: "
                        f"{traverser.__class__.__name__}.{z}")
        msg = f"{nodes} node types, {ops} op types, {errors} errors"
        assert not errors, msg
    #@-others

2597#@+node:ekr.20200107144010.1: *3* class TestTopLevelFunctions (BaseTest) 

class TestTopLevelFunctions(BaseTest):
    """Tests for the top-level functions in leoAst.py."""
    #@+others
    #@+node:ekr.20200107144227.1: *4* test_get_encoding_directive
    def test_get_encoding_directive(self):
        """get_encoding_directive finds this file's utf-8 coding line."""
        filename = __file__
        assert os.path.exists(filename), repr(filename)
        with open(filename, 'rb') as f:
            bb = f.read()
        e = get_encoding_directive(bb)
        self.assertEqual(e.lower(), 'utf-8')
    #@+node:ekr.20200107150857.1: *4* test_strip_BOM
    def test_strip_BOM(self):
        """strip_BOM reports utf-8 (or no directive) for this file's bytes."""
        filename = __file__
        assert os.path.exists(filename), repr(filename)
        with open(filename, 'rb') as f:
            bb = f.read()
        assert bb, filename
        e, s = strip_BOM(bb)
        assert e is None or e.lower() == 'utf-8', repr(e)
    #@-others

2621#@+node:ekr.20191227152538.1: *3* class TestTOT (BaseTest) 

class TestTOT(BaseTest):
    """Tests for the TokenOrderTraverser class."""
    #@+others
    #@+node:ekr.20200111115318.1: *4* test_tot.test_traverse
    def test_traverse(self):
        """Traverse a parse tree, recording node-count and timing statistics."""
        contents = """\
f(1)
b = 2 + 3
"""
        # print('%s = %s' % (2+3, 4*5))
        # NOTE(review): hard-wired `if 1:` means the small `contents`
        # fixture above is dead code; the test always parses leoApp.py.
        if 1:
            contents, tokens, tree = self.make_file_data('leoApp.py')
        else:
            contents, tokens, tree = self.make_data(contents)
        tot = TokenOrderTraverser()
        t1 = get_time()
        n_nodes = tot.traverse(tree)
        t2 = get_time()
        self.update_counts('nodes', n_nodes)
        self.update_times('50: TOT.traverse', t2 - t1)
        # self.dump_stats()
    #@-others

2645#@-others 

2646#@-leo