1""" 

2A collection of utility functions and classes. Originally, many 

3(but not all) were from the Python Cookbook -- hence the name cbook. 

4 

5This module is safe to import from anywhere within matplotlib; 

6it imports matplotlib only at runtime. 

7""" 

8 

9import collections 

10import collections.abc 

11import contextlib 

12import functools 

13import glob 

14import gzip 

15import itertools 

16import locale 

17import numbers 

18import operator 

19import os 

20from pathlib import Path 

21import re 

22import shlex 

23import subprocess 

24import sys 

25import time 

26import traceback 

27import types 

28import warnings 

29import weakref 

30from weakref import WeakMethod 

31 

32import numpy as np 

33 

34import matplotlib 

35from .deprecation import ( 

36 deprecated, warn_deprecated, 

37 _rename_parameter, _delete_parameter, _make_keyword_only, 

38 _suppress_matplotlib_deprecation_warning, 

39 MatplotlibDeprecationWarning, mplDeprecation) 

40 

41 

42def _exception_printer(exc): 

43 traceback.print_exc() 

44 

45 

46class _StrongRef: 

47 """ 

48 Wrapper similar to a weakref, but keeping a strong reference to the object. 

49 """ 

50 

51 def __init__(self, obj): 

52 self._obj = obj 

53 

54 def __call__(self): 

55 return self._obj 

56 

57 def __eq__(self, other): 

58 return isinstance(other, _StrongRef) and self._obj == other._obj 

59 

60 def __hash__(self): 

61 return hash(self._obj) 

62 

63 

64class CallbackRegistry: 

65 """Handle registering and disconnecting for a set of signals and callbacks: 

66 

67 >>> def oneat(x): 

68 ... print('eat', x) 

69 >>> def ondrink(x): 

70 ... print('drink', x) 

71 

72 >>> from matplotlib.cbook import CallbackRegistry 

73 >>> callbacks = CallbackRegistry() 

74 

75 >>> id_eat = callbacks.connect('eat', oneat) 

76 >>> id_drink = callbacks.connect('drink', ondrink) 

77 

78 >>> callbacks.process('drink', 123) 

79 drink 123 

80 >>> callbacks.process('eat', 456) 

81 eat 456 

82 >>> callbacks.process('be merry', 456) # nothing will be called 

83 >>> callbacks.disconnect(id_eat) 

84 >>> callbacks.process('eat', 456) # nothing will be called 

85 

86 In practice, one should always disconnect all callbacks when they are 

87 no longer needed to avoid dangling references (and thus memory leaks). 

88 However, real code in Matplotlib rarely does so, and due to its design, 

89 it is rather difficult to place this kind of code. To get around this, 

90 and prevent this class of memory leaks, we instead store weak references 

91 to bound methods only, so when the destination object needs to die, the 

92 CallbackRegistry won't keep it alive. 

93 

94 Parameters 

95 ---------- 

96 exception_handler : callable, optional 

97 If provided, it must have the signature ::

98 

99 def handler(exc: Exception) -> None: 

100 

101 If not None, this function will be called with any `Exception`

102 subclass raised by the callbacks in `CallbackRegistry.process`. 

103 The handler may either consume the exception or re-raise. 

104 

105 The callable must be pickle-able. 

106 

107 The default handler is :: 

108 

109 def h(exc): 

110 traceback.print_exc() 

111 """ 

112 

113 # We maintain two mappings: 

114 # callbacks: signal -> {cid -> callback} 

115 # _func_cid_map: signal -> {callback -> cid} 

116 # (actually, callbacks are weakrefs to the actual callbacks). 

117 

118 def __init__(self, exception_handler=_exception_printer): 

119 self.exception_handler = exception_handler 

120 self.callbacks = {} 

121 self._cid_gen = itertools.count() 

122 self._func_cid_map = {} 

123 

124 # In general, callbacks may not be pickled; thus, we simply recreate an 

125 # empty dictionary at unpickling. In order to ensure that `__setstate__` 

126 # (which just defers to `__init__`) is called, `__getstate__` must 

127 # return a truthy value (for pickle protocol>=3, i.e. Py3, the 

128 # *actual* behavior is that `__setstate__` will be called as long as 

129 # `__getstate__` does not return `None`, but this is undocumented -- see 

130 # http://bugs.python.org/issue12290). 

131 

132 def __getstate__(self): 

133 return {'exception_handler': self.exception_handler} 

134 

135 def __setstate__(self, state): 

136 self.__init__(**state) 

137 

138 def connect(self, s, func): 

139 """Register *func* to be called when signal *s* is generated. 

140 """ 

141 self._func_cid_map.setdefault(s, {}) 

142 try: 

143 proxy = WeakMethod(func, self._remove_proxy) 

144 except TypeError: 

145 proxy = _StrongRef(func) 

146 if proxy in self._func_cid_map[s]: 

147 return self._func_cid_map[s][proxy] 

148 

149 cid = next(self._cid_gen) 

150 self._func_cid_map[s][proxy] = cid 

151 self.callbacks.setdefault(s, {}) 

152 self.callbacks[s][cid] = proxy 

153 return cid 

154 

155 # Keep a reference to sys.is_finalizing, as sys may have been cleared out 

156 # at that point. 

157 def _remove_proxy(self, proxy, *, _is_finalizing=sys.is_finalizing): 

158 if _is_finalizing(): 

159 # Weakrefs can't be properly torn down at that point anymore. 

160 return 

161 for signal, proxies in list(self._func_cid_map.items()): 

162 try: 

163 del self.callbacks[signal][proxies[proxy]] 

164 except KeyError: 

165 pass 

166 if len(self.callbacks[signal]) == 0: 

167 del self.callbacks[signal] 

168 del self._func_cid_map[signal] 

169 

170 def disconnect(self, cid): 

171 """Disconnect the callback registered with callback id *cid*. 

172 """ 

173 for eventname, callbackd in list(self.callbacks.items()): 

174 try: 

175 del callbackd[cid] 

176 except KeyError: 

177 continue 

178 else: 

179 for signal, functions in list(self._func_cid_map.items()): 

180 for function, value in list(functions.items()): 

181 if value == cid: 

182 del functions[function] 

183 return 

184 

185 def process(self, s, *args, **kwargs): 

186 """ 

187 Process signal *s*. 

188 

189 All of the functions registered to receive callbacks on *s* will be 

190 called with ``*args`` and ``**kwargs``. 

191 """ 

192 for cid, ref in list(self.callbacks.get(s, {}).items()): 

193 func = ref() 

194 if func is not None: 

195 try: 

196 func(*args, **kwargs) 

197 # this does not capture KeyboardInterrupt, SystemExit, 

198 # and GeneratorExit 

199 except Exception as exc: 

200 if self.exception_handler is not None: 

201 self.exception_handler(exc) 

202 else: 

203 raise 

204 

205 
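# Illustrative sketch (added example, not part of the original module): because
# the registry keeps only weak references to bound methods, a callback is
# dropped automatically once its owning object goes away. The `Listener` class
# below is hypothetical.
#
# >>> from matplotlib.cbook import CallbackRegistry
# >>> class Listener:
# ...     def on_eat(self, x):
# ...         print('eat', x)
# >>> listener = Listener()
# >>> callbacks = CallbackRegistry()
# >>> cid = callbacks.connect('eat', listener.on_eat)
# >>> callbacks.process('eat', 1)
# eat 1
# >>> del listener                  # last strong reference gone
# >>> callbacks.process('eat', 2)   # nothing is called anymore
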

206class silent_list(list): 

207 """ 

208 A list with a short ``repr()``. 

209 

210 This is meant to be used for a homogeneous list of artists, so that they 

211 don't cause long, meaningless output. 

212 

213 Instead of :: 

214 

215 [<matplotlib.lines.Line2D object at 0x7f5749fed3c8>, 

216 <matplotlib.lines.Line2D object at 0x7f5749fed4e0>, 

217 <matplotlib.lines.Line2D object at 0x7f5758016550>] 

218 

219 one will get :: 

220 

221 <a list of 3 Line2D objects> 

222 """ 

223 def __init__(self, type, seq=None): 

224 self.type = type 

225 if seq is not None: 

226 self.extend(seq) 

227 

228 def __repr__(self): 

229 return '<a list of %d %s objects>' % (len(self), self.type) 

230 

231 __str__ = __repr__ 

232 

233 def __getstate__(self): 

234 # store a dictionary of this SilentList's state 

235 return {'type': self.type, 'seq': self[:]} 

236 

237 def __setstate__(self, state): 

238 self.type = state['type'] 

239 self.extend(state['seq']) 

240 

241 

242class IgnoredKeywordWarning(UserWarning): 

243 """ 

244 A class for issuing warnings about keyword arguments that will be ignored 

245 by Matplotlib. 

246 """ 

247 pass 

248 

249 

250def local_over_kwdict(local_var, kwargs, *keys): 

251 """ 

252 Enforces the priority of a local variable over potentially conflicting 

253 argument(s) from a kwargs dict. The following possible output values are 

254 considered in order of priority:: 

255 

256 local_var > kwargs[keys[0]] > ... > kwargs[keys[-1]] 

257 

258 The first of these whose value is not None will be returned. If all are 

259 None then None will be returned. Each key in keys will be removed from the 

260 kwargs dict in place. 

261 

262 Parameters 

263 ---------- 

264 local_var : any object 

265 The local variable (highest priority). 

266 

267 kwargs : dict 

268 Dictionary of keyword arguments; modified in place. 

269 

270 keys : str(s) 

271 Name(s) of keyword arguments to process, in descending order of 

272 priority. 

273 

274 Returns 

275 ------- 

276 out : any object 

277 Either local_var or one of kwargs[key] for key in keys. 

278 

279 Raises 

280 ------ 

281 IgnoredKeywordWarning 

282 For each key in keys that is removed from kwargs but not used as 

283 the output value. 

284 

285 """ 

286 out = local_var 

287 for key in keys: 

288 kwarg_val = kwargs.pop(key, None) 

289 if kwarg_val is not None: 

290 if out is None: 

291 out = kwarg_val 

292 else: 

293 _warn_external('"%s" keyword argument will be ignored' % key, 

294 IgnoredKeywordWarning) 

295 return out 

296 

297 
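# Illustrative sketch (added example, not part of the original module): the
# local value takes priority, and every listed key is popped from the kwargs
# dict in place.
#
# >>> from matplotlib.cbook import local_over_kwdict
# >>> kw = {'cmap': 'viridis'}
# >>> local_over_kwdict(None, kw, 'cmap')      # no local value: kwarg is used
# 'viridis'
# >>> kw = {'cmap': 'viridis'}
# >>> local_over_kwdict('plasma', kw, 'cmap')  # local value wins (and warns)
# 'plasma'
# >>> kw                                       # the key was removed in place
# {}
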

298def strip_math(s): 

299 """ 

300 Remove LaTeX formatting from mathtext. 

301 

302 Only handles fully math and fully non-math strings. 

303 """ 

304 if len(s) >= 2 and s[0] == s[-1] == "$": 

305 s = s[1:-1] 

306 for tex, plain in [ 

307 (r"\times", "x"), # Specifically for Formatter support. 

308 (r"\mathdefault", ""), 

309 (r"\rm", ""), 

310 (r"\cal", ""), 

311 (r"\tt", ""), 

312 (r"\it", ""), 

313 ("\\", ""), 

314 ("{", ""), 

315 ("}", ""), 

316 ]: 

317 s = s.replace(tex, plain) 

318 return s 

319 

320 

321@deprecated('3.1', alternative='np.iterable') 

322def iterable(obj): 

323 """return true if *obj* is iterable""" 

324 try: 

325 iter(obj) 

326 except TypeError: 

327 return False 

328 return True 

329 

330 

331@deprecated("3.1", alternative="isinstance(..., collections.abc.Hashable)") 

332def is_hashable(obj): 

333 """Returns true if *obj* can be hashed""" 

334 try: 

335 hash(obj) 

336 except TypeError: 

337 return False 

338 return True 

339 

340 

341def is_writable_file_like(obj): 

342 """Return whether *obj* looks like a file object with a *write* method.""" 

343 return callable(getattr(obj, 'write', None)) 

344 

345 

346def file_requires_unicode(x): 

347 """ 

348 Return whether the given writable file-like object requires Unicode to be 

349 written to it. 

350 """ 

351 try: 

352 x.write(b'') 

353 except TypeError: 

354 return True 

355 else: 

356 return False 

357 

358 

359def to_filehandle(fname, flag='r', return_opened=False, encoding=None): 

360 """ 

361 Convert a path to an open file handle or pass-through a file-like object. 

362 

363 Consider using `open_file_cm` instead, as it allows one to properly close 

364 newly created file objects more easily. 

365 

366 Parameters 

367 ---------- 

368 fname : str or path-like or file-like object 

369 If `str` or `os.PathLike`, the file is opened using the flags specified 

370 by *flag* and *encoding*. If a file-like object, it is passed through. 

371 flag : str, default 'r' 

372 Passed as the *mode* argument to `open` when *fname* is `str` or 

373 `os.PathLike`; ignored if *fname* is file-like. 

374 return_opened : bool, default False 

375 If True, return both the file object and a boolean indicating whether 

376 this was a new file (that the caller needs to close). If False, return 

377 only the file object. 

378 encoding : str or None, default None 

379 Passed as the *encoding* argument to `open` when *fname* is `str` or 

380 `os.PathLike`; ignored if *fname* is file-like. 

381 

382 Returns 

383 ------- 

384 fh : file-like 

385 opened : bool 

386 *opened* is only returned if *return_opened* is True. 

387 """ 

388 if isinstance(fname, os.PathLike): 

389 fname = os.fspath(fname) 

390 if isinstance(fname, str): 

391 if fname.endswith('.gz'): 

392 # get rid of 'U' in flag for gzipped files. 

393 flag = flag.replace('U', '') 

394 fh = gzip.open(fname, flag) 

395 elif fname.endswith('.bz2'): 

396 # python may not be compiled with bz2 support, 

397 # bury import until we need it 

398 import bz2 

399 # get rid of 'U' in flag for bz2 files 

400 flag = flag.replace('U', '') 

401 fh = bz2.BZ2File(fname, flag) 

402 else: 

403 fh = open(fname, flag, encoding=encoding) 

404 opened = True 

405 elif hasattr(fname, 'seek'): 

406 fh = fname 

407 opened = False 

408 else: 

409 raise ValueError('fname must be a PathLike or file handle') 

410 if return_opened: 

411 return fh, opened 

412 return fh 

413 

414 

415@contextlib.contextmanager 

416def open_file_cm(path_or_file, mode="r", encoding=None): 

417 r"""Pass through file objects and context-manage `.PathLike`\s.""" 

418 fh, opened = to_filehandle(path_or_file, mode, True, encoding) 

419 if opened: 

420 with fh: 

421 yield fh 

422 else: 

423 yield fh 

424 

425 
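# Illustrative sketch (added example, not part of the original module):
# open_file_cm closes files it opened itself but passes caller-provided
# file-like objects through untouched. The file name below is hypothetical.
#
# >>> import io
# >>> from matplotlib.cbook import open_file_cm
# >>> with open_file_cm('data.txt', 'w') as fh:   # path: opened, then closed
# ...     _ = fh.write('hello')
# >>> buf = io.StringIO()
# >>> with open_file_cm(buf) as fh:               # file-like: passed through
# ...     _ = fh.write('hello')
# >>> buf.closed                                  # still open afterwards
# False
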

426def is_scalar_or_string(val): 

427 """Return whether the given object is a scalar or string like.""" 

428 return isinstance(val, str) or not np.iterable(val) 

429 

430 

431def get_sample_data(fname, asfileobj=True): 

432 """ 

433 Return a sample data file. *fname* is a path relative to the 

434 `mpl-data/sample_data` directory. If *asfileobj* is `True` 

435 return a file object, otherwise just a file path. 

436 

437 Sample data files are stored in the 'mpl-data/sample_data' directory within 

438 the Matplotlib package. 

439 

440 If the filename ends in .gz, the file is implicitly ungzipped. 

441 """ 

442 path = Path(matplotlib.get_data_path(), 'sample_data', fname) 

443 if asfileobj: 

444 suffix = path.suffix.lower() 

445 if suffix == '.gz': 

446 return gzip.open(path) 

447 elif suffix in ['.csv', '.xrc', '.txt']: 

448 return path.open('r') 

449 else: 

450 return path.open('rb') 

451 else: 

452 return str(path) 

453 

454 

455def _get_data_path(*args): 

456 """ 

457 Return the `Path` to a resource file provided by Matplotlib. 

458 

459 ``*args`` specify a path relative to the base data path. 

460 """ 

461 return Path(matplotlib.get_data_path(), *args) 

462 

463 

464def flatten(seq, scalarp=is_scalar_or_string): 

465 """ 

466 Return a generator of flattened nested containers. 

467 

468 For example: 

469 

470 >>> from matplotlib.cbook import flatten 

471 >>> l = (('John', ['Hunter']), (1, 23), [[([42, (5, 23)], )]]) 

472 >>> print(list(flatten(l))) 

473 ['John', 'Hunter', 1, 23, 42, 5, 23] 

474 

475 By: Composite of Holger Krekel and Luther Blissett 

476 From: https://code.activestate.com/recipes/121294/ 

477 and Recipe 1.12 in cookbook 

478 """ 

479 for item in seq: 

480 if scalarp(item) or item is None: 

481 yield item 

482 else: 

483 yield from flatten(item, scalarp) 

484 

485 

486@functools.lru_cache() 

487def get_realpath_and_stat(path): 

488 realpath = os.path.realpath(path) 

489 stat = os.stat(realpath) 

490 stat_key = (stat.st_ino, stat.st_dev) 

491 return realpath, stat_key 

492 

493 

494# A regular expression used to determine the amount of space to 

495# remove. It looks for the first sequence of spaces immediately 

496# following the first newline, or at the beginning of the string. 

497_find_dedent_regex = re.compile(r"(?:(?:\n\r?)|^)( *)\S") 

498# A cache to hold the regexs that actually remove the indent. 

499_dedent_regex = {} 

500 

501 

502@deprecated("3.1", alternative="inspect.cleandoc") 

503def dedent(s): 

504 """ 

505 Remove excess indentation from docstring *s*. 

506 

507 Discards any leading blank lines, then removes up to n whitespace 

508 characters from each line, where n is the number of leading 

509 whitespace characters in the first line. It differs from 

510 textwrap.dedent in its deletion of leading blank lines and its use 

511 of the first non-blank line to determine the indentation. 

512 

513 It is also faster in most cases. 

514 """ 

515 # This implementation has a somewhat obtuse use of regular 

516 # expressions. However, this function accounted for almost 30% of 

517 # matplotlib startup time, so it is worthy of optimization at all 

518 # costs. 

519 

520 if not s: # includes case of s is None 

521 return '' 

522 

523 match = _find_dedent_regex.match(s) 

524 if match is None: 

525 return s 

526 

527 # This is the number of spaces to remove from the left-hand side. 

528 nshift = match.end(1) - match.start(1) 

529 if nshift == 0: 

530 return s 

531 

532 # Get a regex that will remove *up to* nshift spaces from the 

533 # beginning of each line. If it isn't in the cache, generate it. 

534 unindent = _dedent_regex.get(nshift, None) 

535 if unindent is None: 

536 unindent = re.compile("\n\r? {0,%d}" % nshift) 

537 _dedent_regex[nshift] = unindent 

538 

539 result = unindent.sub("\n", s).strip() 

540 return result 

541 

542 

543class maxdict(dict): 

544 """ 

545 A dictionary with a maximum size. 

546 

547 Notes 

548 ----- 

549 This doesn't override all the relevant methods to constrain the size, 

550 just ``__setitem__``, so use with caution. 

551 """ 

552 def __init__(self, maxsize): 

553 dict.__init__(self) 

554 self.maxsize = maxsize 

555 self._killkeys = [] 

556 

557 def __setitem__(self, k, v): 

558 if k not in self: 

559 if len(self) >= self.maxsize: 

560 del self[self._killkeys[0]] 

561 del self._killkeys[0] 

562 self._killkeys.append(k) 

563 dict.__setitem__(self, k, v) 

564 

565 
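# Illustrative sketch (added example, not part of the original module): once
# the maximum size is reached, the oldest inserted key is evicted first.
#
# >>> from matplotlib.cbook import maxdict
# >>> d = maxdict(2)
# >>> d['a'] = 1
# >>> d['b'] = 2
# >>> d['c'] = 3        # 'a', the oldest key, is dropped
# >>> sorted(d)
# ['b', 'c']
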

566class Stack: 

567 """ 

568 Stack of elements with a movable cursor. 

569 

570 Mimics home/back/forward in a web browser. 

571 """ 

572 

573 def __init__(self, default=None): 

574 self.clear() 

575 self._default = default 

576 

577 def __call__(self): 

578 """Return the current element, or None.""" 

579 if not len(self._elements): 

580 return self._default 

581 else: 

582 return self._elements[self._pos] 

583 

584 def __len__(self): 

585 return len(self._elements) 

586 

587 def __getitem__(self, ind): 

588 return self._elements[ind] 

589 

590 def forward(self): 

591 """Move the position forward and return the current element.""" 

592 self._pos = min(self._pos + 1, len(self._elements) - 1) 

593 return self() 

594 

595 def back(self): 

596 """Move the position back and return the current element.""" 

597 if self._pos > 0: 

598 self._pos -= 1 

599 return self() 

600 

601 def push(self, o): 

602 """ 

603 Push *o* to the stack at current position. Discard all later elements. 

604 

605 *o* is returned. 

606 """ 

607 self._elements = self._elements[:self._pos + 1] + [o] 

608 self._pos = len(self._elements) - 1 

609 return self() 

610 

611 def home(self): 

612 """ 

613 Push the first element onto the top of the stack. 

614 

615 The first element is returned. 

616 """ 

617 if not len(self._elements): 

618 return 

619 self.push(self._elements[0]) 

620 return self() 

621 

622 def empty(self): 

623 """Return whether the stack is empty.""" 

624 return len(self._elements) == 0 

625 

626 def clear(self): 

627 """Empty the stack.""" 

628 self._pos = -1 

629 self._elements = [] 

630 

631 def bubble(self, o): 

632 """ 

633 Raise *o* to the top of the stack. *o* must be present in the stack. 

634 

635 *o* is returned. 

636 """ 

637 if o not in self._elements: 

638 raise ValueError('Unknown element o') 

639 old = self._elements[:] 

640 self.clear() 

641 bubbles = [] 

642 for thiso in old: 

643 if thiso == o: 

644 bubbles.append(thiso) 

645 else: 

646 self.push(thiso) 

647 for _ in bubbles: 

648 self.push(o) 

649 return o 

650 

651 def remove(self, o): 

652 """Remove *o* from the stack.""" 

653 if o not in self._elements: 

654 raise ValueError('Unknown element o') 

655 old = self._elements[:] 

656 self.clear() 

657 for thiso in old: 

658 if thiso != o: 

659 self.push(thiso) 

660 

661 
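# Illustrative sketch (added example, not part of the original module): the
# cursor moves like browser history, and push() discards everything after the
# current position.
#
# >>> from matplotlib.cbook import Stack
# >>> s = Stack()
# >>> _ = s.push('home'); _ = s.push('page1'); _ = s.push('page2')
# >>> s.back()
# 'page1'
# >>> s.push('page3')     # 'page2' is discarded
# 'page3'
# >>> s.forward()         # already at the newest element
# 'page3'
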

662def report_memory(i=0): # argument may go away 

663 """Return the memory consumed by the process.""" 

664 def call(command, os_name): 

665 try: 

666 return subprocess.check_output(command) 

667 except subprocess.CalledProcessError: 

668 raise NotImplementedError( 

669 "report_memory works on %s only if " 

670 "the '%s' program is found" % (os_name, command[0]) 

671 ) 

672 

673 pid = os.getpid() 

674 if sys.platform == 'sunos5': 

675 lines = call(['ps', '-p', '%d' % pid, '-o', 'osz'], 'Sun OS') 

676 mem = int(lines[-1].strip()) 

677 elif sys.platform == 'linux': 

678 lines = call(['ps', '-p', '%d' % pid, '-o', 'rss,sz'], 'Linux') 

679 mem = int(lines[1].split()[1]) 

680 elif sys.platform == 'darwin': 

681 lines = call(['ps', '-p', '%d' % pid, '-o', 'rss,vsz'], 'Mac OS') 

682 mem = int(lines[1].split()[0]) 

683 elif sys.platform == 'win32': 

684 lines = call(["tasklist", "/nh", "/fi", "pid eq %d" % pid], 'Windows') 

685 mem = int(lines.strip().split()[-2].replace(',', '')) 

686 else: 

687 raise NotImplementedError( 

688 "We don't have a memory monitor for %s" % sys.platform) 

689 return mem 

690 

691 

692_safezip_msg = 'In safezip, len(args[0])=%d but len(args[%d])=%d' 

693 

694 

695@deprecated("3.1") 

696def safezip(*args): 

697 """make sure *args* are equal len before zipping""" 

698 Nx = len(args[0]) 

699 for i, arg in enumerate(args[1:]): 

700 if len(arg) != Nx: 

701 raise ValueError(_safezip_msg % (Nx, i + 1, len(arg))) 

702 return list(zip(*args)) 

703 

704 

705def safe_masked_invalid(x, copy=False): 

706 x = np.array(x, subok=True, copy=copy) 

707 if not x.dtype.isnative: 

708 # Note that the argument to `byteswap` is 'inplace', 

709 # thus if we have already made a copy, do the byteswap in 

710 # place, else make a copy with the byte order swapped. 

711 # Be explicit that we are swapping the byte order of the dtype 

712 x = x.byteswap(copy).newbyteorder('S') 

713 

714 try: 

715 xm = np.ma.masked_invalid(x, copy=False) 

716 xm.shrink_mask() 

717 except TypeError: 

718 return x 

719 return xm 

720 

721 

722def print_cycles(objects, outstream=sys.stdout, show_progress=False): 

723 """ 

724 Print loops of cyclic references in the given *objects*. 

725 

726 It is often useful to pass in ``gc.garbage`` to find the cycles that are 

727 preventing some objects from being garbage collected. 

728 

729 Parameters 

730 ---------- 

731 objects 

732 A list of objects to find cycles in. 

733 outstream 

734 The stream for output. 

735 show_progress : bool 

736 If True, print the number of objects reached as they are found. 

737 """ 

738 import gc 

739 

740 def print_path(path): 

741 for i, step in enumerate(path): 

742 # next "wraps around" 

743 next = path[(i + 1) % len(path)] 

744 

745 outstream.write(" %s -- " % type(step)) 

746 if isinstance(step, dict): 

747 for key, val in step.items(): 

748 if val is next: 

749 outstream.write("[{!r}]".format(key)) 

750 break 

751 if key is next: 

752 outstream.write("[key] = {!r}".format(val)) 

753 break 

754 elif isinstance(step, list): 

755 outstream.write("[%d]" % step.index(next)) 

756 elif isinstance(step, tuple): 

757 outstream.write("( tuple )") 

758 else: 

759 outstream.write(repr(step)) 

760 outstream.write(" ->\n") 

761 outstream.write("\n") 

762 

763 def recurse(obj, start, all, current_path): 

764 if show_progress: 

765 outstream.write("%d\r" % len(all)) 

766 

767 all[id(obj)] = None 

768 

769 referents = gc.get_referents(obj) 

770 for referent in referents: 

771 # If we've found our way back to the start, this is 

772 # a cycle, so print it out 

773 if referent is start: 

774 print_path(current_path) 

775 

776 # Don't go back through the original list of objects, or 

777 # through temporary references to the object, since those 

778 # are just an artifact of the cycle detector itself. 

779 elif referent is objects or isinstance(referent, types.FrameType): 

780 continue 

781 

782 # We haven't seen this object before, so recurse 

783 elif id(referent) not in all: 

784 recurse(referent, start, all, current_path + [obj]) 

785 

786 for obj in objects: 

787 outstream.write(f"Examining: {obj!r}\n") 

788 recurse(obj, obj, {}, []) 

789 

790 

791class Grouper: 

792 """ 

793 This class provides a lightweight way to group arbitrary objects 

794 together into disjoint sets when a full-blown graph data structure 

795 would be overkill. 

796 

797 Objects can be joined using :meth:`join`, tested for connectedness 

798 using :meth:`joined`, and all disjoint sets can be retrieved by 

799 using the object as an iterator. 

800 

801 The objects being joined must be hashable and weak-referenceable. 

802 

803 For example: 

804 

805 >>> from matplotlib.cbook import Grouper 

806 >>> class Foo: 

807 ... def __init__(self, s): 

808 ... self.s = s 

809 ... def __repr__(self): 

810 ... return self.s 

811 ... 

812 >>> a, b, c, d, e, f = [Foo(x) for x in 'abcdef'] 

813 >>> grp = Grouper() 

814 >>> grp.join(a, b) 

815 >>> grp.join(b, c) 

816 >>> grp.join(d, e) 

817 >>> sorted(map(tuple, grp)) 

818 [(a, b, c), (d, e)] 

819 >>> grp.joined(a, b) 

820 True 

821 >>> grp.joined(a, c) 

822 True 

823 >>> grp.joined(a, d) 

824 False 

825 

826 """ 

827 def __init__(self, init=()): 

828 self._mapping = {weakref.ref(x): [weakref.ref(x)] for x in init} 

829 

830 def __contains__(self, item): 

831 return weakref.ref(item) in self._mapping 

832 

833 def clean(self): 

834 """Clean dead weak references from the dictionary.""" 

835 mapping = self._mapping 

836 to_drop = [key for key in mapping if key() is None] 

837 for key in to_drop: 

838 val = mapping.pop(key) 

839 val.remove(key) 

840 

841 def join(self, a, *args): 

842 """ 

843 Join given arguments into the same set. Accepts one or more arguments. 

844 """ 

845 mapping = self._mapping 

846 set_a = mapping.setdefault(weakref.ref(a), [weakref.ref(a)]) 

847 

848 for arg in args: 

849 set_b = mapping.get(weakref.ref(arg), [weakref.ref(arg)]) 

850 if set_b is not set_a: 

851 if len(set_b) > len(set_a): 

852 set_a, set_b = set_b, set_a 

853 set_a.extend(set_b) 

854 for elem in set_b: 

855 mapping[elem] = set_a 

856 

857 self.clean() 

858 

859 def joined(self, a, b): 

860 """Return whether *a* and *b* are members of the same set.""" 

861 self.clean() 

862 return (self._mapping.get(weakref.ref(a), object()) 

863 is self._mapping.get(weakref.ref(b))) 

864 

865 def remove(self, a): 

866 self.clean() 

867 set_a = self._mapping.pop(weakref.ref(a), None) 

868 if set_a: 

869 set_a.remove(weakref.ref(a)) 

870 

871 def __iter__(self): 

872 """ 

873 Iterate over each of the disjoint sets as a list. 

874 

875 The iterator is invalid if interleaved with calls to join(). 

876 """ 

877 self.clean() 

878 unique_groups = {id(group): group for group in self._mapping.values()} 

879 for group in unique_groups.values(): 

880 yield [x() for x in group] 

881 

882 def get_siblings(self, a): 

883 """Return all of the items joined with *a*, including itself.""" 

884 self.clean() 

885 siblings = self._mapping.get(weakref.ref(a), [weakref.ref(a)]) 

886 return [x() for x in siblings] 

887 

888 

889def simple_linear_interpolation(a, steps): 

890 """ 

891 Resample an array with ``steps - 1`` points between original point pairs. 

892 

893 Along each column of *a*, ``(steps - 1)`` points are introduced between 

894 each pair of original values; the values are linearly interpolated. 

895 

896 Parameters 

897 ---------- 

898 a : array, shape (n, ...) 

899 steps : int 

900 

901 Returns 

902 ------- 

903 array 

904 shape ``((n - 1) * steps + 1, ...)`` 

905 """ 

906 fps = a.reshape((len(a), -1)) 

907 xp = np.arange(len(a)) * steps 

908 x = np.arange((len(a) - 1) * steps + 1) 

909 return (np.column_stack([np.interp(x, xp, fp) for fp in fps.T]) 

910 .reshape((len(x),) + a.shape[1:])) 

911 

912 
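# Illustrative sketch (added example, not part of the original module): with
# steps=2, one interpolated point is inserted between each pair of original
# values.
#
# >>> import numpy as np
# >>> from matplotlib.cbook import simple_linear_interpolation
# >>> simple_linear_interpolation(np.array([0., 2., 4.]), 2)
# array([0., 1., 2., 3., 4.])
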

913def delete_masked_points(*args): 

914 """ 

915 Find all masked and/or non-finite points in a set of arguments, 

916 and return the arguments with only the unmasked points remaining. 

917 

918 Arguments can be in any of 5 categories: 

919 

920 1) 1-D masked arrays 

921 2) 1-D ndarrays 

922 3) ndarrays with more than one dimension 

923 4) other non-string iterables 

924 5) anything else 

925 

926 The first argument must be in one of the first four categories; 

927 any argument with a length differing from that of the first 

928 argument (and hence anything in category 5) will be 

929 passed through unchanged. 

930 

931 Masks are obtained from all arguments of the correct length 

932 in categories 1, 2, and 4; a point is bad if masked in a masked 

933 array or if it is a nan or inf. No attempt is made to 

934 extract a mask from categories 2, 3, and 4 if :meth:`np.isfinite` 

935 does not yield a Boolean array. 

936 

937 All input arguments that are not passed unchanged are returned 

938 as ndarrays after removing the points or rows corresponding to 

939 masks in any of the arguments. 

940 

941 A vastly simpler version of this function was originally 

942 written as a helper for Axes.scatter(). 

943 

944 """ 

945 if not len(args): 

946 return () 

947 if is_scalar_or_string(args[0]): 

948 raise ValueError("First argument must be a sequence") 

949 nrecs = len(args[0]) 

950 margs = [] 

951 seqlist = [False] * len(args) 

952 for i, x in enumerate(args): 

953 if not isinstance(x, str) and np.iterable(x) and len(x) == nrecs: 

954 seqlist[i] = True 

955 if isinstance(x, np.ma.MaskedArray): 

956 if x.ndim > 1: 

957 raise ValueError("Masked arrays must be 1-D") 

958 else: 

959 x = np.asarray(x) 

960 margs.append(x) 

961 masks = [] # list of masks that are True where good 

962 for i, x in enumerate(margs): 

963 if seqlist[i]: 

964 if x.ndim > 1: 

965 continue # Don't try to get nan locations unless 1-D. 

966 if isinstance(x, np.ma.MaskedArray): 

967 masks.append(~np.ma.getmaskarray(x)) # invert the mask 

968 xd = x.data 

969 else: 

970 xd = x 

971 try: 

972 mask = np.isfinite(xd) 

973 if isinstance(mask, np.ndarray): 

974 masks.append(mask) 

975 except Exception: # Fixme: put in tuple of possible exceptions? 

976 pass 

977 if len(masks): 

978 mask = np.logical_and.reduce(masks) 

979 igood = mask.nonzero()[0] 

980 if len(igood) < nrecs: 

981 for i, x in enumerate(margs): 

982 if seqlist[i]: 

983 margs[i] = x[igood] 

984 for i, x in enumerate(margs): 

985 if seqlist[i] and isinstance(x, np.ma.MaskedArray): 

986 margs[i] = x.filled() 

987 return margs 

988 
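# Illustrative sketch (added example, not part of the original module): rows
# that are masked or non-finite in either input are dropped from both.
#
# >>> import numpy as np
# >>> from matplotlib.cbook import delete_masked_points
# >>> x = np.ma.array([1, 2, 3, 4], mask=[False, True, False, False])
# >>> y = np.array([10., 20., np.nan, 40.])
# >>> xg, yg = delete_masked_points(x, y)
# >>> xg
# array([1, 4])
# >>> yg
# array([10., 40.])
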

989 

990def _combine_masks(*args): 

991 """ 

992 Find all masked and/or non-finite points in a set of arguments, 

993 and return the arguments as masked arrays with a common mask. 

994 

995 Arguments can be in any of 5 categories: 

996 

997 1) 1-D masked arrays 

998 2) 1-D ndarrays 

999 3) ndarrays with more than one dimension 

1000 4) other non-string iterables 

1001 5) anything else 

1002 

1003 The first argument must be in one of the first four categories; 

1004 any argument with a length differing from that of the first 

1005 argument (and hence anything in category 5) will be 

1006 passed through unchanged. 

1007 

1008 Masks are obtained from all arguments of the correct length 

1009 in categories 1, 2, and 4; a point is bad if masked in a masked 

1010 array or if it is a nan or inf. No attempt is made to 

1011 extract a mask from categories 2 and 4 if :meth:`np.isfinite` 

1012 does not yield a Boolean array. Category 3 is included to 

1013 support RGB or RGBA ndarrays, which are assumed to have only 

1014 valid values and which are passed through unchanged. 

1015 

1016 All input arguments that are not passed unchanged are returned 

1017 as masked arrays if any masked points are found, otherwise as 

1018 ndarrays. 

1019 

1020 """ 

1021 if not len(args): 

1022 return () 

1023 if is_scalar_or_string(args[0]): 

1024 raise ValueError("First argument must be a sequence") 

1025 nrecs = len(args[0]) 

1026 margs = [] # Output args; some may be modified. 

1027 seqlist = [False] * len(args) # Flags: True if output will be masked. 

1028 masks = [] # List of masks. 

1029 for i, x in enumerate(args): 

1030 if is_scalar_or_string(x) or len(x) != nrecs: 

1031 margs.append(x) # Leave it unmodified. 

1032 else: 

1033 if isinstance(x, np.ma.MaskedArray) and x.ndim > 1: 

1034 raise ValueError("Masked arrays must be 1-D") 

1035 x = np.asanyarray(x) 

1036 if x.ndim == 1: 

1037 x = safe_masked_invalid(x) 

1038 seqlist[i] = True 

1039 if np.ma.is_masked(x): 

1040 masks.append(np.ma.getmaskarray(x)) 

1041 margs.append(x) # Possibly modified. 

1042 if len(masks): 

1043 mask = np.logical_or.reduce(masks) 

1044 for i, x in enumerate(margs): 

1045 if seqlist[i]: 

1046 margs[i] = np.ma.array(x, mask=mask) 

1047 return margs 

1048 

1049 

1050def boxplot_stats(X, whis=1.5, bootstrap=None, labels=None, 

1051 autorange=False): 

1052 r""" 

1053 Return a list of dictionaries of statistics used to draw a series 

1054 of box and whisker plots. The `Returns` section enumerates the 

1055 required keys of each dictionary. Users can skip this function and 

1056 pass a user-defined set of dictionaries to the `axes.bxp` method 

1057 instead of relying on Matplotlib to do the calculations. 

1058 

1059 Parameters 

1060 ---------- 

1061 X : array-like 

1062 Data that will be represented in the boxplots. Should have 2 or 

1063 fewer dimensions. 

1064 

1065 whis : float or (float, float) (default = 1.5) 

1066 The position of the whiskers. 

1067 

1068 If a float, the lower whisker is at the lowest datum above 

1069 ``Q1 - whis*(Q3-Q1)``, and the upper whisker at the highest datum below 

1070 ``Q3 + whis*(Q3-Q1)``, where Q1 and Q3 are the first and third 

1071 quartiles. The default value of ``whis = 1.5`` corresponds to Tukey's 

1072 original definition of boxplots. 

1073 

1074 If a pair of floats, they indicate the percentiles at which to draw the 

1075 whiskers (e.g., (5, 95)). In particular, setting this to (0, 100) 

1076 results in whiskers covering the whole range of the data. "range" is 

1077 a deprecated synonym for (0, 100). 

1078 

1079 In the edge case where ``Q1 == Q3``, *whis* is automatically set to 

1080 (0, 100) (cover the whole range of the data) if *autorange* is True. 

1081 

1082 Beyond the whiskers, data are considered outliers and are plotted as 

1083 individual points. 

1084 

1085 bootstrap : int, optional 

1086 Number of times the confidence intervals around the median 

1087 should be bootstrapped (percentile method). 

1088 

1089 labels : array-like, optional 

1090 Labels for each dataset. Length must be compatible with 

1091 dimensions of *X*. 

1092 

1093 autorange : bool, optional (False) 

1094 When `True` and the data are distributed such that the 25th and 75th 

1095 percentiles are equal, ``whis`` is set to (0, 100) such that the 

1096 whisker ends are at the minimum and maximum of the data. 

1097 

1098 Returns 

1099 ------- 

1100 bxpstats : list of dict 

1101 A list of dictionaries containing the results for each column 

1102 of data. Keys of each dictionary are the following: 

1103 

1104 ======== =================================== 

1105 Key Value Description 

1106 ======== =================================== 

1107 label tick label for the boxplot 

1108 mean arithmetic mean value 

1109 med 50th percentile 

1110 q1 first quartile (25th percentile) 

1111 q3 third quartile (75th percentile) 

1112 cilo lower notch around the median 

1113 cihi upper notch around the median 

1114 whislo end of the lower whisker 

1115 whishi end of the upper whisker 

1116 fliers outliers 

1117 ======== =================================== 

1118 

1119 Notes 

1120 ----- 

1121 Non-bootstrapping approach to confidence interval uses Gaussian- 

1122 based asymptotic approximation: 

1123 

1124 .. math:: 

1125 

1126 \mathrm{med} \pm 1.57 \times \frac{\mathrm{iqr}}{\sqrt{N}} 

1127 

1128 General approach from: 

1129 McGill, R., Tukey, J.W., and Larsen, W.A. (1978) "Variations of 

1130 Boxplots", The American Statistician, 32:12-16. 

1131 

1132 """ 

1133 

1134 def _bootstrap_median(data, N=5000): 

1135 # determine 95% confidence intervals of the median 

1136 M = len(data) 

1137 percentiles = [2.5, 97.5] 

1138 

1139 bs_index = np.random.randint(M, size=(N, M)) 

1140 bsData = data[bs_index] 

1141 estimate = np.median(bsData, axis=1, overwrite_input=True) 

1142 

1143 CI = np.percentile(estimate, percentiles) 

1144 return CI 

1145 

1146 def _compute_conf_interval(data, med, iqr, bootstrap): 

1147 if bootstrap is not None: 

1148 # Do a bootstrap estimate of notch locations. 

1149 # get conf. intervals around median 

1150 CI = _bootstrap_median(data, N=bootstrap) 

1151 notch_min = CI[0] 

1152 notch_max = CI[1] 

1153 else: 

1154 

1155 N = len(data) 

1156 notch_min = med - 1.57 * iqr / np.sqrt(N) 

1157 notch_max = med + 1.57 * iqr / np.sqrt(N) 

1158 

1159 return notch_min, notch_max 

1160 

1161 # output is a list of dicts 

1162 bxpstats = [] 

1163 

1164 # convert X to a list of lists 

1165 X = _reshape_2D(X, "X") 

1166 

1167 ncols = len(X) 

1168 if labels is None: 

1169 labels = itertools.repeat(None) 

1170 elif len(labels) != ncols: 

1171 raise ValueError("Dimensions of labels and X must be compatible") 

1172 

1173 input_whis = whis 

1174 for ii, (x, label) in enumerate(zip(X, labels)): 

1175 

1176 # empty dict 

1177 stats = {} 

1178 if label is not None: 

1179 stats['label'] = label 

1180 

1181 # restore whis to the input values in case it got changed in the loop 

1182 whis = input_whis 

1183 

1184 # note tricksiness, append up here and then mutate below 

1185 bxpstats.append(stats) 

1186 

1187 # if empty, bail 

1188 if len(x) == 0: 

1189 stats['fliers'] = np.array([]) 

1190 stats['mean'] = np.nan 

1191 stats['med'] = np.nan 

1192 stats['q1'] = np.nan 

1193 stats['q3'] = np.nan 

1194 stats['cilo'] = np.nan 

1195 stats['cihi'] = np.nan 

1196 stats['whislo'] = np.nan 

1197 stats['whishi'] = np.nan 

1198 stats['med'] = np.nan 

1199 continue 

1200 

1201 # up-convert to an array, just to be safe 

1202 x = np.asarray(x) 

1203 

1204 # arithmetic mean 

1205 stats['mean'] = np.mean(x) 

1206 

1207 # medians and quartiles 

1208 q1, med, q3 = np.percentile(x, [25, 50, 75]) 

1209 

1210 # interquartile range 

1211 stats['iqr'] = q3 - q1 

1212 if stats['iqr'] == 0 and autorange: 

1213 whis = (0, 100) 

1214 

1215 # conf. interval around median 

1216 stats['cilo'], stats['cihi'] = _compute_conf_interval( 

1217 x, med, stats['iqr'], bootstrap 

1218 ) 

1219 

1220 # lowest/highest non-outliers 

1221 if np.isscalar(whis): 

1222 if np.isreal(whis): 

1223 loval = q1 - whis * stats['iqr'] 

1224 hival = q3 + whis * stats['iqr'] 

1225 elif whis in ['range', 'limit', 'limits', 'min/max']: 

1226 warn_deprecated( 

1227 "3.2", message=f"Setting whis to {whis!r} is deprecated " 

1228 "since %(since)s and support for it will be removed " 

1229 "%(removal)s; set it to [0, 100] to achieve the same " 

1230 "effect.") 

1231 loval = np.min(x) 

1232 hival = np.max(x) 

1233 else: 

1234 raise ValueError('whis must be a float or list of percentiles') 

1235 else: 

1236 loval, hival = np.percentile(x, whis) 

1237 

1238 # get high extreme 

1239 wiskhi = x[x <= hival] 

1240 if len(wiskhi) == 0 or np.max(wiskhi) < q3: 

1241 stats['whishi'] = q3 

1242 else: 

1243 stats['whishi'] = np.max(wiskhi) 

1244 

1245 # get low extreme 

1246 wisklo = x[x >= loval] 

1247 if len(wisklo) == 0 or np.min(wisklo) > q1: 

1248 stats['whislo'] = q1 

1249 else: 

1250 stats['whislo'] = np.min(wisklo) 

1251 

1252 # compute a single array of outliers 

1253 stats['fliers'] = np.hstack([ 

1254 x[x < stats['whislo']], 

1255 x[x > stats['whishi']], 

1256 ]) 

1257 

1258 # add in the remaining stats 

1259 stats['q1'], stats['med'], stats['q3'] = q1, med, q3 

1260 

1261 return bxpstats 

1262 

1263 
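# Illustrative sketch (added example, not part of the original module): the
# returned dictionaries can be passed straight to `Axes.bxp`.
#
# >>> import numpy as np
# >>> from matplotlib.cbook import boxplot_stats
# >>> data = np.random.randn(100, 3)
# >>> stats = boxplot_stats(data, labels=['a', 'b', 'c'])
# >>> len(stats)                   # one dict per column of *data*
# 3
# >>> stats[0]['label']
# 'a'
# >>> sorted(stats[0])[:4]
# ['cihi', 'cilo', 'fliers', 'iqr']
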

1264# The ls_mapper maps short codes for line style to their full name used by 

1265# backends; the reverse mapper is for mapping full names to short ones. 

1266ls_mapper = {'-': 'solid', '--': 'dashed', '-.': 'dashdot', ':': 'dotted'} 

1267ls_mapper_r = {v: k for k, v in ls_mapper.items()} 

1268 

1269 

1270def contiguous_regions(mask): 

1271 """ 

1272 Return a list of (ind0, ind1) such that ``mask[ind0:ind1].all()`` is 

1273 True and we cover all such regions. 

1274 """ 

1275 mask = np.asarray(mask, dtype=bool) 

1276 

1277 if not mask.size: 

1278 return [] 

1279 

1280 # Find the indices of region changes, and correct offset 

1281 idx, = np.nonzero(mask[:-1] != mask[1:]) 

1282 idx += 1 

1283 

1284 # List operations are faster for moderately sized arrays 

1285 idx = idx.tolist() 

1286 

1287 # Add first and/or last index if needed 

1288 if mask[0]: 

1289 idx = [0] + idx 

1290 if mask[-1]: 

1291 idx.append(len(mask)) 

1292 

1293 return list(zip(idx[::2], idx[1::2])) 

1294 

1295 
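# Illustrative sketch (added example, not part of the original module): each
# (ind0, ind1) pair is a half-open slice covering one run of True values.
#
# >>> from matplotlib.cbook import contiguous_regions
# >>> contiguous_regions([False, True, True, False, True])
# [(1, 3), (4, 5)]
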

1296def is_math_text(s): 

1297 """ 

1298 Returns whether the string *s* contains math expressions. 

1299 

1300 This is done by checking whether *s* contains an even number of 

1301 non-escaped dollar signs. 

1302 """ 

1303 s = str(s) 

1304 dollar_count = s.count(r'$') - s.count(r'\$') 

1305 even_dollars = (dollar_count > 0 and dollar_count % 2 == 0) 

1306 return even_dollars 

1307 

1308 

1309def _to_unmasked_float_array(x): 

1310 """ 

1311 Convert a sequence to a float array; if input was a masked array, masked 

1312 values are converted to nans. 

1313 """ 

1314 if hasattr(x, 'mask'): 

1315 return np.ma.asarray(x, float).filled(np.nan) 

1316 else: 

1317 return np.asarray(x, float) 

1318 

1319 

1320def _check_1d(x): 

1321 ''' 

1322 Converts a sequence of less than 1 dimension to an array of 1 

1323 dimension; leaves everything else untouched. 

1324 ''' 

1325 if not hasattr(x, 'shape') or len(x.shape) < 1: 

1326 return np.atleast_1d(x) 

1327 else: 

1328 try: 

1329 # work around 

1330 # https://github.com/pandas-dev/pandas/issues/27775 which 

1331 # means the shape of multi-dimensional slicing is not as 

1332 # expected. That this ever worked was an unintentional 

1333 # quirk of pandas and will raise an exception in the 

1334 # future. This slicing warns in pandas >= 1.0rc0 via 

1335 # https://github.com/pandas-dev/pandas/pull/30588 

1336 # 

1337 # < 1.0rc0 : x[:, None].ndim == 1, no warning, custom type 

1338 # >= 1.0rc1 : x[:, None].ndim == 2, warns, numpy array 

1339 # future : x[:, None] -> raises 

1340 # 

1341 # This code should correctly identify and coerce to a 

1342 # numpy array all pandas versions. 

1343 with warnings.catch_warnings(record=True) as w: 

1344 warnings.filterwarnings( 

1345 "always", 

1346 category=DeprecationWarning, 

1347 message='Support for multi-dimensional indexing') 

1348 

1349 ndim = x[:, None].ndim 

1350 # we have definitely hit a pandas index or series object 

1351 # cast to a numpy array. 

1352 if len(w) > 0: 

1353 return np.asanyarray(x) 

1354 # We have likely hit a pandas object, or at least 

1355 # something where 2D slicing does not result in a 2D 

1356 # object. 

1357 if ndim < 2: 

1358 return np.atleast_1d(x) 

1359 return x 

1360 except (IndexError, TypeError): 

1361 return np.atleast_1d(x) 

1362 

1363 

1364def _reshape_2D(X, name): 

1365 """ 

1366 Use Fortran ordering to convert ndarrays and lists of iterables to lists of 

1367 1D arrays. 

1368 

1369 Lists of iterables are converted by applying `np.asarray` to each of their 

1370 elements. 1D ndarrays are returned in a singleton list containing them. 

1371 2D ndarrays are converted to the list of their *columns*. 

1372 

1373 *name* is used to generate the error message for invalid inputs. 

1374 """ 

1375 # Iterate over columns for ndarrays, over rows otherwise. 

1376 X = np.atleast_1d(X.T if isinstance(X, np.ndarray) else np.asarray(X)) 

1377 if len(X) == 0: 

1378 return [[]] 

1379 elif X.ndim == 1 and np.ndim(X[0]) == 0: 

1380 # 1D array of scalars: directly return it. 

1381 return [X] 

1382 elif X.ndim in [1, 2]: 

1383 # 2D array, or 1D array of iterables: flatten them first. 

1384 return [np.reshape(x, -1) for x in X] 

1385 else: 

1386 raise ValueError("{} must have 2 or fewer dimensions".format(name)) 

1387 

1388 

1389def violin_stats(X, method, points=100, quantiles=None): 

1390 """ 

1391 Returns a list of dictionaries of data which can be used to draw a series 

1392 of violin plots. 

1393 

1394 See the Returns section below to view the required keys of the dictionary. 

1395 

1396 Users can skip this function and pass a user-defined set of dictionaries 

1397 with the same keys to `~.axes.Axes.violinplot` instead of using Matplotlib 

1398 to do the calculations. 

1399 

1400 

1401 Parameters 

1402 ---------- 

1403 X : array-like 

1404 Sample data that will be used to produce the gaussian kernel density 

1405 estimates. Must have 2 or fewer dimensions. 

1406 

1407 method : callable 

1408 The method used to calculate the kernel density estimate for each 

1409 column of data. When called via `method(v, coords)`, it should 

1410 return a vector of the values of the KDE evaluated at the values 

1411 specified in coords. 

1412 

1413 points : int, default = 100 

1414 Defines the number of points to evaluate each of the gaussian kernel 

1415 density estimates at. 

1416 

1417 quantiles : array-like, default = None 

1418 Defines (if not None) a list of floats in interval [0, 1] for each 

1419 column of data, which represents the quantiles that will be rendered 

1420 for that column of data. Must have 2 or fewer dimensions. A 1D array will 

1421 be treated as a singleton list containing it. 

1422 

1423 Returns 

1424 ------- 

1425 vpstats : list of dict 

1426 A list of dictionaries containing the results for each column of data. 

1427 The dictionaries contain at least the following: 

1428 

1429 - coords: A list of scalars containing the coordinates this particular 

1430 kernel density estimate was evaluated at. 

1431 - vals: A list of scalars containing the values of the kernel density 

1432 estimate at each of the coordinates given in `coords`. 

1433 - mean: The mean value for this column of data. 

1434 - median: The median value for this column of data. 

1435 - min: The minimum value for this column of data. 

1436 - max: The maximum value for this column of data. 

1437 - quantiles: The quantile values for this column of data. 

1438 """ 

1439 

1440 # List of dictionaries describing each of the violins. 

1441 vpstats = [] 

1442 

1443 # Want X to be a list of data sequences 

1444 X = _reshape_2D(X, "X") 

1445 

1446 # Want quantiles to have the same shape as the data sequences 

1447 if quantiles is not None and len(quantiles) != 0: 

1448 quantiles = _reshape_2D(quantiles, "quantiles") 

1449 # Else, mock quantiles if None or empty 

1450 else: 

1451 quantiles = [[]] * np.shape(X)[0] 

1452 

1453 # quantiles should have the same length as the dataset 

1454 if np.shape(X)[:1] != np.shape(quantiles)[:1]: 

1455 raise ValueError("List of violinplot statistics and quantiles values" 

1456 " must have the same length") 

1457 

1458 # Zip x and quantiles 

1459 for (x, q) in zip(X, quantiles): 

1460 # Dictionary of results for this distribution 

1461 stats = {} 

1462 

1463 # Calculate basic stats for the distribution 

1464 min_val = np.min(x) 

1465 max_val = np.max(x) 

1466 quantile_val = np.percentile(x, 100 * q) 

1467 

1468 # Evaluate the kernel density estimate 

1469 coords = np.linspace(min_val, max_val, points) 

1470 stats['vals'] = method(x, coords) 

1471 stats['coords'] = coords 

1472 

1473 # Store additional statistics for this distribution 

1474 stats['mean'] = np.mean(x) 

1475 stats['median'] = np.median(x) 

1476 stats['min'] = min_val 

1477 stats['max'] = max_val 

1478 stats['quantiles'] = np.atleast_1d(quantile_val) 

1479 

1480 # Append to output 

1481 vpstats.append(stats) 

1482 

1483 return vpstats 

1484 

1485 
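# Illustrative sketch (added example, not part of the original module):
# *method* can be any callable that evaluates a density estimate; a Gaussian
# KDE from scipy is one option (scipy is an assumption here, not a matplotlib
# requirement).
#
# >>> import numpy as np
# >>> from scipy.stats import gaussian_kde
# >>> from matplotlib.cbook import violin_stats
# >>> data = np.random.randn(100)
# >>> vp = violin_stats(data, lambda x, coords: gaussian_kde(x)(coords))
# >>> sorted(vp[0])
# ['coords', 'max', 'mean', 'median', 'min', 'quantiles', 'vals']
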

1486def pts_to_prestep(x, *args): 

1487 """ 

1488 Convert continuous line to pre-steps. 

1489 

1490 Given a set of ``N`` points, convert to ``2N - 1`` points, which when 

1491 connected linearly give a step function which changes values at the 

1492 beginning of the intervals. 

1493 

1494 Parameters 

1495 ---------- 

1496 x : array 

1497 The x location of the steps. May be empty. 

1498 

1499 y1, ..., yp : array 

1500 y arrays to be turned into steps; all must be the same length as ``x``. 

1501 

1502 Returns 

1503 ------- 

1504 out : array 

1505 The x and y values converted to steps in the same order as the input; 

1506 can be unpacked as ``x_out, y1_out, ..., yp_out``. If the input is 

1507 length ``N``, each of these arrays will be length ``2N - 1``. For 

1508 ``N=0``, the length will be 0. 

1509 

1510 Examples 

1511 -------- 

1512 >>> x_s, y1_s, y2_s = pts_to_prestep(x, y1, y2) 

1513 """ 

1514 steps = np.zeros((1 + len(args), max(2 * len(x) - 1, 0))) 

1515 # In all `pts_to_*step` functions, only assign once using *x* and *args*, 

1516 # as converting to an array may be expensive. 

1517 steps[0, 0::2] = x 

1518 steps[0, 1::2] = steps[0, 0:-2:2] 

1519 steps[1:, 0::2] = args 

1520 steps[1:, 1::2] = steps[1:, 2::2] 

1521 return steps 

1522 

1523 

1524def pts_to_poststep(x, *args): 

1525 """ 

1526 Convert continuous line to post-steps. 

1527 

1528 Given a set of ``N`` points, convert to ``2N - 1`` points, which when 

1529 connected linearly give a step function which changes values at the end of 

1530 the intervals. 

1531 

1532 Parameters 

1533 ---------- 

1534 x : array 

1535 The x location of the steps. May be empty. 

1536 

1537 y1, ..., yp : array 

1538 y arrays to be turned into steps; all must be the same length as ``x``. 

1539 

1540 Returns 

1541 ------- 

1542 out : array 

1543 The x and y values converted to steps in the same order as the input; 

1544 can be unpacked as ``x_out, y1_out, ..., yp_out``. If the input is 

1545 length ``N``, each of these arrays will be length ``2N - 1``. For 

1546 ``N=0``, the length will be 0. 

1547 

1548 Examples 

1549 -------- 

1550 >>> x_s, y1_s, y2_s = pts_to_poststep(x, y1, y2) 

1551 """ 

1552 steps = np.zeros((1 + len(args), max(2 * len(x) - 1, 0))) 

1553 steps[0, 0::2] = x 

1554 steps[0, 1::2] = steps[0, 2::2] 

1555 steps[1:, 0::2] = args 

1556 steps[1:, 1::2] = steps[1:, 0:-2:2] 

1557 return steps 

1558 

1559 

1560def pts_to_midstep(x, *args): 

1561 """ 

1562 Convert continuous line to mid-steps. 

1563 

1564 Given a set of ``N`` points convert to ``2N`` points which when connected 

1565 linearly give a step function which changes values at the middle of the 

1566 intervals. 

1567 

1568 Parameters 

1569 ---------- 

1570 x : array 

1571 The x location of the steps. May be empty. 

1572 

1573 y1, ..., yp : array 

1574 y arrays to be turned into steps; all must be the same length as 

1575 ``x``. 

1576 

1577 Returns 

1578 ------- 

1579 out : array 

1580 The x and y values converted to steps in the same order as the input; 

1581 can be unpacked as ``x_out, y1_out, ..., yp_out``. If the input is 

1582 length ``N``, each of these arrays will be length ``2N``. 

1583 

1584 Examples 

1585 -------- 

1586 >>> x_s, y1_s, y2_s = pts_to_midstep(x, y1, y2) 

1587 """ 

1588 steps = np.zeros((1 + len(args), 2 * len(x))) 

1589 x = np.asanyarray(x) 

1590 steps[0, 1:-1:2] = steps[0, 2::2] = (x[:-1] + x[1:]) / 2 

1591 steps[0, :1] = x[:1] # Also works for zero-sized input. 

1592 steps[0, -1:] = x[-1:] 

1593 steps[1:, 0::2] = args 

1594 steps[1:, 1::2] = steps[1:, 0::2] 

1595 return steps 

1596 

1597 
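# Illustrative sketch (added example, not part of the original module):
# comparing the three step conversions on the same three points.
#
# >>> import numpy as np
# >>> from matplotlib.cbook import pts_to_prestep, pts_to_poststep, pts_to_midstep
# >>> x, y = np.array([1., 2., 3.]), np.array([10., 20., 30.])
# >>> pts_to_prestep(x, y)[0]    # x repeats before each new y value
# array([1., 1., 2., 2., 3.])
# >>> pts_to_poststep(x, y)[0]   # x repeats after each y value
# array([1., 2., 2., 3., 3.])
# >>> pts_to_midstep(x, y)[0]    # jumps occur halfway between x values
# array([1. , 1.5, 1.5, 2.5, 2.5, 3. ])
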

1598STEP_LOOKUP_MAP = {'default': lambda x, y: (x, y), 

1599 'steps': pts_to_prestep, 

1600 'steps-pre': pts_to_prestep, 

1601 'steps-post': pts_to_poststep, 

1602 'steps-mid': pts_to_midstep} 

1603 

1604 

1605def index_of(y): 

1606 """ 

1607 A helper function to create reasonable x values for the given *y*. 

1608 

1609 This is used for plotting (x, y) if x values are not explicitly given. 

1610 

1611 First try ``y.index`` (assuming *y* is a `pandas.Series`), if that 

1612 fails, use ``range(len(y))``. 

1613 

1614 This will be extended in the future to deal with more types of 

1615 labeled data. 

1616 

1617 Parameters 

1618 ---------- 

1619 y : scalar or array-like 

1620 

1621 Returns 

1622 ------- 

1623 x, y : ndarray 

1624 The x and y values to plot. 

1625 """ 

1626 try: 

1627 return y.index.values, y.values 

1628 except AttributeError: 

1629 y = _check_1d(y) 

1630 return np.arange(y.shape[0], dtype=float), y 

1631 

1632 

1633def safe_first_element(obj): 

1634 """ 

1635 Return the first element in *obj*. 

1636 

1637 This is a type-independent way of obtaining the first element, supporting 

1638 both index access and the iterator protocol. 

1639 """ 

1640 if isinstance(obj, collections.abc.Iterator): 

1641 # needed to accept `array.flat` as input. 

1642 # np.flatiter reports as an instance of collections.Iterator 

1643 # but can still be indexed via []. 

1644 # This has the side effect of re-setting the iterator, but 

1645 # that is acceptable. 

1646 try: 

1647 return obj[0] 

1648 except TypeError: 

1649 pass 

1650 raise RuntimeError("matplotlib does not support generators " 

1651 "as input") 

1652 return next(iter(obj)) 

1653 

1654 

1655def sanitize_sequence(data): 

1656 """ 

1657 Convert dictview objects to list. Other inputs are returned unchanged. 

1658 """ 

1659 return (list(data) if isinstance(data, collections.abc.MappingView) 

1660 else data) 

1661 

1662 

1663def normalize_kwargs(kw, alias_mapping=None, required=(), forbidden=(), 

1664 allowed=None): 

1665 """ 

1666 Helper function to normalize kwarg inputs. 

1667 

1668 The order in which they are resolved is: 

1669 

1670 1. aliasing 

1671 2. required 

1672 3. forbidden 

1673 4. allowed 

1674 

1675 This order means that only the canonical names need appear in 

1676 *allowed*, *forbidden*, *required*. 

1677 

1678 Parameters 

1679 ---------- 

1680 kw : dict 

1681 A dict of keyword arguments. 

1682 

1683 alias_mapping : dict or Artist subclass or Artist instance, optional 

1684 A mapping between a canonical name to a list of 

1685 aliases, in order of precedence from lowest to highest. 

1686 

1687 If the canonical value is not in the list it is assumed to have 

1688 the highest priority. 

1689 

1690 If an Artist subclass or instance is passed, use its properties alias 

1691 mapping. 

1692 

1693 required : list of str, optional 

1694 A list of keys that must be in *kw*. 

1695 

1696 forbidden : list of str, optional 

1697 A list of keys which may not be in *kw*. 

1698 

1699 allowed : list of str, optional 

1700 A list of allowed fields. If this is not None, then raise if 

1701 *kw* contains any keys not in the union of *required* 

1702 and *allowed*. To allow only the required fields pass in 

1703 an empty tuple ``allowed=()``. 

1704 

1705 Raises 

1706 ------ 

1707 TypeError 

1708 To match what python raises if invalid args/kwargs are passed to 

1709 a callable. 

1710 """ 

1711 from matplotlib.artist import Artist 

1712 

1713 # deal with default value of alias_mapping 

1714 if alias_mapping is None: 

1715 alias_mapping = dict() 

1716 elif (isinstance(alias_mapping, type) and issubclass(alias_mapping, Artist) 

1717 or isinstance(alias_mapping, Artist)): 

1718 alias_mapping = getattr(alias_mapping, "_alias_map", {}) 

1719 

1720 # make a local so we can pop 

1721 kw = dict(kw) 

1722 # output dictionary 

1723 ret = dict() 

1724 

1725 # hit all alias mappings 

1726 for canonical, alias_list in alias_mapping.items(): 

1727 

1728 # the alias lists are ordered from lowest to highest priority 

1729 # so we know to use the last value in this list 

1730 tmp = [] 

1731 seen = [] 

1732 for a in alias_list: 

1733 try: 

1734 tmp.append(kw.pop(a)) 

1735 seen.append(a) 

1736 except KeyError: 

1737 pass 

1738 # if canonical is not in the alias_list assume highest priority 

1739 if canonical not in alias_list: 

1740 try: 

1741 tmp.append(kw.pop(canonical)) 

1742 seen.append(canonical) 

1743 except KeyError: 

1744 pass 

1745 # if we found anything in this set of aliases put it in the return 

1746 # dict 

1747 if tmp: 

1748 ret[canonical] = tmp[-1] 

1749 if len(tmp) > 1: 

1750 warn_deprecated( 

1751 "3.1", message=f"Saw kwargs {seen!r} which are all " 

1752 f"aliases for {canonical!r}. Kept value from " 

1753 f"{seen[-1]!r}. Passing multiple aliases for the same " 

1754 f"property will raise a TypeError %(removal)s.") 

1755 

1756 # at this point we know that all keys which are aliased are removed; update 

1757 # the return dictionary from the cleaned local copy of the input 

1758 ret.update(kw) 

1759 

1760 fail_keys = [k for k in required if k not in ret] 

1761 if fail_keys: 

1762 raise TypeError("The required keys {keys!r} " 

1763 "are not in kwargs".format(keys=fail_keys)) 

1764 

1765 fail_keys = [k for k in forbidden if k in ret] 

1766 if fail_keys: 

1767 raise TypeError("The forbidden keys {keys!r} " 

1768 "are in kwargs".format(keys=fail_keys)) 

1769 

1770 if allowed is not None: 

1771 allowed_set = {*required, *allowed} 

1772 fail_keys = [k for k in ret if k not in allowed_set] 

1773 if fail_keys: 

1774 raise TypeError( 

1775 "kwargs contains {keys!r} which are not in the required " 

1776 "{req!r} or allowed {allow!r} keys".format( 

1777 keys=fail_keys, req=required, allow=allowed)) 

1778 

1779 return ret 
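# A minimal sketch of alias resolution, using a hypothetical alias mapping
# (canonical name -> aliases, ordered from lowest to highest priority):
#
#     >>> aliases = {'color': ['c'], 'linewidth': ['lw']}
#     >>> normalize_kwargs({'c': 'red', 'lw': 2}, aliases)
#     {'color': 'red', 'linewidth': 2}
#
# Passing both an alias and its canonical name keeps the canonical value
# (it has the highest priority) and emits a deprecation warning.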

1780 

1781 

1782@deprecated("3.1") 

1783def get_label(y, default_name): 

1784 try: 

1785 return y.name 

1786 except AttributeError: 

1787 return default_name 

1788 

1789 

1790_lockstr = """\ 

1791LOCKERROR: matplotlib is trying to acquire the lock 

1792 {!r} 

1793and has failed. This may be due to another process holding this 

1794lock. If you are sure no other matplotlib process is running, try 

1795removing these folders and trying again. 

1796""" 

1797 

1798 

1799@contextlib.contextmanager 

1800def _lock_path(path): 

1801 """ 

1802 Context manager for locking a path. 

1803 

1804 Usage:: 

1805 

1806 with _lock_path(path): 

1807 ... 

1808 

1809 Another thread or process that attempts to lock the same path will wait 

1810 until this context manager is exited. 

1811 

1812 The lock is implemented by creating a temporary file in the parent 

1813 directory, so that directory must exist and be writable. 

1814 """ 

1815 path = Path(path) 

1816 lock_path = path.with_name(path.name + ".matplotlib-lock") 

1817 retries = 50 

1818 sleeptime = 0.1 

1819 for _ in range(retries): 

1820 try: 

1821 with lock_path.open("xb"): 

1822 break 

1823 except FileExistsError: 

1824 time.sleep(sleeptime) 

1825 else: 

1826 raise TimeoutError("""\ 

1827Lock error: Matplotlib failed to acquire the following lock file: 

1828 {} 

1829This may be due to another process holding this lock file. If you are sure no 

1830other Matplotlib process is running, remove this file and try again.""".format( 

1831 lock_path)) 

1832 try: 

1833 yield 

1834 finally: 

1835 lock_path.unlink() 

1836 

1837 

1838def _topmost_artist( 

1839 artists, 

1840 _cached_max=functools.partial(max, key=operator.attrgetter("zorder"))): 

1841 """Get the topmost artist of a list. 

1842 

1843 In case of a tie, return the *last* of the tied artists, as it will be 

1844 drawn on top of the others. `max` returns the first maximum in case of 

1845 ties, so we need to iterate over the list in reverse order. 

1846 """ 

1847 return _cached_max(reversed(artists)) 
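# A minimal sketch of the tie-breaking behaviour, using stand-in objects with
# a ``zorder`` attribute instead of real Artists: among equal zorders, the
# artist listed last wins.
#
#     >>> from types import SimpleNamespace
#     >>> a, b, c = (SimpleNamespace(zorder=z, name=n)
#     ...            for z, n in [(1, 'a'), (2, 'b'), (2, 'c')])
#     >>> _topmost_artist([a, b, c]).name
#     'c'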

1848 

1849 

1850def _str_equal(obj, s): 

1851 """Return whether *obj* is a string equal to string *s*. 

1852 

1853 This helper solely exists to handle the case where *obj* is a numpy array, 

1854 because in such cases, a naive ``obj == s`` would yield an array, which 

1855 cannot be used in a boolean context. 

1856 """ 

1857 return isinstance(obj, str) and obj == s 

1858 

1859 

1860def _str_lower_equal(obj, s): 

1861 """Return whether *obj* is a string equal, when lowercased, to string *s*. 

1862 

1863 This helper solely exists to handle the case where *obj* is a numpy array, 

1864 because in such cases, a naive ``obj == s`` would yield an array, which 

1865 cannot be used in a boolean context. 

1866 """ 

1867 return isinstance(obj, str) and obj.lower() == s 
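# A minimal sketch: both helpers are safe to call when the value may be a
# numpy array, in which case they simply return False.
#
#     >>> _str_lower_equal("None", "none")
#     True
#     >>> _str_lower_equal(np.array(["none"]), "none")
#     False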

1868 

1869 

1870def _define_aliases(alias_d, cls=None): 

1871 """Class decorator for defining property aliases. 

1872 

1873 Use as :: 

1874 

1875 @cbook._define_aliases({"property": ["alias", ...], ...}) 

1876 class C: ... 

1877 

1878 For each property, if the corresponding ``get_property`` is defined in the 

1879 class so far, an alias named ``get_alias`` will be defined; the same will 

1880 be done for setters. If neither the getter nor the setter exists, an 

1881 exception will be raised. 

1882 

1883 The alias map is stored as the ``_alias_map`` attribute on the class and 

1884 can be used by `~.normalize_kwargs` (which assumes that higher priority 

1885 aliases come last). 

1886 """ 

1887 if cls is None: # Return the actual class decorator. 

1888 return functools.partial(_define_aliases, alias_d) 

1889 

1890 def make_alias(name): # Enforce a closure over *name*. 

1891 @functools.wraps(getattr(cls, name)) 

1892 def method(self, *args, **kwargs): 

1893 return getattr(self, name)(*args, **kwargs) 

1894 return method 

1895 

1896 for prop, aliases in alias_d.items(): 

1897 exists = False 

1898 for prefix in ["get_", "set_"]: 

1899 if prefix + prop in vars(cls): 

1900 exists = True 

1901 for alias in aliases: 

1902 method = make_alias(prefix + prop) 

1903 method.__name__ = prefix + alias 

1904 method.__doc__ = "Alias for `{}`.".format(prefix + prop) 

1905 setattr(cls, prefix + alias, method) 

1906 if not exists: 

1907 raise ValueError( 

1908 "Neither getter nor setter exists for {!r}".format(prop)) 

1909 

1910 if hasattr(cls, "_alias_map"): 

1911 # Need to decide on conflict resolution policy. 

1912 raise NotImplementedError("Parent class already defines aliases") 

1913 cls._alias_map = alias_d 

1914 return cls 
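# A minimal sketch of the decorator applied to a hypothetical class with one
# getter/setter pair; ``get_lw``/``set_lw`` aliases are generated and the
# alias map is recorded on the class.
#
#     >>> @_define_aliases({"linewidth": ["lw"]})
#     ... class Stroke:
#     ...     def set_linewidth(self, w):
#     ...         self._w = w
#     ...     def get_linewidth(self):
#     ...         return self._w
#     >>> s = Stroke()
#     >>> s.set_lw(3)
#     >>> s.get_lw()
#     3
#     >>> Stroke._alias_map
#     {'linewidth': ['lw']}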

1915 

1916 

1917def _array_perimeter(arr): 

1918 """ 

1919 Get the elements on the perimeter of ``arr``. 

1920 

1921 Parameters 

1922 ---------- 

1923 arr : ndarray, shape (M, N) 

1924 The input array 

1925 

1926 Returns 

1927 ------- 

1928 perimeter : ndarray, shape (2*(M - 1) + 2*(N - 1),) 

1929 The elements on the perimeter of the array:: 

1930 

1931 [arr[0, 0], ..., arr[0, -1], ..., arr[-1, -1], ..., arr[-1, 0], ...] 

1932 

1933 Examples 

1934 -------- 

1935 >>> i, j = np.ogrid[:3,:4] 

1936 >>> a = i*10 + j 

1937 >>> a 

1938 array([[ 0, 1, 2, 3], 

1939 [10, 11, 12, 13], 

1940 [20, 21, 22, 23]]) 

1941 >>> _array_perimeter(a) 

1942 array([ 0, 1, 2, 3, 13, 23, 22, 21, 20, 10]) 

1943 """ 

1944 # note we use Python's half-open ranges to avoid repeating 

1945 # the corners 

1946 forward = np.s_[0:-1] # [0 ... -1) 

1947 backward = np.s_[-1:0:-1] # [-1 ... 0) 

1948 return np.concatenate(( 

1949 arr[0, forward], 

1950 arr[forward, -1], 

1951 arr[-1, backward], 

1952 arr[backward, 0], 

1953 )) 

1954 

1955 

1956@contextlib.contextmanager 

1957def _setattr_cm(obj, **kwargs): 

1958 """Temporarily set some attributes; restore original state at context exit. 

1959 """ 

1960 sentinel = object() 

1961 origs = [(attr, getattr(obj, attr, sentinel)) for attr in kwargs] 

1962 try: 

1963 for attr, val in kwargs.items(): 

1964 setattr(obj, attr, val) 

1965 yield 

1966 finally: 

1967 for attr, orig in origs: 

1968 if orig is sentinel: 

1969 delattr(obj, attr) 

1970 else: 

1971 setattr(obj, attr, orig) 
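# A minimal usage sketch: existing attributes are restored at exit, and
# attributes that did not exist beforehand are deleted again.
#
#     >>> from types import SimpleNamespace
#     >>> obj = SimpleNamespace(x=1)
#     >>> with _setattr_cm(obj, x=2, y=3):
#     ...     print(obj.x, obj.y)
#     2 3
#     >>> obj.x, hasattr(obj, "y")
#     (1, False)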

1972 

1973 

1974def _warn_external(message, category=None): 

1975 """ 

1976 `warnings.warn` wrapper that sets *stacklevel* to "outside Matplotlib". 

1977 

1978 The original emitter of the warning can be obtained by patching this 

1979 function back to `warnings.warn`, i.e. ``cbook._warn_external = 

1980 warnings.warn`` (or ``functools.partial(warnings.warn, stacklevel=2)``, 

1981 etc.). 

1982 """ 

1983 frame = sys._getframe() 

1984 for stacklevel in itertools.count(1): # lgtm[py/unused-loop-variable] 

1985 if frame is None: 

1986 # when called in embedded context may hit frame is None 

1987 break 

1988 if not re.match(r"\A(matplotlib|mpl_toolkits)(\Z|\.(?!tests\.))", 

1989 # Work around sphinx-gallery not setting __name__. 

1990 frame.f_globals.get("__name__", "")): 

1991 break 

1992 frame = frame.f_back 

1993 warnings.warn(message, category, stacklevel) 

1994 

1995 

1996class _OrderedSet(collections.abc.MutableSet): 

1997 def __init__(self): 

1998 self._od = collections.OrderedDict() 

1999 

2000 def __contains__(self, key): 

2001 return key in self._od 

2002 

2003 def __iter__(self): 

2004 return iter(self._od) 

2005 

2006 def __len__(self): 

2007 return len(self._od) 

2008 

2009 def add(self, key): 

2010 self._od.pop(key, None) 

2011 self._od[key] = None 

2012 

2013 def discard(self, key): 

2014 self._od.pop(key, None) 
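# A minimal usage sketch: iteration follows insertion order, and re-adding an
# existing key moves it to the end.
#
#     >>> s = _OrderedSet()
#     >>> for k in ["a", "b", "c", "a"]:
#     ...     s.add(k)
#     >>> list(s)
#     ['b', 'c', 'a']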

2015 

2016 

2017# Agg's buffers are unmultiplied RGBA8888, which neither PyQt4 nor cairo 

2018# support; however, both do support premultiplied ARGB32. 

2019 

2020 

2021def _premultiplied_argb32_to_unmultiplied_rgba8888(buf): 

2022 """ 

2023 Convert a premultiplied ARGB32 buffer to an unmultiplied RGBA8888 buffer. 

2024 """ 

2025 rgba = np.take( # .take() ensures C-contiguity of the result. 

2026 buf, 

2027 [2, 1, 0, 3] if sys.byteorder == "little" else [1, 2, 3, 0], axis=2) 

2028 rgb = rgba[..., :-1] 

2029 alpha = rgba[..., -1] 

2030 # Un-premultiply alpha. The formula is the same as in cairo-png.c. 

2031 mask = alpha != 0 

2032 for channel in np.rollaxis(rgb, -1): 

2033 channel[mask] = ( 

2034 (channel[mask].astype(int) * 255 + alpha[mask] // 2) 

2035 // alpha[mask]) 

2036 return rgba 

2037 

2038 

2039def _unmultiplied_rgba8888_to_premultiplied_argb32(rgba8888): 

2040 """ 

2041 Convert an unmultiplied RGBA8888 buffer to a premultiplied ARGB32 buffer. 

2042 """ 

2043 if sys.byteorder == "little": 

2044 argb32 = np.take(rgba8888, [2, 1, 0, 3], axis=2) 

2045 rgb24 = argb32[..., :-1] 

2046 alpha8 = argb32[..., -1:] 

2047 else: 

2048 argb32 = np.take(rgba8888, [3, 0, 1, 2], axis=2) 

2049 alpha8 = argb32[..., :1] 

2050 rgb24 = argb32[..., 1:] 

2051 # Only bother premultiplying when the alpha channel is not fully opaque, 

2052 # as the cost is not negligible. The unsafe cast is needed to do the 

2053 # multiplication in-place in an integer buffer. 

2054 if alpha8.min() != 0xff: 

2055 np.multiply(rgb24, alpha8 / 0xff, out=rgb24, casting="unsafe") 

2056 return argb32 
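# A minimal round-trip sketch for the two converters above, on a single fully
# opaque red pixel stored as an (M, N, 4) uint8 buffer:
#
#     >>> rgba = np.array([[[255, 0, 0, 255]]], dtype=np.uint8)
#     >>> argb = _unmultiplied_rgba8888_to_premultiplied_argb32(rgba)
#     >>> np.array_equal(
#     ...     _premultiplied_argb32_to_unmultiplied_rgba8888(argb), rgba)
#     True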

2057 

2058 

2059def _pformat_subprocess(command): 

2060 """Pretty-format a subprocess command for printing/logging purposes.""" 

2061 return (command if isinstance(command, str) 

2062 else " ".join(shlex.quote(os.fspath(arg)) for arg in command)) 
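# A minimal sketch with a hypothetical command: list arguments containing
# spaces get shell-quoted, while a plain string is returned as-is.
#
#     >>> _pformat_subprocess(["convert", "my file.png", "out.pdf"])
#     "convert 'my file.png' out.pdf"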

2063 

2064 

2065def _check_and_log_subprocess(command, logger, **kwargs): 

2066 """ 

2067 Run *command*, returning its stdout output if it succeeds. 

2068 

2069 If it fails (exits with nonzero return code), raise an exception whose text 

2070 includes the failed command and captured stdout and stderr output. 

2071 

2072 Regardless of the return code, the command is logged at DEBUG level on 

2073 *logger*. In case of success, the output is likewise logged. 

2074 """ 

2075 logger.debug('%s', _pformat_subprocess(command)) 

2076 proc = subprocess.run( 

2077 command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) 

2078 if proc.returncode: 

2079 raise RuntimeError( 

2080 f"The command\n" 

2081 f" {_pformat_subprocess(command)}\n" 

2082 f"failed and generated the following output:\n" 

2083 f"{proc.stdout.decode('utf-8')}\n" 

2084 f"and the following error:\n" 

2085 f"{proc.stderr.decode('utf-8')}") 

2086 logger.debug("stdout:\n%s", proc.stdout) 

2087 logger.debug("stderr:\n%s", proc.stderr) 

2088 return proc.stdout 
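# A minimal usage sketch, assuming a standard library logger; the command's
# stdout is returned as bytes on success.
#
#     >>> import logging
#     >>> _log = logging.getLogger(__name__)
#     >>> out = _check_and_log_subprocess([sys.executable, "--version"], _log)
#     >>> out.startswith(b"Python")
#     True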

2089 

2090 

2091# In the following _check_foo functions, the first parameter starts with an 

2092# underscore because it is intended to be positional-only (e.g., so that 

2093# `_check_isinstance([...], types=foo)` doesn't fail). 

2094 

2095 

2096def _check_isinstance(_types, **kwargs): 

2097 """ 

2098 For each *key, value* pair in *kwargs*, check that *value* is an instance 

2099 of one of *_types*; if not, raise an appropriate TypeError. 

2100 

2101 As a special case, a ``None`` entry in *_types* is treated as NoneType. 

2102 

2103 Examples 

2104 -------- 

2105 >>> cbook._check_isinstance((SomeClass, None), arg=arg) 

2106 """ 

2107 types = _types 

2108 if isinstance(types, type) or types is None: 

2109 types = (types,) 

2110 none_allowed = None in types 

2111 types = tuple(tp for tp in types if tp is not None) 

2112 

2113 def type_name(tp): 

2114 return (tp.__qualname__ if tp.__module__ == "builtins" 

2115 else f"{tp.__module__}.{tp.__qualname__}") 

2116 

2117 names = [*map(type_name, types)] 

2118 if none_allowed: 

2119 types = (*types, type(None)) 

2120 names.append("None") 

2121 for k, v in kwargs.items(): 

2122 if not isinstance(v, types): 

2123 raise TypeError( 

2124 "{!r} must be an instance of {}, not a {}".format( 

2125 k, 

2126 ", ".join(names[:-1]) + " or " + names[-1] 

2127 if len(names) > 1 else names[0], 

2128 type_name(type(v)))) 

2129 

2130 

2131def _check_in_list(_values, **kwargs): 

2132 """ 

2133 For each *key, value* pair in *kwargs*, check that *value* is in *_values*; 

2134 if not, raise an appropriate ValueError. 

2135 

2136 Examples 

2137 -------- 

2138 >>> cbook._check_in_list(["foo", "bar"], arg=arg, other_arg=other_arg) 

2139 """ 

2140 values = _values 

2141 for k, v in kwargs.items(): 

2142 if v not in values: 

2143 raise ValueError( 

2144 "{!r} is not a valid value for {}; supported values are {}" 

2145 .format(v, k, ', '.join(map(repr, values)))) 

2146 

2147 

2148def _check_getitem(_mapping, **kwargs): 

2149 """ 

2150 *kwargs* must consist of a single *key, value* pair. If *value* is in 

2151 *_mapping*, return ``_mapping[value]``; else, raise an appropriate 

2152 ValueError. 

2153 

2154 Examples 

2155 -------- 

2156 >>> cbook._check_getitem({"foo": "bar"}, arg=arg) 

2157 """ 

2158 mapping = _mapping 

2159 if len(kwargs) != 1: 

2160 raise ValueError("_check_getitem takes a single keyword argument") 

2161 (k, v), = kwargs.items() 

2162 try: 

2163 return mapping[v] 

2164 except KeyError: 

2165 raise ValueError( 

2166 "{!r} is not a valid value for {}; supported values are {}" 

2167 .format(v, k, ', '.join(map(repr, mapping)))) from None 

2168 

2169 

2170class _classproperty: 

2171 """ 

2172 Like `property`, but also triggers on access via the class, and it is the 

2173 *class* that's passed as argument. 

2174 

2175 Examples 

2176 -------- 

2177 :: 

2178 

2179 class C: 

2180 @_classproperty 

2181 def foo(cls): 

2182 return cls.__name__ 

2183 

2184 assert C.foo == "C" 

2185 """ 

2186 

2187 def __init__(self, fget): 

2188 self._fget = fget 

2189 

2190 def __get__(self, instance, owner): 

2191 return self._fget(owner)