"""Supporting definitions for the Python regression tests."""

if __name__ != 'test.support':
    raise ImportError('support must be imported from the test package')

import collections.abc
import contextlib
import errno
import faulthandler
import fnmatch
import functools
import gc
import importlib
import importlib.util
import logging.handlers
import nntplib
import os
import platform
import re
import shutil
import socket
import stat
import struct
import subprocess
import sys
import sysconfig
import tempfile
import time
import types
import unittest
import urllib.error
import warnings

try:
    import _thread, threading
except ImportError:
    _thread = None
    threading = None
try:
    import multiprocessing.process
except ImportError:
    multiprocessing = None

try:
    import zlib
except ImportError:
    zlib = None

try:
    import gzip
except ImportError:
    gzip = None

try:
    import bz2
except ImportError:
    bz2 = None

try:
    import lzma
except ImportError:
    lzma = None

try:
    import resource
except ImportError:
    resource = None

__all__ = [
    # globals
    "PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast",
    # exceptions
    "Error", "TestFailed", "ResourceDenied",
    # imports
    "import_module", "import_fresh_module", "CleanImport",
    # modules
    "unload", "forget",
    # io
    "record_original_stdout", "get_original_stdout", "captured_stdout",
    "captured_stdin", "captured_stderr",
    # filesystem
    "TESTFN", "SAVEDCWD", "unlink", "rmtree", "temp_cwd", "findfile",
    "create_empty_file", "can_symlink", "fs_is_case_insensitive",
    # unittest
    "is_resource_enabled", "requires", "requires_freebsd_version",
    "requires_linux_version", "requires_mac_ver", "check_syntax_error",
    "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset",
    "transient_internet", "BasicTestRunner", "run_unittest", "run_doctest",
    "skip_unless_symlink", "requires_gzip", "requires_bz2", "requires_lzma",
    "bigmemtest", "bigaddrspacetest", "cpython_only", "get_attribute",
    "requires_IEEE_754", "skip_unless_xattr", "requires_zlib",
    "anticipate_failure", "load_package_tests", "detect_api_mismatch",
    "check__all__", "requires_android_level", "requires_multiprocessing_queue",
    # sys
    "is_jython", "is_android", "check_impl_detail", "unix_shell",
    "setswitchinterval", "android_not_root",
    # network
    "HOST", "IPV6_ENABLED", "find_unused_port", "bind_port", "open_urlresource",
    "bind_unix_socket",
    # processes
    'temp_umask', "reap_children",
    # logging
    "TestHandler",
    # threads
    "threading_setup", "threading_cleanup", "reap_threads", "start_threads",
    # miscellaneous
    "check_warnings", "check_no_resource_warning", "EnvironmentVarGuard",
    "run_with_locale", "swap_item",
    "swap_attr", "Matcher", "set_memlimit", "SuppressCrashReport", "sortdict",
    "run_with_tz", "PGO", "missing_compiler_executable",
    ]

class Error(Exception):
    """Base class for regression test exceptions."""

class TestFailed(Error):
    """Test failed."""

class ResourceDenied(unittest.SkipTest):
    """Test skipped because it requested a disallowed resource.

    This is raised when a test calls requires() for a resource that
    has not been enabled.  It is used to distinguish between expected
    and unexpected skips.
    """

@contextlib.contextmanager
def _ignore_deprecated_imports(ignore=True):
    """Context manager to suppress package and module deprecation
    warnings when importing them.

    If ignore is False, this context manager has no effect.
    """
    if ignore:
        with warnings.catch_warnings():
            warnings.filterwarnings("ignore", ".+ (module|package)",
                                    DeprecationWarning)
            yield
    else:
        yield


def import_module(name, deprecated=False, *, required_on=()):
    """Import and return the module to be tested, raising SkipTest if
    it is not available.

    If deprecated is True, any module or package deprecation messages
    will be suppressed.  If a module is required on a platform but optional for
    others, set required_on to an iterable of platform prefixes which will be
    compared against sys.platform.
    """
    with _ignore_deprecated_imports(deprecated):
        try:
            return importlib.import_module(name)
        except ImportError as msg:
            if sys.platform.startswith(tuple(required_on)):
                raise
            raise unittest.SkipTest(str(msg))

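# A minimal usage sketch (illustrative comment only, not executed as part of
# this module): test files typically call import_module() at import time so
# the whole file is skipped when an optional dependency is unavailable.
#
#     from test import support
#     ssl = support.import_module('ssl')   # SkipTest if the ssl module is missing
#     # required_on marks platform prefixes where the module must exist, e.g.:
#     fcntl = support.import_module('fcntl', required_on=['linux'])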

def _save_and_remove_module(name, orig_modules):
    """Helper function to save and remove a module from sys.modules

    Raise ImportError if the module can't be imported.
    """
    # try to import the module and raise an error if it can't be imported
    if name not in sys.modules:
        __import__(name)
        del sys.modules[name]
    for modname in list(sys.modules):
        if modname == name or modname.startswith(name + '.'):
            orig_modules[modname] = sys.modules[modname]
            del sys.modules[modname]

def _save_and_block_module(name, orig_modules):
    """Helper function to save and block a module in sys.modules

    Return True if the module was in sys.modules, False otherwise.
    """
    saved = True
    try:
        orig_modules[name] = sys.modules[name]
    except KeyError:
        saved = False
    sys.modules[name] = None
    return saved


def anticipate_failure(condition):
    """Decorator to mark a test that is known to be broken in some cases

    Any use of this decorator should have a comment identifying the
    associated tracker issue.
    """
    if condition:
        return unittest.expectedFailure
    return lambda f: f

def load_package_tests(pkg_dir, loader, standard_tests, pattern):
    """Generic load_tests implementation for simple test packages.

    Most packages can implement load_tests using this function as follows:

        def load_tests(*args):
            return load_package_tests(os.path.dirname(__file__), *args)
    """
    if pattern is None:
        pattern = "test*"
    top_dir = os.path.dirname(              # Lib
                os.path.dirname(            # test
                  os.path.dirname(__file__)))  # support
    package_tests = loader.discover(start_dir=pkg_dir,
                                    top_level_dir=top_dir,
                                    pattern=pattern)
    standard_tests.addTests(package_tests)
    return standard_tests


def import_fresh_module(name, fresh=(), blocked=(), deprecated=False):
    """Import and return a module, deliberately bypassing sys.modules.

    This function imports and returns a fresh copy of the named Python module
    by removing the named module from sys.modules before doing the import.
    Note that unlike reload, the original module is not affected by
    this operation.

    *fresh* is an iterable of additional module names that are also removed
    from the sys.modules cache before doing the import.

    *blocked* is an iterable of module names that are replaced with None
    in the module cache during the import to ensure that attempts to import
    them raise ImportError.

    The named module and any modules named in the *fresh* and *blocked*
    parameters are saved before starting the import and then reinserted into
    sys.modules when the fresh import is complete.

    Module and package deprecation messages are suppressed during this import
    if *deprecated* is True.

    This function will raise ImportError if the named module cannot be
    imported.
    """
    # NOTE: test_heapq, test_json and test_warnings include extra sanity checks
    # to make sure that this utility function is working as expected
    with _ignore_deprecated_imports(deprecated):
        # Keep track of modules saved for later restoration as well
        # as those which just need a blocking entry removed
        orig_modules = {}
        names_to_remove = []
        _save_and_remove_module(name, orig_modules)
        try:
            for fresh_name in fresh:
                _save_and_remove_module(fresh_name, orig_modules)
            for blocked_name in blocked:
                if not _save_and_block_module(blocked_name, orig_modules):
                    names_to_remove.append(blocked_name)
            fresh_module = importlib.import_module(name)
        except ImportError:
            fresh_module = None
        finally:
            for orig_name, module in orig_modules.items():
                sys.modules[orig_name] = module
            for name_to_remove in names_to_remove:
                del sys.modules[name_to_remove]
        return fresh_module

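# A usage sketch (illustrative comment only): obtain a pure-Python copy of a
# module by blocking its C accelerator, or a copy guaranteed to use a freshly
# imported accelerator, without disturbing the instance cached in sys.modules.
# test_heapq relies on this same pattern.
#
#     from test import support
#     py_heapq = support.import_fresh_module('heapq', blocked=['_heapq'])
#     c_heapq = support.import_fresh_module('heapq', fresh=['_heapq'])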

def get_attribute(obj, name):
    """Get an attribute, raising SkipTest if AttributeError is raised."""
    try:
        attribute = getattr(obj, name)
    except AttributeError:
        raise unittest.SkipTest("object %r has no attribute %r" % (obj, name))
    else:
        return attribute

verbose = 1              # Flag set to 0 by regrtest.py
use_resources = None     # Flag set to [] by regrtest.py
max_memuse = 0           # Disable bigmem tests (they will still be run with
                         # small sizes, to make sure they work.)
real_max_memuse = 0
failfast = False
match_tests = None

# _original_stdout is meant to hold stdout at the time regrtest began.
# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
# The point is to have some flavor of stdout the user can actually see.
_original_stdout = None
def record_original_stdout(stdout):
    global _original_stdout
    _original_stdout = stdout

def get_original_stdout():
    return _original_stdout or sys.stdout

def unload(name):
    try:
        del sys.modules[name]
    except KeyError:
        pass

def _force_run(path, func, *args):
    try:
        return func(*args)
    except OSError as err:
        if verbose >= 2:
            print('%s: %s' % (err.__class__.__name__, err))
            print('re-run %s%r' % (func.__name__, args))
        os.chmod(path, stat.S_IRWXU)
        return func(*args)

if sys.platform.startswith("win"):
    def _waitfor(func, pathname, waitall=False):
        # Perform the operation
        func(pathname)
        # Now setup the wait loop
        if waitall:
            dirname = pathname
        else:
            dirname, name = os.path.split(pathname)
            dirname = dirname or '.'
        # Check for `pathname` to be removed from the filesystem.
        # The exponential backoff of the timeout amounts to a total
        # of ~1 second after which the deletion is probably an error
        # anyway.
        # Testing on an i7@4.3GHz shows that usually only 1 iteration is
        # required when contention occurs.
        timeout = 0.001
        while timeout < 1.0:
            # Note we are only testing for the existence of the file(s) in
            # the contents of the directory regardless of any security or
            # access rights.  If we have made it this far, we have sufficient
            # permissions to do that much using Python's equivalent of the
            # Windows API FindFirstFile.
            # Other Windows APIs can fail or give incorrect results when
            # dealing with files that are pending deletion.
            L = os.listdir(dirname)
            if not (L if waitall else name in L):
                return
            # Increase the timeout and try again
            time.sleep(timeout)
            timeout *= 2
        warnings.warn('tests may fail, delete still pending for ' + pathname,
                      RuntimeWarning, stacklevel=4)

    def _unlink(filename):
        _waitfor(os.unlink, filename)

    def _rmdir(dirname):
        _waitfor(os.rmdir, dirname)

    def _rmtree(path):
        def _rmtree_inner(path):
            for name in _force_run(path, os.listdir, path):
                fullname = os.path.join(path, name)
                try:
                    mode = os.lstat(fullname).st_mode
                except OSError as exc:
                    print("support.rmtree(): os.lstat(%r) failed with %s" % (fullname, exc),
                          file=sys.__stderr__)
                    mode = 0
                if stat.S_ISDIR(mode):
                    _waitfor(_rmtree_inner, fullname, waitall=True)
                    _force_run(fullname, os.rmdir, fullname)
                else:
                    _force_run(fullname, os.unlink, fullname)
        _waitfor(_rmtree_inner, path, waitall=True)
        _waitfor(lambda p: _force_run(p, os.rmdir, p), path)
else:
    _unlink = os.unlink
    _rmdir = os.rmdir

    def _rmtree(path):
        try:
            shutil.rmtree(path)
            return
        except OSError:
            pass

        def _rmtree_inner(path):
            for name in _force_run(path, os.listdir, path):
                fullname = os.path.join(path, name)
                try:
                    mode = os.lstat(fullname).st_mode
                except OSError:
                    mode = 0
                if stat.S_ISDIR(mode):
                    _rmtree_inner(fullname)
                    _force_run(path, os.rmdir, fullname)
                else:
                    _force_run(path, os.unlink, fullname)
        _rmtree_inner(path)
        os.rmdir(path)

def unlink(filename):
    try:
        _unlink(filename)
    except (FileNotFoundError, NotADirectoryError):
        pass

def rmdir(dirname):
    try:
        _rmdir(dirname)
    except FileNotFoundError:
        pass

def rmtree(path):
    try:
        _rmtree(path)
    except FileNotFoundError:
        pass

def make_legacy_pyc(source):
    """Move a PEP 3147/488 pyc file to its legacy pyc location.

    :param source: The file system path to the source file.  The source file
        does not need to exist, however the PEP 3147/488 pyc file must exist.
    :return: The file system path to the legacy pyc file.
    """
    pyc_file = importlib.util.cache_from_source(source)
    up_one = os.path.dirname(os.path.abspath(source))
    legacy_pyc = os.path.join(up_one, source + 'c')
    os.rename(pyc_file, legacy_pyc)
    return legacy_pyc

def forget(modname):
    """'Forget' a module was ever imported.

    This removes the module from sys.modules and deletes any PEP 3147/488 or
    legacy .pyc files.
    """
    unload(modname)
    for dirname in sys.path:
        source = os.path.join(dirname, modname + '.py')
        # It doesn't matter if they exist or not, unlink all possible
        # combinations of PEP 3147/488 and legacy pyc files.
        unlink(source + 'c')
        for opt in ('', 1, 2):
            unlink(importlib.util.cache_from_source(source, optimization=opt))

# Check whether a gui is actually available
def _is_gui_available():
    if hasattr(_is_gui_available, 'result'):
        return _is_gui_available.result
    reason = None
    if sys.platform.startswith('win'):
        # if Python is running as a service (such as the buildbot service),
        # gui interaction may be disallowed
        import ctypes
        import ctypes.wintypes
        UOI_FLAGS = 1
        WSF_VISIBLE = 0x0001
        class USEROBJECTFLAGS(ctypes.Structure):
            _fields_ = [("fInherit", ctypes.wintypes.BOOL),
                        ("fReserved", ctypes.wintypes.BOOL),
                        ("dwFlags", ctypes.wintypes.DWORD)]
        dll = ctypes.windll.user32
        h = dll.GetProcessWindowStation()
        if not h:
            raise ctypes.WinError()
        uof = USEROBJECTFLAGS()
        needed = ctypes.wintypes.DWORD()
        res = dll.GetUserObjectInformationW(h,
                                            UOI_FLAGS,
                                            ctypes.byref(uof),
                                            ctypes.sizeof(uof),
                                            ctypes.byref(needed))
        if not res:
            raise ctypes.WinError()
        if not bool(uof.dwFlags & WSF_VISIBLE):
            reason = "gui not available (WSF_VISIBLE flag not set)"
    elif sys.platform == 'darwin':
        # The Aqua Tk implementations on OS X can abort the process if
        # being called in an environment where a window server connection
        # cannot be made, for instance when invoked by a buildbot or ssh
        # process not running under the same user id as the current console
        # user.  To avoid that, raise an exception if the window manager
        # connection is not available.
        from ctypes import cdll, c_int, pointer, Structure
        from ctypes.util import find_library

        app_services = cdll.LoadLibrary(find_library("ApplicationServices"))

        if app_services.CGMainDisplayID() == 0:
            reason = "gui tests cannot run without OS X window manager"
        else:
            class ProcessSerialNumber(Structure):
                _fields_ = [("highLongOfPSN", c_int),
                            ("lowLongOfPSN", c_int)]
            psn = ProcessSerialNumber()
            psn_p = pointer(psn)
            if ( (app_services.GetCurrentProcess(psn_p) < 0) or
                 (app_services.SetFrontProcess(psn_p) < 0) ):
                reason = "cannot run without OS X gui process"

    # check on every platform whether tkinter can actually do anything
    if not reason:
        try:
            from tkinter import Tk
            root = Tk()
            root.withdraw()
            root.update()
            root.destroy()
        except Exception as e:
            err_string = str(e)
            if len(err_string) > 50:
                err_string = err_string[:50] + ' [...]'
            reason = 'Tk unavailable due to {}: {}'.format(type(e).__name__,
                                                           err_string)

    _is_gui_available.reason = reason
    _is_gui_available.result = not reason

    return _is_gui_available.result

def is_resource_enabled(resource):
    """Test whether a resource is enabled.

    Known resources are set by regrtest.py.  If not running under regrtest.py,
    all resources are assumed enabled unless use_resources has been set.
    """
    return use_resources is None or resource in use_resources

def requires(resource, msg=None):
    """Raise ResourceDenied if the specified resource is not available."""
    if not is_resource_enabled(resource):
        if msg is None:
            msg = "Use of the %r resource not enabled" % resource
        raise ResourceDenied(msg)
    if resource == 'gui' and not _is_gui_available():
        raise ResourceDenied(_is_gui_available.reason)

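# A usage sketch (illustrative comment only): guard a test on a regrtest
# resource.  ResourceDenied is a SkipTest subclass, so the test is reported as
# skipped rather than failed when the resource was not enabled (for example,
# when regrtest was run without -u network).
#
#     from test import support
#
#     class DownloadTests(unittest.TestCase):
#         def test_fetch(self):
#             support.requires('network')
#             ...
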
def _requires_unix_version(sysname, min_version):
    """Decorator raising SkipTest if the OS is `sysname` and the version is less
    than `min_version`.

    For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if
    the FreeBSD version is less than 7.2.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            if platform.system() == sysname:
                version_txt = platform.release().split('-', 1)[0]
                try:
                    version = tuple(map(int, version_txt.split('.')))
                except ValueError:
                    pass
                else:
                    if version < min_version:
                        min_version_txt = '.'.join(map(str, min_version))
                        raise unittest.SkipTest(
                            "%s version %s or higher required, not %s"
                            % (sysname, min_version_txt, version_txt))
            return func(*args, **kw)
        wrapper.min_version = min_version
        return wrapper
    return decorator

def requires_freebsd_version(*min_version):
    """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is
    less than `min_version`.

    For example, @requires_freebsd_version(7, 2) raises SkipTest if the FreeBSD
    version is less than 7.2.
    """
    return _requires_unix_version('FreeBSD', min_version)

def requires_linux_version(*min_version):
    """Decorator raising SkipTest if the OS is Linux and the Linux version is
    less than `min_version`.

    For example, @requires_linux_version(2, 6, 32) raises SkipTest if the Linux
    version is less than 2.6.32.
    """
    return _requires_unix_version('Linux', min_version)

def requires_mac_ver(*min_version):
    """Decorator raising SkipTest if the OS is Mac OS X and the OS X
    version is less than min_version.

    For example, @requires_mac_ver(10, 5) raises SkipTest if the OS X version
    is less than 10.5.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kw):
            if sys.platform == 'darwin':
                version_txt = platform.mac_ver()[0]
                try:
                    version = tuple(map(int, version_txt.split('.')))
                except ValueError:
                    pass
                else:
                    if version < min_version:
                        min_version_txt = '.'.join(map(str, min_version))
                        raise unittest.SkipTest(
                            "Mac OS X %s or higher required, not %s"
                            % (min_version_txt, version_txt))
            return func(*args, **kw)
        wrapper.min_version = min_version
        return wrapper
    return decorator

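# A usage sketch (illustrative comment only): the version decorators wrap
# individual test methods so they only run on a new-enough kernel or OS
# release; everywhere else they raise SkipTest.
#
#     class KernelFeatureTests(unittest.TestCase):
#         @support.requires_linux_version(2, 6, 32)
#         def test_new_syscall(self):
#             ...
#
#         @support.requires_mac_ver(10, 5)
#         def test_needs_leopard_or_later(self):
#             ...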

# Don't use "localhost", since resolving it uses the DNS under recent
# Windows versions (see issue #18792).
HOST = "127.0.0.1"
HOSTv6 = "::1"


def find_unused_port(family=socket.AF_INET, socktype=socket.SOCK_STREAM):
    """Returns an unused port that should be suitable for binding.  This is
    achieved by creating a temporary socket with the requested family and type
    (default is AF_INET, SOCK_STREAM), and binding it to the specified host
    address (defaults to 0.0.0.0) with the port set to 0, eliciting an unused
    ephemeral port from the OS.  The temporary socket is then closed and
    deleted, and the ephemeral port is returned.

    Either this method or bind_port() should be used for any tests where a
    server socket needs to be bound to a particular port for the duration of
    the test.  Which one to use depends on whether the calling code is creating
    a python socket, or if an unused port needs to be provided in a constructor
    or passed to an external program (i.e. the -accept argument to openssl's
    s_server mode).  Always prefer bind_port() over find_unused_port() where
    possible.  Hard coded ports should *NEVER* be used.  As soon as a server
    socket is bound to a hard coded port, the ability to run multiple instances
    of the test simultaneously on the same host is compromised, which makes the
    test a ticking time bomb in a buildbot environment.  On Unix buildbots, this
    may simply manifest as a failed test, which can be recovered from without
    intervention in most cases, but on Windows, the entire python process can
    completely and utterly wedge, requiring someone to log in to the buildbot
    and manually kill the affected process.

    (This is easy to reproduce on Windows, unfortunately, and can be traced to
    the SO_REUSEADDR socket option having different semantics on Windows versus
    Unix/Linux.  On Unix, you can't have two AF_INET SOCK_STREAM sockets bind,
    listen and then accept connections on identical host/ports.  An EADDRINUSE
    OSError will be raised at some point (depending on the platform and
    the order bind and listen were called on each socket).

    However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE
    will ever be raised when attempting to bind two identical host/ports.  When
    accept() is called on each socket, the second caller's process will steal
    the port from the first caller, leaving them both in an awkwardly wedged
    state where they'll no longer respond to any signals or graceful kills, and
    must be forcibly killed via OpenProcess()/TerminateProcess().

    The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option
    instead of SO_REUSEADDR, which effectively affords the same semantics as
    SO_REUSEADDR on Unix.  Given the propensity of Unix developers in the Open
    Source world compared to Windows ones, this is a common mistake.  A quick
    look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when
    openssl.exe is called with the 's_server' option, for example.  See
    http://bugs.python.org/issue2550 for more info.  The following site also
    has a very thorough description about the implications of both REUSEADDR
    and EXCLUSIVEADDRUSE on Windows:
    http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx)

    XXX: although this approach is a vast improvement on previous attempts to
    elicit unused ports, it rests heavily on the assumption that the ephemeral
    port returned to us by the OS won't immediately be dished back out to some
    other process when we close and delete our temporary socket but before our
    calling code has a chance to bind the returned port.  We can deal with this
    issue if/when we come across it.
    """

    tempsock = socket.socket(family, socktype)
    port = bind_port(tempsock)
    tempsock.close()
    del tempsock
    return port

def bind_port(sock, host=HOST):
    """Bind the socket to a free port and return the port number.  Relies on
    ephemeral ports in order to ensure we are using an unbound port.  This is
    important as many tests may be running simultaneously, especially in a
    buildbot environment.  This method raises an exception if the sock.family
    is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR
    or SO_REUSEPORT set on it.  Tests should *never* set these socket options
    for TCP/IP sockets.  The only case for setting these options is testing
    multicasting via multiple UDP sockets.

    Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e.
    on Windows), it will be set on the socket.  This will prevent anyone else
    from bind()'ing to our host/port for the duration of the test.
    """

    if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM:
        if hasattr(socket, 'SO_REUSEADDR'):
            if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1:
                raise TestFailed("tests should never set the SO_REUSEADDR "
                                 "socket option on TCP/IP sockets!")
        if hasattr(socket, 'SO_REUSEPORT'):
            try:
                if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1:
                    raise TestFailed("tests should never set the SO_REUSEPORT "
                                     "socket option on TCP/IP sockets!")
            except OSError:
                # Python's socket module was compiled using modern headers
                # thus defining SO_REUSEPORT but this process is running
                # under an older kernel that does not support SO_REUSEPORT.
                pass
        if hasattr(socket, 'SO_EXCLUSIVEADDRUSE'):
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)

    sock.bind((host, 0))
    port = sock.getsockname()[1]
    return port

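# A usage sketch (illustrative comment only): a test server binds to an
# ephemeral port chosen by the OS and the client connects to whatever port
# bind_port() reported, so parallel test runs never fight over a fixed port.
#
#     self.serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#     self.port = support.bind_port(self.serv, support.HOST)
#     self.serv.listen()
#     # ... connect a client to (support.HOST, self.port) ...
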
def bind_unix_socket(sock, addr):
    """Bind a unix socket, raising SkipTest if PermissionError is raised."""
    assert sock.family == socket.AF_UNIX
    try:
        sock.bind(addr)
    except PermissionError:
        sock.close()
        raise unittest.SkipTest('cannot bind AF_UNIX sockets')

def _is_ipv6_enabled():
    """Check whether IPv6 is enabled on this host."""
    if socket.has_ipv6:
        sock = None
        try:
            sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
            sock.bind((HOSTv6, 0))
            return True
        except OSError:
            pass
        finally:
            if sock:
                sock.close()
    return False

IPV6_ENABLED = _is_ipv6_enabled()

def system_must_validate_cert(f):
    """Skip the test on TLS certificate validation failures."""
    @functools.wraps(f)
    def dec(*args, **kwargs):
        try:
            f(*args, **kwargs)
        except IOError as e:
            if "CERTIFICATE_VERIFY_FAILED" in str(e):
                raise unittest.SkipTest("system does not contain "
                                        "necessary certificates")
            raise
    return dec

# A constant likely larger than the underlying OS pipe buffer size, to
# make writes blocking.
# Windows limit seems to be around 512 B, and many Unix kernels have a
# 64 KiB pipe buffer size or 16 * PAGE_SIZE: take a few megs to be sure.
# (see issue #17835 for a discussion of this number).
PIPE_MAX_SIZE = 4 * 1024 * 1024 + 1

# A constant likely larger than the underlying OS socket buffer size, to make
# writes blocking.
# The socket buffer sizes can usually be tuned system-wide (e.g. through sysctl
# on Linux), or on a per-socket basis (SO_SNDBUF/SO_RCVBUF).  See issue #18643
# for a discussion of this number.
SOCK_MAX_SIZE = 16 * 1024 * 1024 + 1

# decorator for skipping tests on non-IEEE 754 platforms
requires_IEEE_754 = unittest.skipUnless(
    float.__getformat__("double").startswith("IEEE"),
    "test requires IEEE 754 doubles")

requires_zlib = unittest.skipUnless(zlib, 'requires zlib')

requires_gzip = unittest.skipUnless(gzip, 'requires gzip')

requires_bz2 = unittest.skipUnless(bz2, 'requires bz2')

requires_lzma = unittest.skipUnless(lzma, 'requires lzma')

is_jython = sys.platform.startswith('java')

try:
    # constant used by requires_android_level()
    _ANDROID_API_LEVEL = sys.getandroidapilevel()
    is_android = True
except AttributeError:
    # sys.getandroidapilevel() is only available on Android
    is_android = False
android_not_root = (is_android and os.geteuid() != 0)

if sys.platform != 'win32':
    unix_shell = '/system/bin/sh' if is_android else '/bin/sh'
else:
    unix_shell = None

# Filename used for testing
if os.name == 'java':
    # Jython disallows @ in module names
    TESTFN = '$test'
else:
    TESTFN = '@test'

# Disambiguate TESTFN for parallel testing, while letting it remain a valid
# module name.
TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid())

# FS_NONASCII: non-ASCII character encodable by os.fsencode(),
# or None if there is no such character.
FS_NONASCII = None
for character in (
    # First try printable and common characters to have a readable filename.
    # For each character, the encoding list is just an example of encodings
    # able to encode the character (the list is not exhaustive).

    # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1
    '\u00E6',
    # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3
    '\u0130',
    # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257
    '\u0141',
    # U+03C6 (Greek Small Letter Phi): cp1253
    '\u03C6',
    # U+041A (Cyrillic Capital Letter Ka): cp1251
    '\u041A',
    # U+05D0 (Hebrew Letter Alef): Encodable to cp424
    '\u05D0',
    # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic
    '\u060C',
    # U+062A (Arabic Letter Teh): cp720
    '\u062A',
    # U+0E01 (Thai Character Ko Kai): cp874
    '\u0E01',

    # Then try more "special" characters. "special" because they may be
    # interpreted or displayed differently depending on the exact locale
    # encoding and the font.

    # U+00A0 (No-Break Space)
    '\u00A0',
    # U+20AC (Euro Sign)
    '\u20AC',
):
    try:
        os.fsdecode(os.fsencode(character))
    except UnicodeError:
        pass
    else:
        FS_NONASCII = character
        break

# TESTFN_UNICODE is a non-ascii filename
TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f"
if sys.platform == 'darwin':
    # In Mac OS X's VFS API file names are, by definition, canonically
    # decomposed Unicode, encoded using UTF-8. See QA1173:
    # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html
    import unicodedata
    TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE)
TESTFN_ENCODING = sys.getfilesystemencoding()

# TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be
# encoded by the filesystem encoding (in strict mode). It can be None if we
# cannot generate such filename.
TESTFN_UNENCODABLE = None
if os.name == 'nt':
    # skip win32s (0) or Windows 9x/ME (1)
    if sys.getwindowsversion().platform >= 2:
        # Different kinds of characters from various languages to minimize the
        # probability that the whole name is encodable to MBCS (issue #9819)
        TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80"
        try:
            TESTFN_UNENCODABLE.encode(TESTFN_ENCODING)
        except UnicodeEncodeError:
            pass
        else:
            print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). '
                  'Unicode filename tests may not be effective'
                  % (TESTFN_UNENCODABLE, TESTFN_ENCODING))
            TESTFN_UNENCODABLE = None
# Mac OS X denies unencodable filenames (invalid utf-8)
elif sys.platform != 'darwin':
    try:
        # ascii and utf-8 cannot encode the byte 0xff
        b'\xff'.decode(TESTFN_ENCODING)
    except UnicodeDecodeError:
        # 0xff will be encoded using the surrogate character u+DCFF
        TESTFN_UNENCODABLE = TESTFN \
            + b'-\xff'.decode(TESTFN_ENCODING, 'surrogateescape')
    else:
        # File system encoding (eg. ISO-8859-* encodings) can encode
        # the byte 0xff. Skip some unicode filename tests.
        pass

# TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be
# decoded from the filesystem encoding (in strict mode). It can be None if we
# cannot generate such filename (ex: the latin1 encoding can decode any byte
# sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks
# to the surrogateescape error handler (PEP 383), but not from the filesystem
# encoding in strict mode.
TESTFN_UNDECODABLE = None
for name in (
    # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows
    # accepts it to create a file or a directory, but may refuse to enter such
    # a directory (when the bytes name is used). So test b'\xe7' first: it is
    # not decodable from cp932.
    b'\xe7w\xf0',
    # undecodable from ASCII, UTF-8
    b'\xff',
    # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856
    # and cp857
    b'\xae\xd5'
    # undecodable from UTF-8 (UNIX and Mac OS X)
    b'\xed\xb2\x80', b'\xed\xb4\x80',
    # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252,
    # cp1253, cp1254, cp1255, cp1257, cp1258
    b'\x81\x98',
):
    try:
        name.decode(TESTFN_ENCODING)
    except UnicodeDecodeError:
        TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name
        break

if FS_NONASCII:
    TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII
else:
    TESTFN_NONASCII = None

# Save the initial cwd
SAVEDCWD = os.getcwd()

# Set by libregrtest/main.py so we can skip tests that are not
# useful for PGO
PGO = False

@contextlib.contextmanager
def temp_dir(path=None, quiet=False):
    """Return a context manager that creates a temporary directory.

    Arguments:

      path: the directory to create temporarily.  If omitted or None,
        defaults to creating a temporary directory using tempfile.mkdtemp.

      quiet: if False (the default), the context manager raises an exception
        on error.  Otherwise, if the path is specified and cannot be
        created, only a warning is issued.

    """
    dir_created = False
    if path is None:
        path = tempfile.mkdtemp()
        dir_created = True
        path = os.path.realpath(path)
    else:
        try:
            os.mkdir(path)
            dir_created = True
        except OSError as exc:
            if not quiet:
                raise
            warnings.warn(f'tests may fail, unable to create '
                          f'temporary directory {path!r}: {exc}',
                          RuntimeWarning, stacklevel=3)
    try:
        yield path
    finally:
        if dir_created:
            rmtree(path)

@contextlib.contextmanager
def change_cwd(path, quiet=False):
    """Return a context manager that changes the current working directory.

    Arguments:

      path: the directory to use as the temporary current working directory.

      quiet: if False (the default), the context manager raises an exception
        on error.  Otherwise, it issues only a warning and keeps the current
        working directory the same.

    """
    saved_dir = os.getcwd()
    try:
        os.chdir(path)
    except OSError as exc:
        if not quiet:
            raise
        warnings.warn(f'tests may fail, unable to change the current working '
                      f'directory to {path!r}: {exc}',
                      RuntimeWarning, stacklevel=3)
    try:
        yield os.getcwd()
    finally:
        os.chdir(saved_dir)


@contextlib.contextmanager
def temp_cwd(name='tempcwd', quiet=False):
    """
    Context manager that temporarily creates and changes the CWD.

    The function temporarily changes the current working directory
    after creating a temporary directory in the current directory with
    name *name*.  If *name* is None, the temporary directory is
    created using tempfile.mkdtemp.

    If *quiet* is False (default) and it is not possible to
    create or change the CWD, an error is raised.  If *quiet* is True,
    only a warning is raised and the original CWD is used.

    """
    with temp_dir(path=name, quiet=quiet) as temp_path:
        with change_cwd(temp_path, quiet=quiet) as cwd_dir:
            yield cwd_dir

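# A usage sketch (illustrative comment only): run test code inside a scratch
# directory; the directory is removed and the previous cwd restored on exit.
#
#     with support.temp_cwd() as path:
#         create_empty_file('scratch.txt')   # written under the temporary cwd
#         ...                                # cleaned up when the block exits
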
if hasattr(os, "umask"):
    @contextlib.contextmanager
    def temp_umask(umask):
        """Context manager that temporarily sets the process umask."""
        oldmask = os.umask(umask)
        try:
            yield
        finally:
            os.umask(oldmask)

# TEST_HOME_DIR refers to the top level directory of the "test" package
# that contains Python's regression test suite
TEST_SUPPORT_DIR = os.path.dirname(os.path.abspath(__file__))
TEST_HOME_DIR = os.path.dirname(TEST_SUPPORT_DIR)

# TEST_DATA_DIR is used as a target download location for remote resources
TEST_DATA_DIR = os.path.join(TEST_HOME_DIR, "data")

def findfile(filename, subdir=None):
    """Try to find a file on sys.path or in the test directory.  If it is not
    found the argument passed to the function is returned (this does not
    necessarily signal failure; could still be the legitimate path).

    Setting *subdir* indicates a relative path to use to find the file
    rather than looking directly in the path directories.
    """
    if os.path.isabs(filename):
        return filename
    if subdir is not None:
        filename = os.path.join(subdir, filename)
    path = [TEST_HOME_DIR] + sys.path
    for dn in path:
        fn = os.path.join(dn, filename)
        if os.path.exists(fn): return fn
    return filename

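# A usage sketch (illustrative comment only; the file names are hypothetical):
# locate a data file shipped with the test suite.  If nothing is found the
# name is returned unchanged, so the open() below fails with an ordinary
# FileNotFoundError.
#
#     fn = support.findfile('example.pem', subdir='data')
#     with open(fn, 'rb') as f:
#         payload = f.read()
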
def create_empty_file(filename):
    """Create an empty file. If the file already exists, truncate it."""
    fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
    os.close(fd)

def sortdict(dict):
    "Like repr(dict), but in sorted order."
    items = sorted(dict.items())
    reprpairs = ["%r: %r" % pair for pair in items]
    withcommas = ", ".join(reprpairs)
    return "{%s}" % withcommas

def make_bad_fd():
    """
    Create an invalid file descriptor by opening and closing a file and return
    its fd.
    """
    file = open(TESTFN, "wb")
    try:
        return file.fileno()
    finally:
        file.close()
        unlink(TESTFN)

def check_syntax_error(testcase, statement, *, lineno=None, offset=None):
    with testcase.assertRaises(SyntaxError) as cm:
        compile(statement, '<test string>', 'exec')
    err = cm.exception
    testcase.assertIsNotNone(err.lineno)
    if lineno is not None:
        testcase.assertEqual(err.lineno, lineno)
    testcase.assertIsNotNone(err.offset)
    if offset is not None:
        testcase.assertEqual(err.offset, offset)

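# A usage sketch (illustrative comment only): assert that a statement does not
# compile; lineno/offset can optionally pin down where the SyntaxError points.
#
#     class SyntaxTests(unittest.TestCase):
#         def test_assign_to_none(self):
#             support.check_syntax_error(self, "obj.None = 1")
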
1087 | n/a | def open_urlresource(url, *args, **kw): |
---|
1088 | n/a | import urllib.request, urllib.parse |
---|
1089 | n/a | |
---|
1090 | n/a | check = kw.pop('check', None) |
---|
1091 | n/a | |
---|
1092 | n/a | filename = urllib.parse.urlparse(url)[2].split('/')[-1] # '/': it's URL! |
---|
1093 | n/a | |
---|
1094 | n/a | fn = os.path.join(TEST_DATA_DIR, filename) |
---|
1095 | n/a | |
---|
1096 | n/a | def check_valid_file(fn): |
---|
1097 | n/a | f = open(fn, *args, **kw) |
---|
1098 | n/a | if check is None: |
---|
1099 | n/a | return f |
---|
1100 | n/a | elif check(f): |
---|
1101 | n/a | f.seek(0) |
---|
1102 | n/a | return f |
---|
1103 | n/a | f.close() |
---|
1104 | n/a | |
---|
1105 | n/a | if os.path.exists(fn): |
---|
1106 | n/a | f = check_valid_file(fn) |
---|
1107 | n/a | if f is not None: |
---|
1108 | n/a | return f |
---|
1109 | n/a | unlink(fn) |
---|
1110 | n/a | |
---|
1111 | n/a | # Verify the requirement before downloading the file |
---|
1112 | n/a | requires('urlfetch') |
---|
1113 | n/a | |
---|
1114 | n/a | if verbose: |
---|
1115 | n/a | print('\tfetching %s ...' % url, file=get_original_stdout()) |
---|
1116 | n/a | opener = urllib.request.build_opener() |
---|
1117 | n/a | if gzip: |
---|
1118 | n/a | opener.addheaders.append(('Accept-Encoding', 'gzip')) |
---|
1119 | n/a | f = opener.open(url, timeout=15) |
---|
1120 | n/a | if gzip and f.headers.get('Content-Encoding') == 'gzip': |
---|
1121 | n/a | f = gzip.GzipFile(fileobj=f) |
---|
1122 | n/a | try: |
---|
1123 | n/a | with open(fn, "wb") as out: |
---|
1124 | n/a | s = f.read() |
---|
1125 | n/a | while s: |
---|
1126 | n/a | out.write(s) |
---|
1127 | n/a | s = f.read() |
---|
1128 | n/a | finally: |
---|
1129 | n/a | f.close() |
---|
1130 | n/a | |
---|
1131 | n/a | f = check_valid_file(fn) |
---|
1132 | n/a | if f is not None: |
---|
1133 | n/a | return f |
---|
1134 | n/a | raise TestFailed('invalid resource %r' % fn) |
---|
1135 | n/a | |
---|
1136 | n/a | |
---|
1137 | n/a | class WarningsRecorder(object): |
---|
1138 | n/a | """Convenience wrapper for the warnings list returned on |
---|
1139 | n/a | entry to the warnings.catch_warnings() context manager. |
---|
1140 | n/a | """ |
---|
1141 | n/a | def __init__(self, warnings_list): |
---|
1142 | n/a | self._warnings = warnings_list |
---|
1143 | n/a | self._last = 0 |
---|
1144 | n/a | |
---|
1145 | n/a | def __getattr__(self, attr): |
---|
1146 | n/a | if len(self._warnings) > self._last: |
---|
1147 | n/a | return getattr(self._warnings[-1], attr) |
---|
1148 | n/a | elif attr in warnings.WarningMessage._WARNING_DETAILS: |
---|
1149 | n/a | return None |
---|
1150 | n/a | raise AttributeError("%r has no attribute %r" % (self, attr)) |
---|
1151 | n/a | |
---|
1152 | n/a | @property |
---|
1153 | n/a | def warnings(self): |
---|
1154 | n/a | return self._warnings[self._last:] |
---|
1155 | n/a | |
---|
1156 | n/a | def reset(self): |
---|
1157 | n/a | self._last = len(self._warnings) |
---|
1158 | n/a | |
---|
1159 | n/a | |
---|
1160 | n/a | def _filterwarnings(filters, quiet=False): |
---|
1161 | n/a | """Catch the warnings, then check if all the expected |
---|
1162 | n/a | warnings have been raised and re-raise unexpected warnings. |
---|
1163 | n/a | If 'quiet' is True, only re-raise the unexpected warnings. |
---|
1164 | n/a | """ |
---|
1165 | n/a | # Clear the warning registry of the calling module |
---|
1166 | n/a | # in order to re-raise the warnings. |
---|
1167 | n/a | frame = sys._getframe(2) |
---|
1168 | n/a | registry = frame.f_globals.get('__warningregistry__') |
---|
1169 | n/a | if registry: |
---|
1170 | n/a | registry.clear() |
---|
1171 | n/a | with warnings.catch_warnings(record=True) as w: |
---|
1172 | n/a | # Set filter "always" to record all warnings. Because |
---|
1173 | n/a | # test_warnings swaps the module, we need to look it up in |
---|
1174 | n/a | # the sys.modules dictionary. |
---|
1175 | n/a | sys.modules['warnings'].simplefilter("always") |
---|
1176 | n/a | yield WarningsRecorder(w) |
---|
1177 | n/a | # Filter the recorded warnings |
---|
1178 | n/a | reraise = list(w) |
---|
1179 | n/a | missing = [] |
---|
1180 | n/a | for msg, cat in filters: |
---|
1181 | n/a | seen = False |
---|
1182 | n/a | for w in reraise[:]: |
---|
1183 | n/a | warning = w.message |
---|
1184 | n/a | # Filter out the matching messages |
---|
1185 | n/a | if (re.match(msg, str(warning), re.I) and |
---|
1186 | n/a | issubclass(warning.__class__, cat)): |
---|
1187 | n/a | seen = True |
---|
1188 | n/a | reraise.remove(w) |
---|
1189 | n/a | if not seen and not quiet: |
---|
1190 | n/a | # This filter caught nothing |
---|
1191 | n/a | missing.append((msg, cat.__name__)) |
---|
1192 | n/a | if reraise: |
---|
1193 | n/a | raise AssertionError("unhandled warning %s" % reraise[0]) |
---|
1194 | n/a | if missing: |
---|
1195 | n/a | raise AssertionError("filter (%r, %s) did not catch any warning" % |
---|
1196 | n/a | missing[0]) |
---|
1197 | n/a | |
---|
1198 | n/a | |
---|
1199 | n/a | @contextlib.contextmanager |
---|
1200 | n/a | def check_warnings(*filters, **kwargs): |
---|
1201 | n/a | """Context manager to silence warnings. |
---|
1202 | n/a | |
---|
1203 | n/a | Accept 2-tuples as positional arguments: |
---|
1204 | n/a | ("message regexp", WarningCategory) |
---|
1205 | n/a | |
---|
1206 | n/a | Optional argument: |
---|
1207 | n/a | - if 'quiet' is True, it does not fail if a filter catches nothing |
---|
1208 | n/a | (default True without argument, |
---|
1209 | n/a | default False if some filters are defined) |
---|
1210 | n/a | |
---|
1211 | n/a | Without argument, it defaults to: |
---|
1212 | n/a | check_warnings(("", Warning), quiet=True) |
---|
1213 | n/a | """ |
---|
1214 | n/a | quiet = kwargs.get('quiet') |
---|
1215 | n/a | if not filters: |
---|
1216 | n/a | filters = (("", Warning),) |
---|
1217 | n/a | # Preserve backward compatibility |
---|
1218 | n/a | if quiet is None: |
---|
1219 | n/a | quiet = True |
---|
1220 | n/a | return _filterwarnings(filters, quiet) |
---|
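# Editor's illustrative sketch (not part of the original module): typical use of
# check_warnings() with an explicit (regexp, category) filter; the warning text
# is made up. The regexp is matched with re.match(), i.e. from the start.
import unittest
import warnings
from test import support

class _CheckWarningsExample(unittest.TestCase):
    def test_deprecation_is_reported(self):
        with support.check_warnings(("feature X", DeprecationWarning)) as w:
            warnings.warn("feature X is deprecated", DeprecationWarning)
        # The recorder proxies attributes of the last recorded warning.
        self.assertIs(w.category, DeprecationWarning)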
1221 | n/a | |
---|
1222 | n/a | |
---|
1223 | n/a | @contextlib.contextmanager |
---|
1224 | n/a | def check_no_resource_warning(testcase): |
---|
1225 | n/a | """Context manager to check that no ResourceWarning is emitted. |
---|
1226 | n/a | |
---|
1227 | n/a | Usage: |
---|
1228 | n/a | |
---|
1229 | n/a | with check_no_resource_warning(self): |
---|
1230 | n/a | f = open(...) |
---|
1231 | n/a | ... |
---|
1232 | n/a | del f |
---|
1233 | n/a | |
---|
1234 | n/a | You must remove the object which may emit ResourceWarning before |
---|
1235 | n/a | the end of the context manager. |
---|
1236 | n/a | """ |
---|
1237 | n/a | with warnings.catch_warnings(record=True) as warns: |
---|
1238 | n/a | warnings.filterwarnings('always', category=ResourceWarning) |
---|
1239 | n/a | yield |
---|
1240 | n/a | gc_collect() |
---|
1241 | n/a | testcase.assertEqual(warns, []) |
---|
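# Editor's illustrative sketch (not part of the original module): the pattern
# from the docstring above, using TESTFN as a scratch file.
import unittest
from test import support

class _NoResourceWarningExample(unittest.TestCase):
    def test_file_is_closed(self):
        with support.check_no_resource_warning(self):
            f = open(support.TESTFN, "w")
            f.close()            # closed explicitly, so no ResourceWarning
            del f                # drop the reference before the block ends
        support.unlink(support.TESTFN)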
1242 | n/a | |
---|
1243 | n/a | |
---|
1244 | n/a | class CleanImport(object): |
---|
1245 | n/a | """Context manager to force import to return a new module reference. |
---|
1246 | n/a | |
---|
1247 | n/a | This is useful for testing module-level behaviours, such as |
---|
1248 | n/a | the emission of a DeprecationWarning on import. |
---|
1249 | n/a | |
---|
1250 | n/a | Use like this: |
---|
1251 | n/a | |
---|
1252 | n/a | with CleanImport("foo"): |
---|
1253 | n/a | importlib.import_module("foo") # new reference |
---|
1254 | n/a | """ |
---|
1255 | n/a | |
---|
1256 | n/a | def __init__(self, *module_names): |
---|
1257 | n/a | self.original_modules = sys.modules.copy() |
---|
1258 | n/a | for module_name in module_names: |
---|
1259 | n/a | if module_name in sys.modules: |
---|
1260 | n/a | module = sys.modules[module_name] |
---|
1261 | n/a | # It is possible that module_name is just an alias for |
---|
1262 | n/a | # another module (e.g. stub for modules renamed in 3.x). |
---|
1263 | n/a | # In that case, we also need to delete the real module to clear |
---|
1264 | n/a | # the import cache. |
---|
1265 | n/a | if module.__name__ != module_name: |
---|
1266 | n/a | del sys.modules[module.__name__] |
---|
1267 | n/a | del sys.modules[module_name] |
---|
1268 | n/a | |
---|
1269 | n/a | def __enter__(self): |
---|
1270 | n/a | return self |
---|
1271 | n/a | |
---|
1272 | n/a | def __exit__(self, *ignore_exc): |
---|
1273 | n/a | sys.modules.update(self.original_modules) |
---|
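# Editor's illustrative sketch (not part of the original module): CleanImport
# forces a genuinely fresh module object for the duration of the block; json is
# just a convenient stand-in for the module under test.
import importlib
import json
from test import support

def _clean_import_example():
    with support.CleanImport("json"):
        fresh = importlib.import_module("json")
        # A new module object was created; the old reference is untouched,
        # and sys.modules is restored when the block exits.
        assert fresh is not json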
1274 | n/a | |
---|
1275 | n/a | |
---|
1276 | n/a | class EnvironmentVarGuard(collections.abc.MutableMapping): |
---|
1277 | n/a | |
---|
1278 | n/a | """Class to help protect the environment variable properly. Can be used as |
---|
1279 | n/a | a context manager.""" |
---|
1280 | n/a | |
---|
1281 | n/a | def __init__(self): |
---|
1282 | n/a | self._environ = os.environ |
---|
1283 | n/a | self._changed = {} |
---|
1284 | n/a | |
---|
1285 | n/a | def __getitem__(self, envvar): |
---|
1286 | n/a | return self._environ[envvar] |
---|
1287 | n/a | |
---|
1288 | n/a | def __setitem__(self, envvar, value): |
---|
1289 | n/a | # Remember the initial value on the first access |
---|
1290 | n/a | if envvar not in self._changed: |
---|
1291 | n/a | self._changed[envvar] = self._environ.get(envvar) |
---|
1292 | n/a | self._environ[envvar] = value |
---|
1293 | n/a | |
---|
1294 | n/a | def __delitem__(self, envvar): |
---|
1295 | n/a | # Remember the initial value on the first access |
---|
1296 | n/a | if envvar not in self._changed: |
---|
1297 | n/a | self._changed[envvar] = self._environ.get(envvar) |
---|
1298 | n/a | if envvar in self._environ: |
---|
1299 | n/a | del self._environ[envvar] |
---|
1300 | n/a | |
---|
1301 | n/a | def keys(self): |
---|
1302 | n/a | return self._environ.keys() |
---|
1303 | n/a | |
---|
1304 | n/a | def __iter__(self): |
---|
1305 | n/a | return iter(self._environ) |
---|
1306 | n/a | |
---|
1307 | n/a | def __len__(self): |
---|
1308 | n/a | return len(self._environ) |
---|
1309 | n/a | |
---|
1310 | n/a | def set(self, envvar, value): |
---|
1311 | n/a | self[envvar] = value |
---|
1312 | n/a | |
---|
1313 | n/a | def unset(self, envvar): |
---|
1314 | n/a | del self[envvar] |
---|
1315 | n/a | |
---|
1316 | n/a | def __enter__(self): |
---|
1317 | n/a | return self |
---|
1318 | n/a | |
---|
1319 | n/a | def __exit__(self, *ignore_exc): |
---|
1320 | n/a | for (k, v) in self._changed.items(): |
---|
1321 | n/a | if v is None: |
---|
1322 | n/a | if k in self._environ: |
---|
1323 | n/a | del self._environ[k] |
---|
1324 | n/a | else: |
---|
1325 | n/a | self._environ[k] = v |
---|
1326 | n/a | os.environ = self._environ |
---|
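# Editor's illustrative sketch (not part of the original module): temporarily
# overriding and removing environment variables; every change is rolled back
# when the context manager exits.
import os
from test import support

def _env_guard_example():
    with support.EnvironmentVarGuard() as env:
        env["LANG"] = "C"           # original value remembered and restored
        env.unset("PYTHONPATH")     # removal is also undone on exit
        assert os.environ["LANG"] == "C"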
1327 | n/a | |
---|
1328 | n/a | |
---|
1329 | n/a | class DirsOnSysPath(object): |
---|
1330 | n/a | """Context manager to temporarily add directories to sys.path. |
---|
1331 | n/a | |
---|
1332 | n/a | This makes a copy of sys.path, appends any directories given |
---|
1333 | n/a | as positional arguments, then reverts sys.path to the copied |
---|
1334 | n/a | settings when the context ends. |
---|
1335 | n/a | |
---|
1336 | n/a | Note that *all* sys.path modifications in the body of the |
---|
1337 | n/a | context manager, including replacement of the object, |
---|
1338 | n/a | will be reverted at the end of the block. |
---|
1339 | n/a | """ |
---|
1340 | n/a | |
---|
1341 | n/a | def __init__(self, *paths): |
---|
1342 | n/a | self.original_value = sys.path[:] |
---|
1343 | n/a | self.original_object = sys.path |
---|
1344 | n/a | sys.path.extend(paths) |
---|
1345 | n/a | |
---|
1346 | n/a | def __enter__(self): |
---|
1347 | n/a | return self |
---|
1348 | n/a | |
---|
1349 | n/a | def __exit__(self, *ignore_exc): |
---|
1350 | n/a | sys.path = self.original_object |
---|
1351 | n/a | sys.path[:] = self.original_value |
---|
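# Editor's illustrative sketch (not part of the original module): making a
# temporary directory importable only for the duration of a block.
import sys
import tempfile
from test import support

def _dirs_on_sys_path_example():
    with tempfile.TemporaryDirectory() as tmpdir:
        with support.DirsOnSysPath(tmpdir):
            assert tmpdir in sys.path
        assert tmpdir not in sys.path   # sys.path fully restored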
1352 | n/a | |
---|
1353 | n/a | |
---|
1354 | n/a | class TransientResource(object): |
---|
1355 | n/a | |
---|
1356 | n/a | """Raise ResourceDenied if an exception is raised while the context manager |
---|
1357 | n/a | is in effect that matches the specified exception and attributes.""" |
---|
1358 | n/a | |
---|
1359 | n/a | def __init__(self, exc, **kwargs): |
---|
1360 | n/a | self.exc = exc |
---|
1361 | n/a | self.attrs = kwargs |
---|
1362 | n/a | |
---|
1363 | n/a | def __enter__(self): |
---|
1364 | n/a | return self |
---|
1365 | n/a | |
---|
1366 | n/a | def __exit__(self, type_=None, value=None, traceback=None): |
---|
1367 | n/a | """If type_ is a subclass of self.exc and value has attributes matching |
---|
1368 | n/a | self.attrs, raise ResourceDenied. Otherwise let the exception |
---|
1369 | n/a | propagate (if any).""" |
---|
1370 | n/a | if type_ is not None and issubclass(type_, self.exc): |
---|
1371 | n/a | for attr, attr_value in self.attrs.items(): |
---|
1372 | n/a | if not hasattr(value, attr): |
---|
1373 | n/a | break |
---|
1374 | n/a | if getattr(value, attr) != attr_value: |
---|
1375 | n/a | break |
---|
1376 | n/a | else: |
---|
1377 | n/a | raise ResourceDenied("an optional resource is not available") |
---|
1378 | n/a | |
---|
1379 | n/a | # Context managers that raise ResourceDenied when various issues |
---|
1380 | n/a | # with the Internet connection manifest themselves as exceptions. |
---|
1381 | n/a | # XXX deprecate these and use transient_internet() instead |
---|
1382 | n/a | time_out = TransientResource(OSError, errno=errno.ETIMEDOUT) |
---|
1383 | n/a | socket_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) |
---|
1384 | n/a | ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) |
---|
1385 | n/a | |
---|
1386 | n/a | |
---|
1387 | n/a | @contextlib.contextmanager |
---|
1388 | n/a | def transient_internet(resource_name, *, timeout=30.0, errnos=()): |
---|
1389 | n/a | """Return a context manager that raises ResourceDenied when various issues |
---|
1390 | n/a | with the Internet connection manifest themselves as exceptions.""" |
---|
1391 | n/a | default_errnos = [ |
---|
1392 | n/a | ('ECONNREFUSED', 111), |
---|
1393 | n/a | ('ECONNRESET', 104), |
---|
1394 | n/a | ('EHOSTUNREACH', 113), |
---|
1395 | n/a | ('ENETUNREACH', 101), |
---|
1396 | n/a | ('ETIMEDOUT', 110), |
---|
1397 | n/a | ] |
---|
1398 | n/a | default_gai_errnos = [ |
---|
1399 | n/a | ('EAI_AGAIN', -3), |
---|
1400 | n/a | ('EAI_FAIL', -4), |
---|
1401 | n/a | ('EAI_NONAME', -2), |
---|
1402 | n/a | ('EAI_NODATA', -5), |
---|
1403 | n/a | # Encountered when trying to resolve IPv6-only hostnames |
---|
1404 | n/a | ('WSANO_DATA', 11004), |
---|
1405 | n/a | ] |
---|
1406 | n/a | |
---|
1407 | n/a | denied = ResourceDenied("Resource %r is not available" % resource_name) |
---|
1408 | n/a | captured_errnos = errnos |
---|
1409 | n/a | gai_errnos = [] |
---|
1410 | n/a | if not captured_errnos: |
---|
1411 | n/a | captured_errnos = [getattr(errno, name, num) |
---|
1412 | n/a | for (name, num) in default_errnos] |
---|
1413 | n/a | gai_errnos = [getattr(socket, name, num) |
---|
1414 | n/a | for (name, num) in default_gai_errnos] |
---|
1415 | n/a | |
---|
1416 | n/a | def filter_error(err): |
---|
1417 | n/a | n = getattr(err, 'errno', None) |
---|
1418 | n/a | if (isinstance(err, socket.timeout) or |
---|
1419 | n/a | (isinstance(err, socket.gaierror) and n in gai_errnos) or |
---|
1420 | n/a | (isinstance(err, urllib.error.HTTPError) and |
---|
1421 | n/a | 500 <= err.code <= 599) or |
---|
1422 | n/a | (isinstance(err, urllib.error.URLError) and |
---|
1423 | n/a | (("ConnectionRefusedError" in err.reason) or |
---|
1424 | n/a | ("TimeoutError" in err.reason) or |
---|
1425 | n/a | ("EOFError" in err.reason))) or |
---|
1426 | n/a | n in captured_errnos): |
---|
1427 | n/a | if not verbose: |
---|
1428 | n/a | sys.stderr.write(denied.args[0] + "\n") |
---|
1429 | n/a | raise denied from err |
---|
1430 | n/a | |
---|
1431 | n/a | old_timeout = socket.getdefaulttimeout() |
---|
1432 | n/a | try: |
---|
1433 | n/a | if timeout is not None: |
---|
1434 | n/a | socket.setdefaulttimeout(timeout) |
---|
1435 | n/a | yield |
---|
1436 | n/a | except nntplib.NNTPTemporaryError as err: |
---|
1437 | n/a | if verbose: |
---|
1438 | n/a | sys.stderr.write(denied.args[0] + "\n") |
---|
1439 | n/a | raise denied from err |
---|
1440 | n/a | except OSError as err: |
---|
1441 | n/a | # urllib can wrap original socket errors multiple times (!), so we must |
---|
1442 | n/a | # unwrap to get at the original error. |
---|
1443 | n/a | while True: |
---|
1444 | n/a | a = err.args |
---|
1445 | n/a | if len(a) >= 1 and isinstance(a[0], OSError): |
---|
1446 | n/a | err = a[0] |
---|
1447 | n/a | # The error can also be wrapped as args[1]: |
---|
1448 | n/a | # except socket.error as msg: |
---|
1449 | n/a | # raise OSError('socket error', msg).with_traceback(sys.exc_info()[2]) |
---|
1450 | n/a | elif len(a) >= 2 and isinstance(a[1], OSError): |
---|
1451 | n/a | err = a[1] |
---|
1452 | n/a | else: |
---|
1453 | n/a | break |
---|
1454 | n/a | filter_error(err) |
---|
1455 | n/a | raise |
---|
1456 | n/a | # XXX should we catch generic exceptions and look for their |
---|
1457 | n/a | # __cause__ or __context__? |
---|
1458 | n/a | finally: |
---|
1459 | n/a | socket.setdefaulttimeout(old_timeout) |
---|
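# Editor's illustrative sketch (not part of the original module): connectivity
# problems inside the block become ResourceDenied (treated as a skip) rather
# than a test error; the host name is a placeholder.
import socket
from test import support

def _transient_internet_example():
    with support.transient_internet("www.example.com"):
        conn = socket.create_connection(("www.example.com", 80), timeout=15)
        conn.close()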
1460 | n/a | |
---|
1461 | n/a | |
---|
1462 | n/a | @contextlib.contextmanager |
---|
1463 | n/a | def captured_output(stream_name): |
---|
1464 | n/a | """Return a context manager used by captured_stdout/stdin/stderr |
---|
1465 | n/a | that temporarily replaces the sys stream *stream_name* with a StringIO.""" |
---|
1466 | n/a | import io |
---|
1467 | n/a | orig_stdout = getattr(sys, stream_name) |
---|
1468 | n/a | setattr(sys, stream_name, io.StringIO()) |
---|
1469 | n/a | try: |
---|
1470 | n/a | yield getattr(sys, stream_name) |
---|
1471 | n/a | finally: |
---|
1472 | n/a | setattr(sys, stream_name, orig_stdout) |
---|
1473 | n/a | |
---|
1474 | n/a | def captured_stdout(): |
---|
1475 | n/a | """Capture the output of sys.stdout: |
---|
1476 | n/a | |
---|
1477 | n/a | with captured_stdout() as stdout: |
---|
1478 | n/a | print("hello") |
---|
1479 | n/a | self.assertEqual(stdout.getvalue(), "hello\\n") |
---|
1480 | n/a | """ |
---|
1481 | n/a | return captured_output("stdout") |
---|
1482 | n/a | |
---|
1483 | n/a | def captured_stderr(): |
---|
1484 | n/a | """Capture the output of sys.stderr: |
---|
1485 | n/a | |
---|
1486 | n/a | with captured_stderr() as stderr: |
---|
1487 | n/a | print("hello", file=sys.stderr) |
---|
1488 | n/a | self.assertEqual(stderr.getvalue(), "hello\\n") |
---|
1489 | n/a | """ |
---|
1490 | n/a | return captured_output("stderr") |
---|
1491 | n/a | |
---|
1492 | n/a | def captured_stdin(): |
---|
1493 | n/a | """Capture the input to sys.stdin: |
---|
1494 | n/a | |
---|
1495 | n/a | with captured_stdin() as stdin: |
---|
1496 | n/a | stdin.write('hello\\n') |
---|
1497 | n/a | stdin.seek(0) |
---|
1498 | n/a | # call test code that consumes from sys.stdin |
---|
1499 | n/a | captured = input() |
---|
1500 | n/a | self.assertEqual(captured, "hello") |
---|
1501 | n/a | """ |
---|
1502 | n/a | return captured_output("stdin") |
---|
1503 | n/a | |
---|
1504 | n/a | |
---|
1505 | n/a | def gc_collect(): |
---|
1506 | n/a | """Force as many objects as possible to be collected. |
---|
1507 | n/a | |
---|
1508 | n/a | In non-CPython implementations of Python, this is needed because timely |
---|
1509 | n/a | deallocation is not guaranteed by the garbage collector. (Even in CPython |
---|
1510 | n/a | this can be the case when there are reference cycles.) This means that __del__ |
---|
1511 | n/a | methods may be called later than expected and weakrefs may remain alive for |
---|
1512 | n/a | longer than expected. This function tries its best to force all garbage |
---|
1513 | n/a | objects to disappear. |
---|
1514 | n/a | """ |
---|
1515 | n/a | gc.collect() |
---|
1516 | n/a | if is_jython: |
---|
1517 | n/a | time.sleep(0.1) |
---|
1518 | n/a | gc.collect() |
---|
1519 | n/a | gc.collect() |
---|
1520 | n/a | |
---|
1521 | n/a | @contextlib.contextmanager |
---|
1522 | n/a | def disable_gc(): |
---|
1523 | n/a | have_gc = gc.isenabled() |
---|
1524 | n/a | gc.disable() |
---|
1525 | n/a | try: |
---|
1526 | n/a | yield |
---|
1527 | n/a | finally: |
---|
1528 | n/a | if have_gc: |
---|
1529 | n/a | gc.enable() |
---|
1530 | n/a | |
---|
1531 | n/a | |
---|
1532 | n/a | def python_is_optimized(): |
---|
1533 | n/a | """Find if Python was built with optimizations.""" |
---|
1534 | n/a | cflags = sysconfig.get_config_var('PY_CFLAGS') or '' |
---|
1535 | n/a | final_opt = "" |
---|
1536 | n/a | for opt in cflags.split(): |
---|
1537 | n/a | if opt.startswith('-O'): |
---|
1538 | n/a | final_opt = opt |
---|
1539 | n/a | return final_opt not in ('', '-O0', '-Og') |
---|
1540 | n/a | |
---|
1541 | n/a | |
---|
1542 | n/a | _header = 'nP' |
---|
1543 | n/a | _align = '0n' |
---|
1544 | n/a | if hasattr(sys, "gettotalrefcount"): |
---|
1545 | n/a | _header = '2P' + _header |
---|
1546 | n/a | _align = '0P' |
---|
1547 | n/a | _vheader = _header + 'n' |
---|
1548 | n/a | |
---|
1549 | n/a | def calcobjsize(fmt): |
---|
1550 | n/a | return struct.calcsize(_header + fmt + _align) |
---|
1551 | n/a | |
---|
1552 | n/a | def calcvobjsize(fmt): |
---|
1553 | n/a | return struct.calcsize(_vheader + fmt + _align) |
---|
1554 | n/a | |
---|
1555 | n/a | |
---|
1556 | n/a | _TPFLAGS_HAVE_GC = 1<<14 |
---|
1557 | n/a | _TPFLAGS_HEAPTYPE = 1<<9 |
---|
1558 | n/a | |
---|
1559 | n/a | def check_sizeof(test, o, size): |
---|
1560 | n/a | import _testcapi |
---|
1561 | n/a | result = sys.getsizeof(o) |
---|
1562 | n/a | # add GC header size |
---|
1563 | n/a | if ((type(o) == type) and (o.__flags__ & _TPFLAGS_HEAPTYPE) or |
---|
1564 | n/a | ((type(o) != type) and (type(o).__flags__ & _TPFLAGS_HAVE_GC))): |
---|
1565 | n/a | size += _testcapi.SIZEOF_PYGC_HEAD |
---|
1566 | n/a | msg = 'wrong size for %s: got %d, expected %d' \ |
---|
1567 | n/a | % (type(o), result, size) |
---|
1568 | n/a | test.assertEqual(result, size, msg) |
---|
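# Editor's illustrative sketch (not part of the original module, CPython-only
# since check_sizeof() imports _testcapi): struct format codes describe the
# object layout after the standard header; an empty tuple has no extra items.
from test import support

def _sizeof_example(testcase):
    support.check_sizeof(testcase, (), support.calcvobjsize(''))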
1569 | n/a | |
---|
1570 | n/a | #======================================================================= |
---|
1571 | n/a | # Decorator for running a function in a different locale, correctly resetting |
---|
1572 | n/a | # it afterwards. |
---|
1573 | n/a | |
---|
1574 | n/a | def run_with_locale(catstr, *locales): |
---|
1575 | n/a | def decorator(func): |
---|
1576 | n/a | def inner(*args, **kwds): |
---|
1577 | n/a | try: |
---|
1578 | n/a | import locale |
---|
1579 | n/a | category = getattr(locale, catstr) |
---|
1580 | n/a | orig_locale = locale.setlocale(category) |
---|
1581 | n/a | except AttributeError: |
---|
1582 | n/a | # if the test author gives us an invalid category string |
---|
1583 | n/a | raise |
---|
1584 | n/a | except: |
---|
1585 | n/a | # cannot retrieve original locale, so do nothing |
---|
1586 | n/a | locale = orig_locale = None |
---|
1587 | n/a | else: |
---|
1588 | n/a | for loc in locales: |
---|
1589 | n/a | try: |
---|
1590 | n/a | locale.setlocale(category, loc) |
---|
1591 | n/a | break |
---|
1592 | n/a | except: |
---|
1593 | n/a | pass |
---|
1594 | n/a | |
---|
1595 | n/a | # now run the function, resetting the locale on exceptions |
---|
1596 | n/a | try: |
---|
1597 | n/a | return func(*args, **kwds) |
---|
1598 | n/a | finally: |
---|
1599 | n/a | if locale and orig_locale: |
---|
1600 | n/a | locale.setlocale(category, orig_locale) |
---|
1601 | n/a | inner.__name__ = func.__name__ |
---|
1602 | n/a | inner.__doc__ = func.__doc__ |
---|
1603 | n/a | return inner |
---|
1604 | n/a | return decorator |
---|
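# Editor's illustrative sketch (not part of the original module): the decorated
# function runs under the first locale in the list that the platform accepts
# ('' falls back to the user's default locale).
import locale
from test import support

@support.run_with_locale('LC_NUMERIC', 'de_DE.UTF-8', 'de_DE', '')
def _locale_example():
    # Under a German locale the decimal separator may be ','.
    return locale.localeconv()['decimal_point']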
1605 | n/a | |
---|
1606 | n/a | #======================================================================= |
---|
1607 | n/a | # Decorator for running a function in a specific timezone, correctly |
---|
1608 | n/a | # resetting it afterwards. |
---|
1609 | n/a | |
---|
1610 | n/a | def run_with_tz(tz): |
---|
1611 | n/a | def decorator(func): |
---|
1612 | n/a | def inner(*args, **kwds): |
---|
1613 | n/a | try: |
---|
1614 | n/a | tzset = time.tzset |
---|
1615 | n/a | except AttributeError: |
---|
1616 | n/a | raise unittest.SkipTest("tzset required") |
---|
1617 | n/a | if 'TZ' in os.environ: |
---|
1618 | n/a | orig_tz = os.environ['TZ'] |
---|
1619 | n/a | else: |
---|
1620 | n/a | orig_tz = None |
---|
1621 | n/a | os.environ['TZ'] = tz |
---|
1622 | n/a | tzset() |
---|
1623 | n/a | |
---|
1624 | n/a | # now run the function, resetting the tz on exceptions |
---|
1625 | n/a | try: |
---|
1626 | n/a | return func(*args, **kwds) |
---|
1627 | n/a | finally: |
---|
1628 | n/a | if orig_tz is None: |
---|
1629 | n/a | del os.environ['TZ'] |
---|
1630 | n/a | else: |
---|
1631 | n/a | os.environ['TZ'] = orig_tz |
---|
1632 | n/a | time.tzset() |
---|
1633 | n/a | |
---|
1634 | n/a | inner.__name__ = func.__name__ |
---|
1635 | n/a | inner.__doc__ = func.__doc__ |
---|
1636 | n/a | return inner |
---|
1637 | n/a | return decorator |
---|
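# Editor's illustrative sketch (not part of the original module): TZ is swapped
# in (and time.tzset() called) only for the duration of the decorated function.
import time
from test import support

@support.run_with_tz('UTC')
def _tz_example():
    # With TZ=UTC applied, the local offset from UTC is zero.
    assert time.timezone == 0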
1638 | n/a | |
---|
1639 | n/a | #======================================================================= |
---|
1640 | n/a | # Big-memory-test support. Separate from 'resources' because memory use |
---|
1641 | n/a | # should be configurable. |
---|
1642 | n/a | |
---|
1643 | n/a | # Some handy shorthands. Note that these are used for byte-limits as well |
---|
1644 | n/a | # as size-limits, in the various bigmem tests |
---|
1645 | n/a | _1M = 1024*1024 |
---|
1646 | n/a | _1G = 1024 * _1M |
---|
1647 | n/a | _2G = 2 * _1G |
---|
1648 | n/a | _4G = 4 * _1G |
---|
1649 | n/a | |
---|
1650 | n/a | MAX_Py_ssize_t = sys.maxsize |
---|
1651 | n/a | |
---|
1652 | n/a | def set_memlimit(limit): |
---|
1653 | n/a | global max_memuse |
---|
1654 | n/a | global real_max_memuse |
---|
1655 | n/a | sizes = { |
---|
1656 | n/a | 'k': 1024, |
---|
1657 | n/a | 'm': _1M, |
---|
1658 | n/a | 'g': _1G, |
---|
1659 | n/a | 't': 1024*_1G, |
---|
1660 | n/a | } |
---|
1661 | n/a | m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit, |
---|
1662 | n/a | re.IGNORECASE | re.VERBOSE) |
---|
1663 | n/a | if m is None: |
---|
1664 | n/a | raise ValueError('Invalid memory limit %r' % (limit,)) |
---|
1665 | n/a | memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()]) |
---|
1666 | n/a | real_max_memuse = memlimit |
---|
1667 | n/a | if memlimit > MAX_Py_ssize_t: |
---|
1668 | n/a | memlimit = MAX_Py_ssize_t |
---|
1669 | n/a | if memlimit < _2G - 1: |
---|
1670 | n/a | raise ValueError('Memory limit %r too low to be useful' % (limit,)) |
---|
1671 | n/a | max_memuse = memlimit |
---|
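# Editor's illustrative sketch (not part of the original module): the limit
# string mirrors regrtest's -M option; note that set_memlimit() mutates the
# module-level max_memuse/real_max_memuse globals.
from test import support

def _memlimit_example():
    support.set_memlimit('2.5g')    # 2.5 GiB
    assert support.real_max_memuse == int(2.5 * 1024 ** 3)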
1672 | n/a | |
---|
1673 | n/a | class _MemoryWatchdog: |
---|
1674 | n/a | """An object which periodically watches the process' memory consumption |
---|
1675 | n/a | and prints it out. |
---|
1676 | n/a | """ |
---|
1677 | n/a | |
---|
1678 | n/a | def __init__(self): |
---|
1679 | n/a | self.procfile = '/proc/{pid}/statm'.format(pid=os.getpid()) |
---|
1680 | n/a | self.started = False |
---|
1681 | n/a | |
---|
1682 | n/a | def start(self): |
---|
1683 | n/a | try: |
---|
1684 | n/a | f = open(self.procfile, 'r') |
---|
1685 | n/a | except OSError as e: |
---|
1686 | n/a | warnings.warn('/proc not available for stats: {}'.format(e), |
---|
1687 | n/a | RuntimeWarning) |
---|
1688 | n/a | sys.stderr.flush() |
---|
1689 | n/a | return |
---|
1690 | n/a | |
---|
1691 | n/a | watchdog_script = findfile("memory_watchdog.py") |
---|
1692 | n/a | self.mem_watchdog = subprocess.Popen([sys.executable, watchdog_script], |
---|
1693 | n/a | stdin=f, stderr=subprocess.DEVNULL) |
---|
1694 | n/a | f.close() |
---|
1695 | n/a | self.started = True |
---|
1696 | n/a | |
---|
1697 | n/a | def stop(self): |
---|
1698 | n/a | if self.started: |
---|
1699 | n/a | self.mem_watchdog.terminate() |
---|
1700 | n/a | self.mem_watchdog.wait() |
---|
1701 | n/a | |
---|
1702 | n/a | |
---|
1703 | n/a | def bigmemtest(size, memuse, dry_run=True): |
---|
1704 | n/a | """Decorator for bigmem tests. |
---|
1705 | n/a | |
---|
1706 | n/a | 'size' is a requested size for the test (in arbitrary, test-interpreted |
---|
1707 | n/a | units.) 'memuse' is the number of bytes per unit for the test, or a good |
---|
1708 | n/a | estimate of it. For example, a test that needs two byte buffers, of 4 GiB |
---|
1709 | n/a | each, could be decorated with @bigmemtest(size=_4G, memuse=2). |
---|
1710 | n/a | |
---|
1711 | n/a | The 'size' argument is normally passed to the decorated test method as an |
---|
1712 | n/a | extra argument. If 'dry_run' is true, the value passed to the test method |
---|
1713 | n/a | may be less than the requested value. If 'dry_run' is false, it means the |
---|
1714 | n/a | test doesn't support dummy runs when -M is not specified. |
---|
1715 | n/a | """ |
---|
1716 | n/a | def decorator(f): |
---|
1717 | n/a | def wrapper(self): |
---|
1718 | n/a | size = wrapper.size |
---|
1719 | n/a | memuse = wrapper.memuse |
---|
1720 | n/a | if not real_max_memuse: |
---|
1721 | n/a | maxsize = 5147 |
---|
1722 | n/a | else: |
---|
1723 | n/a | maxsize = size |
---|
1724 | n/a | |
---|
1725 | n/a | if ((real_max_memuse or not dry_run) |
---|
1726 | n/a | and real_max_memuse < maxsize * memuse): |
---|
1727 | n/a | raise unittest.SkipTest( |
---|
1728 | n/a | "not enough memory: %.1fG minimum needed" |
---|
1729 | n/a | % (size * memuse / (1024 ** 3))) |
---|
1730 | n/a | |
---|
1731 | n/a | if real_max_memuse and verbose: |
---|
1732 | n/a | print() |
---|
1733 | n/a | print(" ... expected peak memory use: {peak:.1f}G" |
---|
1734 | n/a | .format(peak=size * memuse / (1024 ** 3))) |
---|
1735 | n/a | watchdog = _MemoryWatchdog() |
---|
1736 | n/a | watchdog.start() |
---|
1737 | n/a | else: |
---|
1738 | n/a | watchdog = None |
---|
1739 | n/a | |
---|
1740 | n/a | try: |
---|
1741 | n/a | return f(self, maxsize) |
---|
1742 | n/a | finally: |
---|
1743 | n/a | if watchdog: |
---|
1744 | n/a | watchdog.stop() |
---|
1745 | n/a | |
---|
1746 | n/a | wrapper.size = size |
---|
1747 | n/a | wrapper.memuse = memuse |
---|
1748 | n/a | return wrapper |
---|
1749 | n/a | return decorator |
---|
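# Editor's illustrative sketch (not part of the original module): the decorated
# method receives the (possibly scaled-down) size as an extra argument; _2G is
# the module-private constant defined above.
import unittest
from test import support

class _BigmemExample(unittest.TestCase):
    @support.bigmemtest(size=support._2G, memuse=2)
    def test_big_bytes(self, size):
        data = b'x' * size
        self.assertEqual(len(data), size)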
1750 | n/a | |
---|
1751 | n/a | def bigaddrspacetest(f): |
---|
1752 | n/a | """Decorator for tests that fill the address space.""" |
---|
1753 | n/a | def wrapper(self): |
---|
1754 | n/a | if max_memuse < MAX_Py_ssize_t: |
---|
1755 | n/a | if MAX_Py_ssize_t >= 2**63 - 1 and max_memuse >= 2**31: |
---|
1756 | n/a | raise unittest.SkipTest( |
---|
1757 | n/a | "not enough memory: try a 32-bit build instead") |
---|
1758 | n/a | else: |
---|
1759 | n/a | raise unittest.SkipTest( |
---|
1760 | n/a | "not enough memory: %.1fG minimum needed" |
---|
1761 | n/a | % (MAX_Py_ssize_t / (1024 ** 3))) |
---|
1762 | n/a | else: |
---|
1763 | n/a | return f(self) |
---|
1764 | n/a | return wrapper |
---|
1765 | n/a | |
---|
1766 | n/a | #======================================================================= |
---|
1767 | n/a | # unittest integration. |
---|
1768 | n/a | |
---|
1769 | n/a | class BasicTestRunner: |
---|
1770 | n/a | def run(self, test): |
---|
1771 | n/a | result = unittest.TestResult() |
---|
1772 | n/a | test(result) |
---|
1773 | n/a | return result |
---|
1774 | n/a | |
---|
1775 | n/a | def _id(obj): |
---|
1776 | n/a | return obj |
---|
1777 | n/a | |
---|
1778 | n/a | def requires_resource(resource): |
---|
1779 | n/a | if resource == 'gui' and not _is_gui_available(): |
---|
1780 | n/a | return unittest.skip(_is_gui_available.reason) |
---|
1781 | n/a | if is_resource_enabled(resource): |
---|
1782 | n/a | return _id |
---|
1783 | n/a | else: |
---|
1784 | n/a | return unittest.skip("resource {0!r} is not enabled".format(resource)) |
---|
1785 | n/a | |
---|
1786 | n/a | def requires_android_level(level, reason): |
---|
1787 | n/a | if is_android and _ANDROID_API_LEVEL < level: |
---|
1788 | n/a | return unittest.skip('%s at Android API level %d' % |
---|
1789 | n/a | (reason, _ANDROID_API_LEVEL)) |
---|
1790 | n/a | else: |
---|
1791 | n/a | return _id |
---|
1792 | n/a | |
---|
1793 | n/a | def cpython_only(test): |
---|
1794 | n/a | """ |
---|
1795 | n/a | Decorator for tests only applicable on CPython. |
---|
1796 | n/a | """ |
---|
1797 | n/a | return impl_detail(cpython=True)(test) |
---|
1798 | n/a | |
---|
1799 | n/a | def impl_detail(msg=None, **guards): |
---|
1800 | n/a | if check_impl_detail(**guards): |
---|
1801 | n/a | return _id |
---|
1802 | n/a | if msg is None: |
---|
1803 | n/a | guardnames, default = _parse_guards(guards) |
---|
1804 | n/a | if default: |
---|
1805 | n/a | msg = "implementation detail not available on {0}" |
---|
1806 | n/a | else: |
---|
1807 | n/a | msg = "implementation detail specific to {0}" |
---|
1808 | n/a | guardnames = sorted(guardnames.keys()) |
---|
1809 | n/a | msg = msg.format(' or '.join(guardnames)) |
---|
1810 | n/a | return unittest.skip(msg) |
---|
1811 | n/a | |
---|
1812 | n/a | _have_mp_queue = None |
---|
1813 | n/a | def requires_multiprocessing_queue(test): |
---|
1814 | n/a | """Skip decorator for tests that use multiprocessing.Queue.""" |
---|
1815 | n/a | global _have_mp_queue |
---|
1816 | n/a | if _have_mp_queue is None: |
---|
1817 | n/a | import multiprocessing |
---|
1818 | n/a | # Without a functioning shared semaphore implementation, attempts to |
---|
1819 | n/a | # instantiate a Queue will result in an ImportError (issue #3770). |
---|
1820 | n/a | try: |
---|
1821 | n/a | multiprocessing.Queue() |
---|
1822 | n/a | _have_mp_queue = True |
---|
1823 | n/a | except ImportError: |
---|
1824 | n/a | _have_mp_queue = False |
---|
1825 | n/a | msg = "requires a functioning shared semaphore implementation" |
---|
1826 | n/a | return test if _have_mp_queue else unittest.skip(msg)(test) |
---|
1827 | n/a | |
---|
1828 | n/a | def _parse_guards(guards): |
---|
1829 | n/a | # Returns a tuple ({platform_name: run_me}, default_value) |
---|
1830 | n/a | if not guards: |
---|
1831 | n/a | return ({'cpython': True}, False) |
---|
1832 | n/a | is_true = list(guards.values())[0] |
---|
1833 | n/a | assert list(guards.values()) == [is_true] * len(guards) # all True or all False |
---|
1834 | n/a | return (guards, not is_true) |
---|
1835 | n/a | |
---|
1836 | n/a | # Use the following check to guard CPython's implementation-specific tests -- |
---|
1837 | n/a | # or to run them only on the implementation(s) guarded by the arguments. |
---|
1838 | n/a | def check_impl_detail(**guards): |
---|
1839 | n/a | """This function returns True or False depending on the host platform. |
---|
1840 | n/a | Examples: |
---|
1841 | n/a | if check_impl_detail(): # only on CPython (default) |
---|
1842 | n/a | if check_impl_detail(jython=True): # only on Jython |
---|
1843 | n/a | if check_impl_detail(cpython=False): # everywhere except on CPython |
---|
1844 | n/a | """ |
---|
1845 | n/a | guards, default = _parse_guards(guards) |
---|
1846 | n/a | return guards.get(platform.python_implementation().lower(), default) |
---|
1847 | n/a | |
---|
1848 | n/a | |
---|
1849 | n/a | def no_tracing(func): |
---|
1850 | n/a | """Decorator to temporarily turn off tracing for the duration of a test.""" |
---|
1851 | n/a | if not hasattr(sys, 'gettrace'): |
---|
1852 | n/a | return func |
---|
1853 | n/a | else: |
---|
1854 | n/a | @functools.wraps(func) |
---|
1855 | n/a | def wrapper(*args, **kwargs): |
---|
1856 | n/a | original_trace = sys.gettrace() |
---|
1857 | n/a | try: |
---|
1858 | n/a | sys.settrace(None) |
---|
1859 | n/a | return func(*args, **kwargs) |
---|
1860 | n/a | finally: |
---|
1861 | n/a | sys.settrace(original_trace) |
---|
1862 | n/a | return wrapper |
---|
1863 | n/a | |
---|
1864 | n/a | |
---|
1865 | n/a | def refcount_test(test): |
---|
1866 | n/a | """Decorator for tests which involve reference counting. |
---|
1867 | n/a | |
---|
1868 | n/a | To start, the decorator does not run the test if it is not run by CPython. |
---|
1869 | n/a | After that, any trace function is unset during the test to prevent |
---|
1870 | n/a | unexpected refcounts caused by the trace function. |
---|
1871 | n/a | |
---|
1872 | n/a | """ |
---|
1873 | n/a | return no_tracing(cpython_only(test)) |
---|
1874 | n/a | |
---|
1875 | n/a | |
---|
1876 | n/a | def _filter_suite(suite, pred): |
---|
1877 | n/a | """Recursively filter test cases in a suite based on a predicate.""" |
---|
1878 | n/a | newtests = [] |
---|
1879 | n/a | for test in suite._tests: |
---|
1880 | n/a | if isinstance(test, unittest.TestSuite): |
---|
1881 | n/a | _filter_suite(test, pred) |
---|
1882 | n/a | newtests.append(test) |
---|
1883 | n/a | else: |
---|
1884 | n/a | if pred(test): |
---|
1885 | n/a | newtests.append(test) |
---|
1886 | n/a | suite._tests = newtests |
---|
1887 | n/a | |
---|
1888 | n/a | def _run_suite(suite): |
---|
1889 | n/a | """Run tests from a unittest.TestSuite-derived class.""" |
---|
1890 | n/a | if verbose: |
---|
1891 | n/a | runner = unittest.TextTestRunner(sys.stdout, verbosity=2, |
---|
1892 | n/a | failfast=failfast) |
---|
1893 | n/a | else: |
---|
1894 | n/a | runner = BasicTestRunner() |
---|
1895 | n/a | |
---|
1896 | n/a | result = runner.run(suite) |
---|
1897 | n/a | if not result.wasSuccessful(): |
---|
1898 | n/a | if len(result.errors) == 1 and not result.failures: |
---|
1899 | n/a | err = result.errors[0][1] |
---|
1900 | n/a | elif len(result.failures) == 1 and not result.errors: |
---|
1901 | n/a | err = result.failures[0][1] |
---|
1902 | n/a | else: |
---|
1903 | n/a | err = "multiple errors occurred" |
---|
1904 | n/a | if not verbose: err += "; run in verbose mode for details" |
---|
1905 | n/a | raise TestFailed(err) |
---|
1906 | n/a | |
---|
1907 | n/a | |
---|
1908 | n/a | def run_unittest(*classes): |
---|
1909 | n/a | """Run tests from unittest.TestCase-derived classes.""" |
---|
1910 | n/a | valid_types = (unittest.TestSuite, unittest.TestCase) |
---|
1911 | n/a | suite = unittest.TestSuite() |
---|
1912 | n/a | for cls in classes: |
---|
1913 | n/a | if isinstance(cls, str): |
---|
1914 | n/a | if cls in sys.modules: |
---|
1915 | n/a | suite.addTest(unittest.findTestCases(sys.modules[cls])) |
---|
1916 | n/a | else: |
---|
1917 | n/a | raise ValueError("str arguments must be keys in sys.modules") |
---|
1918 | n/a | elif isinstance(cls, valid_types): |
---|
1919 | n/a | suite.addTest(cls) |
---|
1920 | n/a | else: |
---|
1921 | n/a | suite.addTest(unittest.makeSuite(cls)) |
---|
1922 | n/a | def case_pred(test): |
---|
1923 | n/a | if match_tests is None: |
---|
1924 | n/a | return True |
---|
1925 | n/a | for name in test.id().split("."): |
---|
1926 | n/a | if fnmatch.fnmatchcase(name, match_tests): |
---|
1927 | n/a | return True |
---|
1928 | n/a | return False |
---|
1929 | n/a | _filter_suite(suite, case_pred) |
---|
1930 | n/a | _run_suite(suite) |
---|
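# Editor's illustrative sketch (not part of the original module): the classic
# test_main() pattern used by older test files; the test class is hypothetical.
import unittest
from test import support

class _ExampleTests(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)

def _test_main():
    support.run_unittest(_ExampleTests)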
1931 | n/a | |
---|
1932 | n/a | #======================================================================= |
---|
1933 | n/a | # Check for the presence of docstrings. |
---|
1934 | n/a | |
---|
1935 | n/a | # Rather than trying to enumerate all the cases where docstrings may be |
---|
1936 | n/a | # disabled, we just check for that directly |
---|
1937 | n/a | |
---|
1938 | n/a | def _check_docstrings(): |
---|
1939 | n/a | """Just used to check if docstrings are enabled""" |
---|
1940 | n/a | |
---|
1941 | n/a | MISSING_C_DOCSTRINGS = (check_impl_detail() and |
---|
1942 | n/a | sys.platform != 'win32' and |
---|
1943 | n/a | not sysconfig.get_config_var('WITH_DOC_STRINGS')) |
---|
1944 | n/a | |
---|
1945 | n/a | HAVE_DOCSTRINGS = (_check_docstrings.__doc__ is not None and |
---|
1946 | n/a | not MISSING_C_DOCSTRINGS) |
---|
1947 | n/a | |
---|
1948 | n/a | requires_docstrings = unittest.skipUnless(HAVE_DOCSTRINGS, |
---|
1949 | n/a | "test requires docstrings") |
---|
1950 | n/a | |
---|
1951 | n/a | |
---|
1952 | n/a | #======================================================================= |
---|
1953 | n/a | # doctest driver. |
---|
1954 | n/a | |
---|
1955 | n/a | def run_doctest(module, verbosity=None, optionflags=0): |
---|
1956 | n/a | """Run doctest on the given module. Return (#failures, #tests). |
---|
1957 | n/a | |
---|
1958 | n/a | If optional argument verbosity is not specified (or is None), pass |
---|
1959 | n/a | support's belief about verbosity on to doctest. Else doctest's |
---|
1960 | n/a | usual behavior is used (it searches sys.argv for -v). |
---|
1961 | n/a | """ |
---|
1962 | n/a | |
---|
1963 | n/a | import doctest |
---|
1964 | n/a | |
---|
1965 | n/a | if verbosity is None: |
---|
1966 | n/a | verbosity = verbose |
---|
1967 | n/a | else: |
---|
1968 | n/a | verbosity = None |
---|
1969 | n/a | |
---|
1970 | n/a | f, t = doctest.testmod(module, verbose=verbosity, optionflags=optionflags) |
---|
1971 | n/a | if f: |
---|
1972 | n/a | raise TestFailed("%d of %d doctests failed" % (f, t)) |
---|
1973 | n/a | if verbose: |
---|
1974 | n/a | print('doctest (%s) ... %d tests with zero failures' % |
---|
1975 | n/a | (module.__name__, t)) |
---|
1976 | n/a | return f, t |
---|
1977 | n/a | |
---|
1978 | n/a | |
---|
1979 | n/a | #======================================================================= |
---|
1980 | n/a | # Support for saving and restoring the imported modules. |
---|
1981 | n/a | |
---|
1982 | n/a | def modules_setup(): |
---|
1983 | n/a | return sys.modules.copy(), |
---|
1984 | n/a | |
---|
1985 | n/a | def modules_cleanup(oldmodules): |
---|
1986 | n/a | # Encoders/decoders are registered permanently within the internal |
---|
1987 | n/a | # codec cache. If we destroy the corresponding modules, their |
---|
1988 | n/a | # globals will be set to None, which will trip up the cached functions. |
---|
1989 | n/a | encodings = [(k, v) for k, v in sys.modules.items() |
---|
1990 | n/a | if k.startswith('encodings.')] |
---|
1991 | n/a | sys.modules.clear() |
---|
1992 | n/a | sys.modules.update(encodings) |
---|
1993 | n/a | # XXX: This kind of problem can affect more than just encodings. In particular, |
---|
1994 | n/a | # extension modules (such as _ssl) don't cope with reloading properly. |
---|
1995 | n/a | # Really, test modules should be cleaning out the test-specific modules they |
---|
1996 | n/a | # know they added (a la test_runpy) rather than relying on this function (as |
---|
1997 | n/a | # test_importhooks and test_pkg do currently). |
---|
1998 | n/a | # Implicitly imported *real* modules should be left alone (see issue 10556). |
---|
1999 | n/a | sys.modules.update(oldmodules) |
---|
2000 | n/a | |
---|
2001 | n/a | #======================================================================= |
---|
2002 | n/a | # Threading support to prevent reporting refleaks when running regrtest.py -R |
---|
2003 | n/a | |
---|
2004 | n/a | # NOTE: we use thread._count() rather than threading.enumerate() (or the |
---|
2005 | n/a | # moral equivalent thereof) because a threading.Thread object is still alive |
---|
2006 | n/a | # until its __bootstrap() method has returned, even after it has been |
---|
2007 | n/a | # unregistered from the threading module. |
---|
2008 | n/a | # thread._count(), on the other hand, only gets decremented *after* the |
---|
2009 | n/a | # __bootstrap() method has returned, which gives us reliable reference counts |
---|
2010 | n/a | # at the end of a test run. |
---|
2011 | n/a | |
---|
2012 | n/a | def threading_setup(): |
---|
2013 | n/a | if _thread: |
---|
2014 | n/a | return _thread._count(), threading._dangling.copy() |
---|
2015 | n/a | else: |
---|
2016 | n/a | return 1, () |
---|
2017 | n/a | |
---|
2018 | n/a | def threading_cleanup(*original_values): |
---|
2019 | n/a | if not _thread: |
---|
2020 | n/a | return |
---|
2021 | n/a | _MAX_COUNT = 100 |
---|
2022 | n/a | for count in range(_MAX_COUNT): |
---|
2023 | n/a | values = _thread._count(), threading._dangling |
---|
2024 | n/a | if values == original_values: |
---|
2025 | n/a | break |
---|
2026 | n/a | time.sleep(0.01) |
---|
2027 | n/a | gc_collect() |
---|
2028 | n/a | # XXX print a warning in case of failure? |
---|
2029 | n/a | |
---|
2030 | n/a | def reap_threads(func): |
---|
2031 | n/a | """Use this function when threads are being used. This will |
---|
2032 | n/a | ensure that the threads are cleaned up even when the test fails. |
---|
2033 | n/a | If threading is unavailable this function does nothing. |
---|
2034 | n/a | """ |
---|
2035 | n/a | if not _thread: |
---|
2036 | n/a | return func |
---|
2037 | n/a | |
---|
2038 | n/a | @functools.wraps(func) |
---|
2039 | n/a | def decorator(*args): |
---|
2040 | n/a | key = threading_setup() |
---|
2041 | n/a | try: |
---|
2042 | n/a | return func(*args) |
---|
2043 | n/a | finally: |
---|
2044 | n/a | threading_cleanup(*key) |
---|
2045 | n/a | return decorator |
---|
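# Editor's illustrative sketch (not part of the original module): wrapping a
# threaded test so dangling threads are waited on even if the body fails.
import threading
import unittest
from test import support

class _ThreadedExample(unittest.TestCase):
    @support.reap_threads
    def test_worker(self):
        done = threading.Event()
        t = threading.Thread(target=done.set)
        t.start()
        self.assertTrue(done.wait(10))
        t.join()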
2046 | n/a | |
---|
2047 | n/a | def reap_children(): |
---|
2048 | n/a | """Use this function at the end of test_main() whenever sub-processes |
---|
2049 | n/a | are started. This will help ensure that no extra children (zombies) |
---|
2050 | n/a | stick around to hog resources and create problems when looking |
---|
2051 | n/a | for refleaks. |
---|
2052 | n/a | """ |
---|
2053 | n/a | |
---|
2054 | n/a | # Reap all our dead child processes so we don't leave zombies around. |
---|
2055 | n/a | # These hog resources and might be causing some of the buildbots to die. |
---|
2056 | n/a | if hasattr(os, 'waitpid'): |
---|
2057 | n/a | any_process = -1 |
---|
2058 | n/a | while True: |
---|
2059 | n/a | try: |
---|
2060 | n/a | # This will raise an exception on Windows. That's ok. |
---|
2061 | n/a | pid, status = os.waitpid(any_process, os.WNOHANG) |
---|
2062 | n/a | if pid == 0: |
---|
2063 | n/a | break |
---|
2064 | n/a | except: |
---|
2065 | n/a | break |
---|
2066 | n/a | |
---|
2067 | n/a | @contextlib.contextmanager |
---|
2068 | n/a | def start_threads(threads, unlock=None): |
---|
2069 | n/a | threads = list(threads) |
---|
2070 | n/a | started = [] |
---|
2071 | n/a | try: |
---|
2072 | n/a | try: |
---|
2073 | n/a | for t in threads: |
---|
2074 | n/a | t.start() |
---|
2075 | n/a | started.append(t) |
---|
2076 | n/a | except: |
---|
2077 | n/a | if verbose: |
---|
2078 | n/a | print("Can't start %d threads, only %d threads started" % |
---|
2079 | n/a | (len(threads), len(started))) |
---|
2080 | n/a | raise |
---|
2081 | n/a | yield |
---|
2082 | n/a | finally: |
---|
2083 | n/a | try: |
---|
2084 | n/a | if unlock: |
---|
2085 | n/a | unlock() |
---|
2086 | n/a | endtime = starttime = time.time() |
---|
2087 | n/a | for timeout in range(1, 16): |
---|
2088 | n/a | endtime += 60 |
---|
2089 | n/a | for t in started: |
---|
2090 | n/a | t.join(max(endtime - time.time(), 0.01)) |
---|
2091 | n/a | started = [t for t in started if t.isAlive()] |
---|
2092 | n/a | if not started: |
---|
2093 | n/a | break |
---|
2094 | n/a | if verbose: |
---|
2095 | n/a | print('Unable to join %d threads during a period of ' |
---|
2096 | n/a | '%d minutes' % (len(started), timeout)) |
---|
2097 | n/a | finally: |
---|
2098 | n/a | started = [t for t in started if t.isAlive()] |
---|
2099 | n/a | if started: |
---|
2100 | n/a | faulthandler.dump_traceback(sys.stdout) |
---|
2101 | n/a | raise AssertionError('Unable to join %d threads' % len(started)) |
---|
2102 | n/a | |
---|
2103 | n/a | @contextlib.contextmanager |
---|
2104 | n/a | def swap_attr(obj, attr, new_val): |
---|
2105 | n/a | """Temporary swap out an attribute with a new object. |
---|
2106 | n/a | |
---|
2107 | n/a | Usage: |
---|
2108 | n/a | with swap_attr(obj, "attr", 5): |
---|
2109 | n/a | ... |
---|
2110 | n/a | |
---|
2111 | n/a | This will set obj.attr to 5 for the duration of the with: block, |
---|
2112 | n/a | restoring the old value at the end of the block. If `attr` doesn't |
---|
2113 | n/a | exist on `obj`, it will be created and then deleted at the end of the |
---|
2114 | n/a | block. |
---|
2115 | n/a | """ |
---|
2116 | n/a | if hasattr(obj, attr): |
---|
2117 | n/a | real_val = getattr(obj, attr) |
---|
2118 | n/a | setattr(obj, attr, new_val) |
---|
2119 | n/a | try: |
---|
2120 | n/a | yield |
---|
2121 | n/a | finally: |
---|
2122 | n/a | setattr(obj, attr, real_val) |
---|
2123 | n/a | else: |
---|
2124 | n/a | setattr(obj, attr, new_val) |
---|
2125 | n/a | try: |
---|
2126 | n/a | yield |
---|
2127 | n/a | finally: |
---|
2128 | n/a | delattr(obj, attr) |
---|
2129 | n/a | |
---|
2130 | n/a | @contextlib.contextmanager |
---|
2131 | n/a | def swap_item(obj, item, new_val): |
---|
2132 | n/a | """Temporary swap out an item with a new object. |
---|
2133 | n/a | |
---|
2134 | n/a | Usage: |
---|
2135 | n/a | with swap_item(obj, "item", 5): |
---|
2136 | n/a | ... |
---|
2137 | n/a | |
---|
2138 | n/a | This will set obj["item"] to 5 for the duration of the with: block, |
---|
2139 | n/a | restoring the old value at the end of the block. If `item` doesn't |
---|
2140 | n/a | exist in `obj`, it will be created and then deleted at the end of the |
---|
2141 | n/a | block. |
---|
2142 | n/a | """ |
---|
2143 | n/a | if item in obj: |
---|
2144 | n/a | real_val = obj[item] |
---|
2145 | n/a | obj[item] = new_val |
---|
2146 | n/a | try: |
---|
2147 | n/a | yield |
---|
2148 | n/a | finally: |
---|
2149 | n/a | obj[item] = real_val |
---|
2150 | n/a | else: |
---|
2151 | n/a | obj[item] = new_val |
---|
2152 | n/a | try: |
---|
2153 | n/a | yield |
---|
2154 | n/a | finally: |
---|
2155 | n/a | del obj[item] |
---|
2156 | n/a | |
---|
2157 | n/a | def strip_python_stderr(stderr): |
---|
2158 | n/a | """Strip the stderr of a Python process from potential debug output |
---|
2159 | n/a | emitted by the interpreter. |
---|
2160 | n/a | |
---|
2161 | n/a | This will typically be run on the result of the communicate() method |
---|
2162 | n/a | of a subprocess.Popen object. |
---|
2163 | n/a | """ |
---|
2164 | n/a | stderr = re.sub(br"\[\d+ refs, \d+ blocks\]\r?\n?", b"", stderr).strip() |
---|
2165 | n/a | return stderr |
---|
2166 | n/a | |
---|
2167 | n/a | requires_type_collecting = unittest.skipIf(hasattr(sys, 'getcounts'), |
---|
2168 | n/a | 'types are immortal if COUNT_ALLOCS is defined') |
---|
2169 | n/a | |
---|
2170 | n/a | def args_from_interpreter_flags(): |
---|
2171 | n/a | """Return a list of command-line arguments reproducing the current |
---|
2172 | n/a | settings in sys.flags and sys.warnoptions.""" |
---|
2173 | n/a | return subprocess._args_from_interpreter_flags() |
---|
2174 | n/a | |
---|
2175 | n/a | def optim_args_from_interpreter_flags(): |
---|
2176 | n/a | """Return a list of command-line arguments reproducing the current |
---|
2177 | n/a | optimization settings in sys.flags.""" |
---|
2178 | n/a | return subprocess._optim_args_from_interpreter_flags() |
---|
2179 | n/a | |
---|
2180 | n/a | #============================================================ |
---|
2181 | n/a | # Support for assertions about logging. |
---|
2182 | n/a | #============================================================ |
---|
2183 | n/a | |
---|
2184 | n/a | class TestHandler(logging.handlers.BufferingHandler): |
---|
2185 | n/a | def __init__(self, matcher): |
---|
2186 | n/a | # BufferingHandler takes a "capacity" argument |
---|
2187 | n/a | # so as to know when to flush. As we're overriding |
---|
2188 | n/a | # shouldFlush anyway, we can set a capacity of zero. |
---|
2189 | n/a | # You can call flush() manually to clear out the |
---|
2190 | n/a | # buffer. |
---|
2191 | n/a | logging.handlers.BufferingHandler.__init__(self, 0) |
---|
2192 | n/a | self.matcher = matcher |
---|
2193 | n/a | |
---|
2194 | n/a | def shouldFlush(self): |
---|
2195 | n/a | return False |
---|
2196 | n/a | |
---|
2197 | n/a | def emit(self, record): |
---|
2198 | n/a | self.format(record) |
---|
2199 | n/a | self.buffer.append(record.__dict__) |
---|
2200 | n/a | |
---|
2201 | n/a | def matches(self, **kwargs): |
---|
2202 | n/a | """ |
---|
2203 | n/a | Look for a saved dict whose keys/values match the supplied arguments. |
---|
2204 | n/a | """ |
---|
2205 | n/a | result = False |
---|
2206 | n/a | for d in self.buffer: |
---|
2207 | n/a | if self.matcher.matches(d, **kwargs): |
---|
2208 | n/a | result = True |
---|
2209 | n/a | break |
---|
2210 | n/a | return result |
---|
2211 | n/a | |
---|
2212 | n/a | class Matcher(object): |
---|
2213 | n/a | |
---|
2214 | n/a | _partial_matches = ('msg', 'message') |
---|
2215 | n/a | |
---|
2216 | n/a | def matches(self, d, **kwargs): |
---|
2217 | n/a | """ |
---|
2218 | n/a | Try to match a single dict with the supplied arguments. |
---|
2219 | n/a | |
---|
2220 | n/a | Keys whose values are strings and which are in self._partial_matches |
---|
2221 | n/a | will be checked for partial (i.e. substring) matches. You can extend |
---|
2222 | n/a | this scheme to (for example) do regular expression matching, etc. |
---|
2223 | n/a | """ |
---|
2224 | n/a | result = True |
---|
2225 | n/a | for k in kwargs: |
---|
2226 | n/a | v = kwargs[k] |
---|
2227 | n/a | dv = d.get(k) |
---|
2228 | n/a | if not self.match_value(k, dv, v): |
---|
2229 | n/a | result = False |
---|
2230 | n/a | break |
---|
2231 | n/a | return result |
---|
2232 | n/a | |
---|
2233 | n/a | def match_value(self, k, dv, v): |
---|
2234 | n/a | """ |
---|
2235 | n/a | Try to match a single stored value (dv) with a supplied value (v). |
---|
2236 | n/a | """ |
---|
2237 | n/a | if type(v) != type(dv): |
---|
2238 | n/a | result = False |
---|
2239 | n/a | elif type(dv) is not str or k not in self._partial_matches: |
---|
2240 | n/a | result = (v == dv) |
---|
2241 | n/a | else: |
---|
2242 | n/a | result = dv.find(v) >= 0 |
---|
2243 | n/a | return result |
---|
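# Editor's illustrative sketch (not part of the original module): asserting that
# a particular record was logged, using TestHandler together with a Matcher;
# the logger name and message are made up.
import logging
from test import support

def _logging_assert_example():
    handler = support.TestHandler(support.Matcher())
    logger = logging.getLogger('example')
    logger.addHandler(handler)
    try:
        logger.warning('disk space low: %d%% used', 95)
        # 'msg' is matched as a substring; 'levelname' must match exactly.
        assert handler.matches(levelname='WARNING', msg='disk space')
    finally:
        logger.removeHandler(handler)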
2244 | n/a | |
---|
2245 | n/a | |
---|
2246 | n/a | _can_symlink = None |
---|
2247 | n/a | def can_symlink(): |
---|
2248 | n/a | global _can_symlink |
---|
2249 | n/a | if _can_symlink is not None: |
---|
2250 | n/a | return _can_symlink |
---|
2251 | n/a | symlink_path = TESTFN + "can_symlink" |
---|
2252 | n/a | try: |
---|
2253 | n/a | os.symlink(TESTFN, symlink_path) |
---|
2254 | n/a | can = True |
---|
2255 | n/a | except (OSError, NotImplementedError, AttributeError): |
---|
2256 | n/a | can = False |
---|
2257 | n/a | else: |
---|
2258 | n/a | os.remove(symlink_path) |
---|
2259 | n/a | _can_symlink = can |
---|
2260 | n/a | return can |
---|
2261 | n/a | |
---|
2262 | n/a | def skip_unless_symlink(test): |
---|
2263 | n/a | """Skip decorator for tests that require functional symlink""" |
---|
2264 | n/a | ok = can_symlink() |
---|
2265 | n/a | msg = "Requires functional symlink implementation" |
---|
2266 | n/a | return test if ok else unittest.skip(msg)(test) |
---|
2267 | n/a | |
---|
2268 | n/a | _can_xattr = None |
---|
2269 | n/a | def can_xattr(): |
---|
2270 | n/a | global _can_xattr |
---|
2271 | n/a | if _can_xattr is not None: |
---|
2272 | n/a | return _can_xattr |
---|
2273 | n/a | if not hasattr(os, "setxattr"): |
---|
2274 | n/a | can = False |
---|
2275 | n/a | else: |
---|
2276 | n/a | tmp_fp, tmp_name = tempfile.mkstemp() |
---|
2277 | n/a | try: |
---|
2278 | n/a | with open(TESTFN, "wb") as fp: |
---|
2279 | n/a | try: |
---|
2280 | n/a | # TESTFN & tempfile may use different file systems with |
---|
2281 | n/a | # different capabilities |
---|
2282 | n/a | os.setxattr(tmp_fp, b"user.test", b"") |
---|
2283 | n/a | os.setxattr(fp.fileno(), b"user.test", b"") |
---|
2284 | n/a | # Kernels < 2.6.39 don't respect setxattr flags. |
---|
2285 | n/a | kernel_version = platform.release() |
---|
2286 | n/a | m = re.match(r"2.6.(\d{1,2})", kernel_version) |
---|
2287 | n/a | can = m is None or int(m.group(1)) >= 39 |
---|
2288 | n/a | except OSError: |
---|
2289 | n/a | can = False |
---|
2290 | n/a | finally: |
---|
2291 | n/a | unlink(TESTFN) |
---|
2292 | n/a | unlink(tmp_name) |
---|
2293 | n/a | _can_xattr = can |
---|
2294 | n/a | return can |
---|
2295 | n/a | |
---|
2296 | n/a | def skip_unless_xattr(test): |
---|
2297 | n/a | """Skip decorator for tests that require functional extended attributes""" |
---|
2298 | n/a | ok = can_xattr() |
---|
2299 | n/a | msg = "no non-broken extended attribute support" |
---|
2300 | n/a | return test if ok else unittest.skip(msg)(test) |
---|
2301 | n/a | |
---|
2302 | n/a | |
---|
2303 | n/a | def fs_is_case_insensitive(directory): |
---|
2304 | n/a | """Detects if the file system for the specified directory is case-insensitive.""" |
---|
2305 | n/a | with tempfile.NamedTemporaryFile(dir=directory) as base: |
---|
2306 | n/a | base_path = base.name |
---|
2307 | n/a | case_path = base_path.upper() |
---|
2308 | n/a | if case_path == base_path: |
---|
2309 | n/a | case_path = base_path.lower() |
---|
2310 | n/a | try: |
---|
2311 | n/a | return os.path.samefile(base_path, case_path) |
---|
2312 | n/a | except FileNotFoundError: |
---|
2313 | n/a | return False |
---|
2314 | n/a | |
---|
2315 | n/a | |
---|
2316 | n/a | def detect_api_mismatch(ref_api, other_api, *, ignore=()): |
---|
2317 | n/a | """Returns the set of items in ref_api not in other_api, except for a |
---|
2318 | n/a | defined list of items to be ignored in this check. |
---|
2319 | n/a | |
---|
2320 | n/a | By default this skips private attributes beginning with '_' but |
---|
2321 | n/a | includes all magic methods, i.e. those starting and ending in '__'. |
---|
2322 | n/a | """ |
---|
2323 | n/a | missing_items = set(dir(ref_api)) - set(dir(other_api)) |
---|
2324 | n/a | if ignore: |
---|
2325 | n/a | missing_items -= set(ignore) |
---|
2326 | n/a | missing_items = set(m for m in missing_items |
---|
2327 | n/a | if not m.startswith('_') or m.endswith('__')) |
---|
2328 | n/a | return missing_items |
---|
2329 | n/a | |
---|
2330 | n/a | |
---|
2331 | n/a | def check__all__(test_case, module, name_of_module=None, extra=(), |
---|
2332 | n/a | blacklist=()): |
---|
2333 | n/a | """Assert that the __all__ variable of 'module' contains all public names. |
---|
2334 | n/a | |
---|
2335 | n/a | The module's public names (its API) are detected automatically based on |
---|
2336 | n/a | whether they match the public name convention and were defined in |
---|
2337 | n/a | 'module'. |
---|
2338 | n/a | |
---|
2339 | n/a | The 'name_of_module' argument can specify (as a string or tuple thereof) |
---|
2340 | n/a | what module(s) an API could be defined in, in order to be detected as a |
---|
2341 | n/a | public API. One case for this is when 'module' imports part of its public |
---|
2342 | n/a | API from other modules, possibly a C backend (like 'csv' and its '_csv'). |
---|
2343 | n/a | |
---|
2344 | n/a | The 'extra' argument can be a set of names that wouldn't otherwise be |
---|
2345 | n/a | automatically detected as "public", like objects without a proper |
---|
2346 | n/a | '__module__' attribute. If provided, it will be added to the |
---|
2347 | n/a | automatically detected ones. |
---|
2348 | n/a | |
---|
2349 | n/a | The 'blacklist' argument can be a set of names that must not be treated |
---|
2350 | n/a | as part of the public API even though their names indicate otherwise. |
---|
2351 | n/a | |
---|
2352 | n/a | Usage: |
---|
2353 | n/a | import bar |
---|
2354 | n/a | import foo |
---|
2355 | n/a | import unittest |
---|
2356 | n/a | from test import support |
---|
2357 | n/a | |
---|
2358 | n/a | class MiscTestCase(unittest.TestCase): |
---|
2359 | n/a | def test__all__(self): |
---|
2360 | n/a | support.check__all__(self, foo) |
---|
2361 | n/a | |
---|
2362 | n/a | class OtherTestCase(unittest.TestCase): |
---|
2363 | n/a | def test__all__(self): |
---|
2364 | n/a | extra = {'BAR_CONST', 'FOO_CONST'} |
---|
2365 | n/a | blacklist = {'baz'} # Undocumented name. |
---|
2366 | n/a | # bar imports part of its API from _bar. |
---|
2367 | n/a | support.check__all__(self, bar, ('bar', '_bar'), |
---|
2368 | n/a | extra=extra, blacklist=blacklist) |
---|
2369 | n/a | |
---|
2370 | n/a | """ |
---|
2371 | n/a | |
---|
2372 | n/a | if name_of_module is None: |
---|
2373 | n/a | name_of_module = (module.__name__, ) |
---|
2374 | n/a | elif isinstance(name_of_module, str): |
---|
2375 | n/a | name_of_module = (name_of_module, ) |
---|
2376 | n/a | |
---|
2377 | n/a | expected = set(extra) |
---|
2378 | n/a | |
---|
2379 | n/a | for name in dir(module): |
---|
2380 | n/a | if name.startswith('_') or name in blacklist: |
---|
2381 | n/a | continue |
---|
2382 | n/a | obj = getattr(module, name) |
---|
2383 | n/a | if (getattr(obj, '__module__', None) in name_of_module or |
---|
2384 | n/a | (not hasattr(obj, '__module__') and |
---|
2385 | n/a | not isinstance(obj, types.ModuleType))): |
---|
2386 | n/a | expected.add(name) |
---|
2387 | n/a | test_case.assertCountEqual(module.__all__, expected) |
---|
2388 | n/a | |
---|
2389 | n/a | |
---|
class SuppressCrashReport:
    """Try to prevent a crash report from popping up.

    On Windows, don't display the Windows Error Reporting dialog.  On UNIX,
    disable the creation of a core dump file.
    """
    old_value = None
    old_modes = None

    def __enter__(self):
        """On Windows, disable Windows Error Reporting dialogs using
        SetErrorMode.

        On UNIX, try to save the previous core file size limit, then set
        the soft limit to 0.
        """
        if sys.platform.startswith('win'):
            # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx
            # GetErrorMode is not available on Windows XP and Windows Server 2003,
            # but SetErrorMode returns the previous value, so we can use that.
            import ctypes
            self._k32 = ctypes.windll.kernel32
            SEM_NOGPFAULTERRORBOX = 0x02
            self.old_value = self._k32.SetErrorMode(SEM_NOGPFAULTERRORBOX)
            self._k32.SetErrorMode(self.old_value | SEM_NOGPFAULTERRORBOX)

            # Suppress assert dialogs in debug builds
            # (see http://bugs.python.org/issue23314)
            try:
                import msvcrt
                msvcrt.CrtSetReportMode
            except (AttributeError, ImportError):
                # no msvcrt, or a release build
                pass
            else:
                self.old_modes = {}
                for report_type in [msvcrt.CRT_WARN,
                                    msvcrt.CRT_ERROR,
                                    msvcrt.CRT_ASSERT]:
                    old_mode = msvcrt.CrtSetReportMode(report_type,
                                                       msvcrt.CRTDBG_MODE_FILE)
                    old_file = msvcrt.CrtSetReportFile(report_type,
                                                       msvcrt.CRTDBG_FILE_STDERR)
                    self.old_modes[report_type] = old_mode, old_file

        else:
            if resource is not None:
                try:
                    self.old_value = resource.getrlimit(resource.RLIMIT_CORE)
                    resource.setrlimit(resource.RLIMIT_CORE,
                                       (0, self.old_value[1]))
                except (ValueError, OSError):
                    pass
            if sys.platform == 'darwin':
                # Check if the 'Crash Reporter' on OSX was configured
                # in 'Developer' mode and warn that the (intentional) crash
                # will trigger it.
                #
                # This assumes that this context manager is used in tests
                # that might produce a crash.
                value = subprocess.Popen(['/usr/bin/defaults', 'read',
                                          'com.apple.CrashReporter', 'DialogType'],
                                         stdout=subprocess.PIPE).communicate()[0]
                if value.strip() == b'developer':
                    print("this test triggers the Crash Reporter, "
                          "that is intentional", end='', flush=True)

        return self

    def __exit__(self, *ignore_exc):
        """Restore Windows ErrorMode or core file behavior to the initial value."""
        if self.old_value is None:
            return

        if sys.platform.startswith('win'):
            self._k32.SetErrorMode(self.old_value)

            if self.old_modes:
                import msvcrt
                for report_type, (old_mode, old_file) in self.old_modes.items():
                    msvcrt.CrtSetReportMode(report_type, old_mode)
                    msvcrt.CrtSetReportFile(report_type, old_file)
        else:
            if resource is not None:
                try:
                    resource.setrlimit(resource.RLIMIT_CORE, self.old_value)
                except (ValueError, OSError):
                    pass


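# Illustrative sketch (not part of the original module): a hypothetical helper
# that crashes a child interpreter on purpose while keeping the platform's
# crash-reporting UI quiet.  It relies on the core dump limit (UNIX) and the
# process error mode (Windows) set above being inherited by child processes;
# faulthandler._sigsegv() is a private CPython testing hook.
def _example_crash_quietly():
    code = "import faulthandler; faulthandler._sigsegv()"
    with SuppressCrashReport():
        # The child is expected to die from a fatal signal (or a non-zero
        # exit status on Windows) without popping up a dialog or leaving a
        # core file behind.
        proc = subprocess.run([sys.executable, '-c', code],
                              stderr=subprocess.PIPE)
    return proc.returncode

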
def patch(test_instance, object_to_patch, attr_name, new_value):
    """Override 'object_to_patch'.'attr_name' with 'new_value'.

    Also, add a cleanup procedure to 'test_instance' to restore the original
    value of 'object_to_patch'.'attr_name'.
    The 'attr_name' should be a valid attribute for 'object_to_patch'.

    """
    # check that 'attr_name' is a real attribute of 'object_to_patch';
    # this will raise AttributeError if it does not exist
    getattr(object_to_patch, attr_name)

    # keep a copy of the old value
    attr_is_local = False
    try:
        old_value = object_to_patch.__dict__[attr_name]
    except (AttributeError, KeyError):
        old_value = getattr(object_to_patch, attr_name, None)
    else:
        attr_is_local = True

    # restore the value when the test is done
    def cleanup():
        if attr_is_local:
            setattr(object_to_patch, attr_name, old_value)
        else:
            delattr(object_to_patch, attr_name)

    test_instance.addCleanup(cleanup)

    # actually override the attribute
    setattr(object_to_patch, attr_name, new_value)


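# Illustrative sketch (not part of the original module): a hypothetical test
# that pins time.time() to a fixed value for the duration of a single test;
# the cleanup registered by patch() restores the real function afterwards.
class _ExamplePatchUsage(unittest.TestCase):
    def test_fixed_clock(self):
        patch(self, time, 'time', lambda: 12345.0)
        self.assertEqual(time.time(), 12345.0)

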
def run_in_subinterp(code):
    """
    Run code in a subinterpreter.  Raise unittest.SkipTest if the tracemalloc
    module is tracing memory allocations.
    """
    # Issue #10915, #15751: PyGILState_*() functions don't work with
    # sub-interpreters, and the tracemalloc module uses these functions
    # internally.
    try:
        import tracemalloc
    except ImportError:
        pass
    else:
        if tracemalloc.is_tracing():
            raise unittest.SkipTest("run_in_subinterp() cannot be used "
                                    "if tracemalloc module is tracing "
                                    "memory allocations")
    import _testcapi
    return _testcapi.run_in_subinterp(code)


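# Illustrative sketch (not part of the original module): a hypothetical test
# that runs an import inside a fresh sub-interpreter.  It assumes the
# _testcapi extension is available and that run_in_subinterp() returns 0 on
# success, mirroring _testcapi.run_in_subinterp() in CPython builds.
class _ExampleSubinterpUsage(unittest.TestCase):
    @cpython_only
    def test_import_in_subinterp(self):
        ret = run_in_subinterp("import importlib; importlib.import_module('math')")
        self.assertEqual(ret, 0)

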
def check_free_after_iterating(test, iter, cls, args=()):
    class A(cls):
        def __del__(self):
            nonlocal done
            done = True
            try:
                next(it)
            except StopIteration:
                pass

    done = False
    it = iter(A(*args))
    # Issue 26494: Shouldn't crash
    test.assertRaises(StopIteration, next, it)
    # The sequence should be deallocated just after the end of iterating
    gc_collect()
    test.assertTrue(done)


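# Illustrative sketch (not part of the original module): typical call sites,
# modelled on how the sequence tests exercise this helper.  The test class is
# hypothetical.
class _ExampleFreeAfterIterating(unittest.TestCase):
    def test_list_iterators(self):
        # Iterate a list subclass forwards and backwards, then check that the
        # sequence object is freed as soon as the iterator is exhausted.
        check_free_after_iterating(self, iter, list)
        check_free_after_iterating(self, reversed, list)

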
def missing_compiler_executable(cmd_names=[]):
    """Check if the compiler components used to build the interpreter exist.

    Check for the existence of the compiler executables whose names are listed
    in 'cmd_names', or of all the compiler executables when 'cmd_names' is
    empty, and return the first missing executable or None when none is found
    missing.

    """
    from distutils import ccompiler, sysconfig, spawn
    compiler = ccompiler.new_compiler()
    sysconfig.customize_compiler(compiler)
    for name in compiler.executables:
        if cmd_names and name not in cmd_names:
            continue
        cmd = getattr(compiler, name)
        if cmd_names:
            assert cmd is not None, \
                   "the '%s' executable is not configured" % name
        elif cmd is None:
            continue
        if spawn.find_executable(cmd[0]) is None:
            return cmd[0]


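# Illustrative sketch (not part of the original module): how the distutils
# tests typically guard a build step on the availability of the compiler
# toolchain.  The test class is hypothetical and the build step is elided.
class _ExampleBuildTest(unittest.TestCase):
    def test_build_ext_needs_compiler(self):
        cmd = missing_compiler_executable()
        if cmd is not None:
            self.skipTest("the %r compiler is not found" % cmd)
        # ... proceed to run a distutils build_ext command here ...

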
_is_android_emulator = None
def setswitchinterval(interval):
    # Setting a very low GIL switch interval on the Android emulator causes
    # Python to hang (issue #26939).
    minimum_interval = 1e-5
    if is_android and interval < minimum_interval:
        global _is_android_emulator
        if _is_android_emulator is None:
            _is_android_emulator = (subprocess.check_output(
                ['getprop', 'ro.kernel.qemu']).strip() == b'1')
        if _is_android_emulator:
            interval = minimum_interval
    return sys.setswitchinterval(interval)


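# Illustrative sketch (not part of the original module): a hypothetical
# stress-test helper that shrinks the switch interval to provoke thread
# switches, going through the wrapper above so the Android-emulator floor is
# honoured, and restores the previous interval afterwards.
def _example_stress_switch_interval(workload, interval=1e-6):
    old_interval = sys.getswitchinterval()
    setswitchinterval(interval)
    try:
        workload()
    finally:
        sys.setswitchinterval(old_interval)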