# This is a variant of the very old (early 90's) file
# Demo/threads/bug.py.  It simply provokes a number of threads into
# trying to import the same module "at the same time".
# There are no pleasant failure modes -- most likely is that Python
# complains several times about module random having no attribute
# randrange, and then Python hangs.

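# _imp exposes lock_held(), which the code below uses to check whether the
# import lock is currently held.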
import _imp as imp
import os
import importlib
import sys
import time
import shutil
import unittest
from test.support import (
    verbose, import_module, run_unittest, TESTFN, reap_threads,
    forget, unlink, rmtree, start_threads)
threading = import_module('threading')

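# Worker run by each thread: imports random and modulefinder in one of two
# orders (depending on how many tasks have already finished), records any
# exception in `errors`, and sets `done` once all N threads have appended
# their ident to `done_tasks`.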
def task(N, done, done_tasks, errors):
    try:
        # We don't use modulefinder but still import it in order to stress
        # importing of different modules from several threads.
        if len(done_tasks) % 2:
            import modulefinder
            import random
        else:
            import random
            import modulefinder
        # This will fail if random is not completely initialized
        x = random.randrange(1, 3)
    except Exception as e:
        errors.append(e.with_traceback(None))
    finally:
        done_tasks.append(threading.get_ident())
        finished = len(done_tasks) == N
        if finished:
            done.set()

# Create a circular import structure: A -> C -> B -> D -> A
# NOTE: `time` is already loaded and therefore doesn't threaten to deadlock.

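# The %(delay)s placeholder below is filled in by test_circular_imports(),
# which writes these sources out as real modules on a temporary sys.path
# entry.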
circular_imports_modules = {
    'A': """if 1:
        import time
        time.sleep(%(delay)s)
        x = 'a'
        import C
        """,
    'B': """if 1:
        import time
        time.sleep(%(delay)s)
        x = 'b'
        import D
        """,
    'C': """import B""",
    'D': """import A""",
}

class Finder:
    """A dummy finder to detect concurrent access to its find_spec()
    method."""

    def __init__(self):
        self.numcalls = 0
        self.x = 0
        self.lock = threading.Lock()

    def find_spec(self, name, path=None, target=None):
        # Simulate some thread-unsafe behaviour.  Only the increment of
        # `numcalls` is guarded by the lock; the read/sleep/write of `x`
        # is deliberately racy.  If calls to find_spec() are properly
        # serialized, `x` will end up the same as `numcalls`.  Otherwise
        # not.
        assert imp.lock_held()
        with self.lock:
            self.numcalls += 1
        x = self.x
        time.sleep(0.01)
        self.x = x + 1

class FlushingFinder:
    """A dummy finder which flushes sys.path_importer_cache when it gets
    called."""

    def find_spec(self, name, path=None, target=None):
        sys.path_importer_cache.clear()


class ThreadedImportTests(unittest.TestCase):

    def setUp(self):
        self.old_random = sys.modules.pop('random', None)

    def tearDown(self):
        # If the `random` module was already initialized, we restore the
        # old module at the end so that pickling tests don't fail.
        # See http://bugs.python.org/issue3657#msg110461
        if self.old_random is not None:
            sys.modules['random'] = self.old_random

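    # Common driver reused by the test_parallel_* tests below: repeatedly
    # spawns batches of threads that all run task() and import the same
    # modules concurrently.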
    def check_parallel_module_init(self):
        if imp.lock_held():
            # This triggers on, e.g., from test import autotest.
            raise unittest.SkipTest("can't run when import lock is held")

        done = threading.Event()
        for N in (20, 50) * 3:
            if verbose:
                print("Trying", N, "threads ...", end=' ')
            # Make sure that random and modulefinder get reimported freshly
            for modname in ['random', 'modulefinder']:
                try:
                    del sys.modules[modname]
                except KeyError:
                    pass
            errors = []
            done_tasks = []
            done.clear()
            t0 = time.monotonic()
            with start_threads(threading.Thread(target=task,
                                                args=(N, done, done_tasks, errors,))
                               for i in range(N)):
                pass
            completed = done.wait(10 * 60)
            dt = time.monotonic() - t0
            if verbose:
                print("%.1f ms" % (dt*1e3), flush=True, end=" ")
            dbg_info = 'done: %s/%s' % (len(done_tasks), N)
            self.assertFalse(errors, dbg_info)
            self.assertTrue(completed, dbg_info)
            if verbose:
                print("OK.")

    def test_parallel_module_init(self):
        self.check_parallel_module_init()

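    # Same stress run, but with the dummy Finder installed on sys.meta_path
    # so that every import also goes through its find_spec(); afterwards we
    # check that those calls were serialized.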
    def test_parallel_meta_path(self):
        finder = Finder()
        sys.meta_path.insert(0, finder)
        try:
            self.check_parallel_module_init()
            self.assertGreater(finder.numcalls, 0)
            self.assertEqual(finder.x, finder.numcalls)
        finally:
            sys.meta_path.remove(finder)

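    # Same stress run again, this time exercising sys.path_hooks; the
    # FlushingFinder on sys.meta_path keeps clearing path_importer_cache so
    # that the path hook is consulted on every import.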
    def test_parallel_path_hooks(self):
        # Here the Finder instance is only used to check concurrent calls
        # to path_hook().
        finder = Finder()
        # In order for our path hook to be called at each import, we need
        # to flush the path_importer_cache, which we do by registering a
        # dedicated meta_path entry.
        flushing_finder = FlushingFinder()
        def path_hook(path):
            finder.find_spec('')
            raise ImportError
        sys.path_hooks.insert(0, path_hook)
        sys.meta_path.append(flushing_finder)
        try:
            # Flush the cache a first time
            flushing_finder.find_spec('')
            self.check_parallel_module_init()
            self.assertGreater(finder.numcalls, 0)
            self.assertEqual(finder.x, finder.numcalls)
        finally:
            sys.meta_path.remove(flushing_finder)
            sys.path_hooks.remove(path_hook)

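    # Re-import the helper module test.threaded_import_hangers, which runs
    # some work in freshly spawned threads as a side effect of being imported
    # and records any apparent hangs in its module-level `errors` list.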
    def test_import_hangers(self):
        # In case this test is run again, make sure the helper module
        # gets loaded from scratch again.
        try:
            del sys.modules['test.threaded_import_hangers']
        except KeyError:
            pass
        import test.threaded_import_hangers
        self.assertFalse(test.threaded_import_hangers.errors)

    def test_circular_imports(self):
        # The goal of this test is to exercise implementations of the import
        # lock which use a per-module lock, rather than a global lock.
        # In these implementations, there is a possible deadlock with
        # circular imports, for example:
        # - thread 1 imports A (grabbing the lock for A) which imports B
        # - thread 2 imports B (grabbing the lock for B) which imports A
        # Such implementations should be able to detect such situations and
        # resolve them one way or the other, without freezing.
        # NOTE: our test constructs a slightly less trivial import cycle,
        # in order to better stress the deadlock avoidance mechanism.
        delay = 0.5
        os.mkdir(TESTFN)
        self.addCleanup(shutil.rmtree, TESTFN)
        sys.path.insert(0, TESTFN)
        self.addCleanup(sys.path.remove, TESTFN)
        for name, contents in circular_imports_modules.items():
            contents = contents % {'delay': delay}
            with open(os.path.join(TESTFN, name + ".py"), "wb") as f:
                f.write(contents.encode('utf-8'))
            self.addCleanup(forget, name)

        importlib.invalidate_caches()
        results = []
        def import_ab():
            import A
            results.append(getattr(A, 'x', None))
        def import_ba():
            import B
            results.append(getattr(B, 'x', None))
        t1 = threading.Thread(target=import_ab)
        t2 = threading.Thread(target=import_ba)
        t1.start()
        t2.start()
        t1.join()
        t2.join()
        self.assertEqual(set(results), {'a', 'b'})

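    # Write a module that, while being imported, starts a thread which itself
    # performs an import, and joins that thread before the outer import
    # finishes; the test simply checks that importing it completes.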
    def test_side_effect_import(self):
        code = """if 1:
            import threading
            def target():
                import random
            t = threading.Thread(target=target)
            t.start()
            t.join()"""
        sys.path.insert(0, os.curdir)
        self.addCleanup(sys.path.remove, os.curdir)
        filename = TESTFN + ".py"
        with open(filename, "wb") as f:
            f.write(code.encode('utf-8'))
        self.addCleanup(unlink, filename)
        self.addCleanup(forget, TESTFN)
        self.addCleanup(rmtree, '__pycache__')
        importlib.invalidate_caches()
        __import__(TESTFN)


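# Lower the thread switch interval so that the interpreter switches threads
# much more often than usual while the tests run, then restore the original
# value afterwards.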
@reap_threads
def test_main():
    old_switchinterval = None
    try:
        old_switchinterval = sys.getswitchinterval()
        sys.setswitchinterval(1e-5)
    except AttributeError:
        pass
    try:
        run_unittest(ThreadedImportTests)
    finally:
        if old_switchinterval is not None:
            sys.setswitchinterval(old_switchinterval)

if __name__ == "__main__":
    test_main()