# Python code coverage for Python/peephole.c

| # | count | content |
|---|---|---|

1 | n/a | /* Peephole optimizations for bytecode compiler. */ |

2 | n/a | |

3 | n/a | #include "Python.h" |

4 | n/a | |

5 | n/a | #include "Python-ast.h" |

6 | n/a | #include "node.h" |

7 | n/a | #include "ast.h" |

8 | n/a | #include "code.h" |

9 | n/a | #include "symtable.h" |

10 | n/a | #include "opcode.h" |

11 | n/a | #include "wordcode_helpers.h" |

12 | n/a | |

13 | n/a | #define UNCONDITIONAL_JUMP(op) (op==JUMP_ABSOLUTE || op==JUMP_FORWARD) |

14 | n/a | #define CONDITIONAL_JUMP(op) (op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \ |

15 | n/a | || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP) |

16 | n/a | #define ABSOLUTE_JUMP(op) (op==JUMP_ABSOLUTE || op==CONTINUE_LOOP \ |

17 | n/a | || op==POP_JUMP_IF_FALSE || op==POP_JUMP_IF_TRUE \ |

18 | n/a | || op==JUMP_IF_FALSE_OR_POP || op==JUMP_IF_TRUE_OR_POP) |

19 | n/a | #define JUMPS_ON_TRUE(op) (op==POP_JUMP_IF_TRUE || op==JUMP_IF_TRUE_OR_POP) |

20 | n/a | #define GETJUMPTGT(arr, i) (get_arg(arr, i) / sizeof(_Py_CODEUNIT) + \ |

21 | n/a | (ABSOLUTE_JUMP(_Py_OPCODE(arr[i])) ? 0 : i+1)) |

22 | n/a | #define ISBASICBLOCK(blocks, start, end) \ |

23 | n/a | (blocks[start]==blocks[end]) |

24 | n/a | |

25 | n/a | |

26 | n/a | #define CONST_STACK_CREATE() { \ |

27 | n/a | const_stack_size = 256; \ |

28 | n/a | const_stack = PyMem_New(PyObject *, const_stack_size); \ |

29 | n/a | if (!const_stack) { \ |

30 | n/a | PyErr_NoMemory(); \ |

31 | n/a | goto exitError; \ |

32 | n/a | } \ |

33 | n/a | } |

34 | n/a | |

35 | n/a | #define CONST_STACK_DELETE() do { \ |

36 | n/a | if (const_stack) \ |

37 | n/a | PyMem_Free(const_stack); \ |

38 | n/a | } while(0) |

39 | n/a | |

40 | n/a | #define CONST_STACK_LEN() ((unsigned)(const_stack_top + 1)) |

41 | n/a | |

42 | n/a | #define CONST_STACK_PUSH_OP(i) do { \ |

43 | n/a | PyObject *_x; \ |

44 | n/a | assert(_Py_OPCODE(codestr[i]) == LOAD_CONST); \ |

45 | n/a | assert(PyList_GET_SIZE(consts) > (Py_ssize_t)get_arg(codestr, i)); \ |

46 | n/a | _x = PyList_GET_ITEM(consts, get_arg(codestr, i)); \ |

47 | n/a | if (++const_stack_top >= const_stack_size) { \ |

48 | n/a | const_stack_size *= 2; \ |

49 | n/a | PyMem_Resize(const_stack, PyObject *, const_stack_size); \ |

50 | n/a | if (!const_stack) { \ |

51 | n/a | PyErr_NoMemory(); \ |

52 | n/a | goto exitError; \ |

53 | n/a | } \ |

54 | n/a | } \ |

55 | n/a | const_stack[const_stack_top] = _x; \ |

56 | n/a | in_consts = 1; \ |

57 | n/a | } while(0) |

58 | n/a | |

59 | n/a | #define CONST_STACK_RESET() do { \ |

60 | n/a | const_stack_top = -1; \ |

61 | n/a | } while(0) |

62 | n/a | |

63 | n/a | #define CONST_STACK_LASTN(i) \ |

64 | n/a | &const_stack[CONST_STACK_LEN() - i] |

65 | n/a | |

66 | n/a | #define CONST_STACK_POP(i) do { \ |

67 | n/a | assert(CONST_STACK_LEN() >= i); \ |

68 | n/a | const_stack_top -= i; \ |

69 | n/a | } while(0) |

70 | n/a | |

71 | n/a | /* Scans back N consecutive LOAD_CONST instructions, skipping NOPs, |

72 | n/a | returns the index of the Nth-last LOAD_CONST's EXTENDED_ARG prefix. |

73 | n/a | Callers are responsible for checking CONST_STACK_LEN beforehand. |

74 | n/a | */ |

75 | n/a | static Py_ssize_t |

76 | n/a | lastn_const_start(const _Py_CODEUNIT *codestr, Py_ssize_t i, Py_ssize_t n) |

77 | n/a | { |

78 | n/a | assert(n > 0); |

79 | n/a | for (;;) { |

80 | n/a | i--; |

81 | n/a | assert(i >= 0); |

82 | n/a | if (_Py_OPCODE(codestr[i]) == LOAD_CONST) { |

83 | n/a | if (!--n) { |

84 | n/a | while (i > 0 && _Py_OPCODE(codestr[i-1]) == EXTENDED_ARG) { |

85 | n/a | i--; |

86 | n/a | } |

87 | n/a | return i; |

88 | n/a | } |

89 | n/a | } |

90 | n/a | else { |

91 | n/a | assert(_Py_OPCODE(codestr[i]) == NOP || |

92 | n/a | _Py_OPCODE(codestr[i]) == EXTENDED_ARG); |

93 | n/a | } |

94 | n/a | } |

95 | n/a | } |

96 | n/a | |

97 | n/a | /* Scans through EXTENDED ARGs, seeking the index of the effective opcode */ |

98 | n/a | static Py_ssize_t |

99 | n/a | find_op(const _Py_CODEUNIT *codestr, Py_ssize_t i) |

100 | n/a | { |

101 | n/a | while (_Py_OPCODE(codestr[i]) == EXTENDED_ARG) { |

102 | n/a | i++; |

103 | n/a | } |

104 | n/a | return i; |

105 | n/a | } |

106 | n/a | |

107 | n/a | /* Given the index of the effective opcode, |

108 | n/a | scan back to construct the oparg with EXTENDED_ARG */ |

109 | n/a | static unsigned int |

110 | n/a | get_arg(const _Py_CODEUNIT *codestr, Py_ssize_t i) |

111 | n/a | { |

112 | n/a | _Py_CODEUNIT word; |

113 | n/a | unsigned int oparg = _Py_OPARG(codestr[i]); |

114 | n/a | if (i >= 1 && _Py_OPCODE(word = codestr[i-1]) == EXTENDED_ARG) { |

115 | n/a | oparg |= _Py_OPARG(word) << 8; |

116 | n/a | if (i >= 2 && _Py_OPCODE(word = codestr[i-2]) == EXTENDED_ARG) { |

117 | n/a | oparg |= _Py_OPARG(word) << 16; |

118 | n/a | if (i >= 3 && _Py_OPCODE(word = codestr[i-3]) == EXTENDED_ARG) { |

119 | n/a | oparg |= _Py_OPARG(word) << 24; |

120 | n/a | } |

121 | n/a | } |

122 | n/a | } |

123 | n/a | return oparg; |

124 | n/a | } |

125 | n/a | |

126 | n/a | /* Fill the region with NOPs. */ |

127 | n/a | static void |

128 | n/a | fill_nops(_Py_CODEUNIT *codestr, Py_ssize_t start, Py_ssize_t end) |

129 | n/a | { |

130 | n/a | memset(codestr + start, NOP, (end - start) * sizeof(_Py_CODEUNIT)); |

131 | n/a | } |

132 | n/a | |

133 | n/a | /* Given the index of the effective opcode, |

134 | n/a | attempt to replace the argument, taking into account EXTENDED_ARG. |

135 | n/a | Returns -1 on failure, or the new op index on success */ |

136 | n/a | static Py_ssize_t |

137 | n/a | set_arg(_Py_CODEUNIT *codestr, Py_ssize_t i, unsigned int oparg) |

138 | n/a | { |

139 | n/a | unsigned int curarg = get_arg(codestr, i); |

140 | n/a | int curilen, newilen; |

141 | n/a | if (curarg == oparg) |

142 | n/a | return i; |

143 | n/a | curilen = instrsize(curarg); |

144 | n/a | newilen = instrsize(oparg); |

145 | n/a | if (curilen < newilen) { |

146 | n/a | return -1; |

147 | n/a | } |

148 | n/a | |

149 | n/a | write_op_arg(codestr + i + 1 - curilen, _Py_OPCODE(codestr[i]), oparg, newilen); |

150 | n/a | fill_nops(codestr, i + 1 - curilen + newilen, i + 1); |

151 | n/a | return i-curilen+newilen; |

152 | n/a | } |

153 | n/a | |

154 | n/a | /* Attempt to write op/arg at end of specified region of memory. |

155 | n/a | Preceding memory in the region is overwritten with NOPs. |

156 | n/a | Returns -1 on failure, op index on success */ |

157 | n/a | static Py_ssize_t |

158 | n/a | copy_op_arg(_Py_CODEUNIT *codestr, Py_ssize_t i, unsigned char op, |

159 | n/a | unsigned int oparg, Py_ssize_t maxi) |

160 | n/a | { |

161 | n/a | int ilen = instrsize(oparg); |

162 | n/a | if (i + ilen > maxi) { |

163 | n/a | return -1; |

164 | n/a | } |

165 | n/a | write_op_arg(codestr + maxi - ilen, op, oparg, ilen); |

166 | n/a | fill_nops(codestr, i, maxi - ilen); |

167 | n/a | return maxi - 1; |

168 | n/a | } |

169 | n/a | |

170 | n/a | /* Replace LOAD_CONST c1, LOAD_CONST c2 ... LOAD_CONST cn, BUILD_TUPLE n |

171 | n/a | with LOAD_CONST (c1, c2, ... cn). |

172 | n/a | The consts table must still be in list form so that the |

173 | n/a | new constant (c1, c2, ... cn) can be appended. |

174 | n/a | Called with codestr pointing to the first LOAD_CONST. |

175 | n/a | Bails out with no change if one or more of the LOAD_CONSTs is missing. |

176 | n/a | Also works for BUILD_LIST and BUILT_SET when followed by an "in" or "not in" |

177 | n/a | test; for BUILD_SET it assembles a frozenset rather than a tuple. |

178 | n/a | */ |

179 | n/a | static Py_ssize_t |

180 | n/a | fold_tuple_on_constants(_Py_CODEUNIT *codestr, Py_ssize_t c_start, |

181 | n/a | Py_ssize_t opcode_end, unsigned char opcode, |

182 | n/a | PyObject *consts, PyObject **objs, int n) |

183 | n/a | { |

184 | n/a | PyObject *newconst, *constant; |

185 | n/a | Py_ssize_t i, len_consts; |

186 | n/a | |

187 | n/a | /* Pre-conditions */ |

188 | n/a | assert(PyList_CheckExact(consts)); |

189 | n/a | |

190 | n/a | /* Buildup new tuple of constants */ |

191 | n/a | newconst = PyTuple_New(n); |

192 | n/a | if (newconst == NULL) { |

193 | n/a | return -1; |

194 | n/a | } |

195 | n/a | for (i=0 ; i<n ; i++) { |

196 | n/a | constant = objs[i]; |

197 | n/a | Py_INCREF(constant); |

198 | n/a | PyTuple_SET_ITEM(newconst, i, constant); |

199 | n/a | } |

200 | n/a | |

201 | n/a | /* If it's a BUILD_SET, use the PyTuple we just built to create a |

202 | n/a | PyFrozenSet, and use that as the constant instead: */ |

203 | n/a | if (opcode == BUILD_SET) { |

204 | n/a | Py_SETREF(newconst, PyFrozenSet_New(newconst)); |

205 | n/a | if (newconst == NULL) { |

206 | n/a | return -1; |

207 | n/a | } |

208 | n/a | } |

209 | n/a | |

210 | n/a | /* Append folded constant onto consts */ |

211 | n/a | len_consts = PyList_GET_SIZE(consts); |

212 | n/a | if (PyList_Append(consts, newconst)) { |

213 | n/a | Py_DECREF(newconst); |

214 | n/a | return -1; |

215 | n/a | } |

216 | n/a | Py_DECREF(newconst); |

217 | n/a | |

218 | n/a | return copy_op_arg(codestr, c_start, LOAD_CONST, len_consts, opcode_end); |

219 | n/a | } |

220 | n/a | |

221 | n/a | /* Replace LOAD_CONST c1, LOAD_CONST c2, BINOP |

222 | n/a | with LOAD_CONST binop(c1,c2) |

223 | n/a | The consts table must still be in list form so that the |

224 | n/a | new constant can be appended. |

225 | n/a | Called with codestr pointing to the BINOP. |

226 | n/a | Abandons the transformation if the folding fails (i.e. 1+'a'). |

227 | n/a | If the new constant is a sequence, only folds when the size |

228 | n/a | is below a threshold value. That keeps pyc files from |

229 | n/a | becoming large in the presence of code like: (None,)*1000. |

230 | n/a | */ |

231 | n/a | static Py_ssize_t |

232 | n/a | fold_binops_on_constants(_Py_CODEUNIT *codestr, Py_ssize_t c_start, |

233 | n/a | Py_ssize_t opcode_end, unsigned char opcode, |

234 | n/a | PyObject *consts, PyObject **objs) |

235 | n/a | { |

236 | n/a | PyObject *newconst, *v, *w; |

237 | n/a | Py_ssize_t len_consts, size; |

238 | n/a | |

239 | n/a | /* Pre-conditions */ |

240 | n/a | assert(PyList_CheckExact(consts)); |

241 | n/a | len_consts = PyList_GET_SIZE(consts); |

242 | n/a | |

243 | n/a | /* Create new constant */ |

244 | n/a | v = objs[0]; |

245 | n/a | w = objs[1]; |

246 | n/a | switch (opcode) { |

247 | n/a | case BINARY_POWER: |

248 | n/a | newconst = PyNumber_Power(v, w, Py_None); |

249 | n/a | break; |

250 | n/a | case BINARY_MULTIPLY: |

251 | n/a | newconst = PyNumber_Multiply(v, w); |

252 | n/a | break; |

253 | n/a | case BINARY_TRUE_DIVIDE: |

254 | n/a | newconst = PyNumber_TrueDivide(v, w); |

255 | n/a | break; |

256 | n/a | case BINARY_FLOOR_DIVIDE: |

257 | n/a | newconst = PyNumber_FloorDivide(v, w); |

258 | n/a | break; |

259 | n/a | case BINARY_MODULO: |

260 | n/a | newconst = PyNumber_Remainder(v, w); |

261 | n/a | break; |

262 | n/a | case BINARY_ADD: |

263 | n/a | newconst = PyNumber_Add(v, w); |

264 | n/a | break; |

265 | n/a | case BINARY_SUBTRACT: |

266 | n/a | newconst = PyNumber_Subtract(v, w); |

267 | n/a | break; |

268 | n/a | case BINARY_SUBSCR: |

269 | n/a | newconst = PyObject_GetItem(v, w); |

270 | n/a | break; |

271 | n/a | case BINARY_LSHIFT: |

272 | n/a | newconst = PyNumber_Lshift(v, w); |

273 | n/a | break; |

274 | n/a | case BINARY_RSHIFT: |

275 | n/a | newconst = PyNumber_Rshift(v, w); |

276 | n/a | break; |

277 | n/a | case BINARY_AND: |

278 | n/a | newconst = PyNumber_And(v, w); |

279 | n/a | break; |

280 | n/a | case BINARY_XOR: |

281 | n/a | newconst = PyNumber_Xor(v, w); |

282 | n/a | break; |

283 | n/a | case BINARY_OR: |

284 | n/a | newconst = PyNumber_Or(v, w); |

285 | n/a | break; |

286 | n/a | default: |

287 | n/a | /* Called with an unknown opcode */ |

288 | n/a | PyErr_Format(PyExc_SystemError, |

289 | n/a | "unexpected binary operation %d on a constant", |

290 | n/a | opcode); |

291 | n/a | return -1; |

292 | n/a | } |

293 | n/a | if (newconst == NULL) { |

294 | n/a | if(!PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) { |

295 | n/a | PyErr_Clear(); |

296 | n/a | } |

297 | n/a | return -1; |

298 | n/a | } |

299 | n/a | size = PyObject_Size(newconst); |

300 | n/a | if (size == -1) { |

301 | n/a | if (PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) { |

302 | n/a | return -1; |

303 | n/a | } |

304 | n/a | PyErr_Clear(); |

305 | n/a | } else if (size > 20) { |

306 | n/a | Py_DECREF(newconst); |

307 | n/a | return -1; |

308 | n/a | } |

309 | n/a | |

310 | n/a | /* Append folded constant into consts table */ |

311 | n/a | if (PyList_Append(consts, newconst)) { |

312 | n/a | Py_DECREF(newconst); |

313 | n/a | return -1; |

314 | n/a | } |

315 | n/a | Py_DECREF(newconst); |

316 | n/a | |

317 | n/a | return copy_op_arg(codestr, c_start, LOAD_CONST, len_consts, opcode_end); |

318 | n/a | } |

319 | n/a | |

320 | n/a | static Py_ssize_t |

321 | n/a | fold_unaryops_on_constants(_Py_CODEUNIT *codestr, Py_ssize_t c_start, |

322 | n/a | Py_ssize_t opcode_end, unsigned char opcode, |

323 | n/a | PyObject *consts, PyObject *v) |

324 | n/a | { |

325 | n/a | PyObject *newconst; |

326 | n/a | Py_ssize_t len_consts; |

327 | n/a | |

328 | n/a | /* Pre-conditions */ |

329 | n/a | assert(PyList_CheckExact(consts)); |

330 | n/a | len_consts = PyList_GET_SIZE(consts); |

331 | n/a | |

332 | n/a | /* Create new constant */ |

333 | n/a | switch (opcode) { |

334 | n/a | case UNARY_NEGATIVE: |

335 | n/a | newconst = PyNumber_Negative(v); |

336 | n/a | break; |

337 | n/a | case UNARY_INVERT: |

338 | n/a | newconst = PyNumber_Invert(v); |

339 | n/a | break; |

340 | n/a | case UNARY_POSITIVE: |

341 | n/a | newconst = PyNumber_Positive(v); |

342 | n/a | break; |

343 | n/a | default: |

344 | n/a | /* Called with an unknown opcode */ |

345 | n/a | PyErr_Format(PyExc_SystemError, |

346 | n/a | "unexpected unary operation %d on a constant", |

347 | n/a | opcode); |

348 | n/a | return -1; |

349 | n/a | } |

350 | n/a | if (newconst == NULL) { |

351 | n/a | if(!PyErr_ExceptionMatches(PyExc_KeyboardInterrupt)) { |

352 | n/a | PyErr_Clear(); |

353 | n/a | } |

354 | n/a | return -1; |

355 | n/a | } |

356 | n/a | |

357 | n/a | /* Append folded constant into consts table */ |

358 | n/a | if (PyList_Append(consts, newconst)) { |

359 | n/a | Py_DECREF(newconst); |

360 | n/a | PyErr_Clear(); |

361 | n/a | return -1; |

362 | n/a | } |

363 | n/a | Py_DECREF(newconst); |

364 | n/a | |

365 | n/a | return copy_op_arg(codestr, c_start, LOAD_CONST, len_consts, opcode_end); |

366 | n/a | } |

367 | n/a | |

368 | n/a | static unsigned int * |

369 | n/a | markblocks(_Py_CODEUNIT *code, Py_ssize_t len) |

370 | n/a | { |

371 | n/a | unsigned int *blocks = PyMem_New(unsigned int, len); |

372 | n/a | int i, j, opcode, blockcnt = 0; |

373 | n/a | |

374 | n/a | if (blocks == NULL) { |

375 | n/a | PyErr_NoMemory(); |

376 | n/a | return NULL; |

377 | n/a | } |

378 | n/a | memset(blocks, 0, len*sizeof(int)); |

379 | n/a | |

380 | n/a | /* Mark labels in the first pass */ |

381 | n/a | for (i = 0; i < len; i++) { |

382 | n/a | opcode = _Py_OPCODE(code[i]); |

383 | n/a | switch (opcode) { |

384 | n/a | case FOR_ITER: |

385 | n/a | case JUMP_FORWARD: |

386 | n/a | case JUMP_IF_FALSE_OR_POP: |

387 | n/a | case JUMP_IF_TRUE_OR_POP: |

388 | n/a | case POP_JUMP_IF_FALSE: |

389 | n/a | case POP_JUMP_IF_TRUE: |

390 | n/a | case JUMP_ABSOLUTE: |

391 | n/a | case CONTINUE_LOOP: |

392 | n/a | case SETUP_LOOP: |

393 | n/a | case SETUP_EXCEPT: |

394 | n/a | case SETUP_FINALLY: |

395 | n/a | case SETUP_WITH: |

396 | n/a | case SETUP_ASYNC_WITH: |

397 | n/a | j = GETJUMPTGT(code, i); |

398 | n/a | assert(j < len); |

399 | n/a | blocks[j] = 1; |

400 | n/a | break; |

401 | n/a | } |

402 | n/a | } |

403 | n/a | /* Build block numbers in the second pass */ |

404 | n/a | for (i = 0; i < len; i++) { |

405 | n/a | blockcnt += blocks[i]; /* increment blockcnt over labels */ |

406 | n/a | blocks[i] = blockcnt; |

407 | n/a | } |

408 | n/a | return blocks; |

409 | n/a | } |

410 | n/a | |

411 | n/a | /* Perform basic peephole optimizations to components of a code object. |

412 | n/a | The consts object should still be in list form to allow new constants |

413 | n/a | to be appended. |

414 | n/a | |

415 | n/a | To keep the optimizer simple, it bails when the lineno table has complex |

416 | n/a | encoding for gaps >= 255. |

417 | n/a | |

418 | n/a | Optimizations are restricted to simple transformations occurring within a |

419 | n/a | single basic block. All transformations keep the code size the same or |

420 | n/a | smaller. For those that reduce size, the gaps are initially filled with |

421 | n/a | NOPs. Later those NOPs are removed and the jump addresses retargeted in |

422 | n/a | a single pass. */ |

423 | n/a | |

424 | n/a | PyObject * |

425 | n/a | PyCode_Optimize(PyObject *code, PyObject* consts, PyObject *names, |

426 | n/a | PyObject *lnotab_obj) |

427 | n/a | { |

428 | n/a | Py_ssize_t h, i, nexti, op_start, codelen, tgt; |

429 | n/a | unsigned int j, nops; |

430 | n/a | unsigned char opcode, nextop; |

431 | n/a | _Py_CODEUNIT *codestr = NULL; |

432 | n/a | unsigned char *lnotab; |

433 | n/a | unsigned int cum_orig_offset, last_offset; |

434 | n/a | Py_ssize_t tabsiz; |

435 | n/a | PyObject **const_stack = NULL; |

436 | n/a | Py_ssize_t const_stack_top = -1; |

437 | n/a | Py_ssize_t const_stack_size = 0; |

438 | n/a | int in_consts = 0; /* whether we are in a LOAD_CONST sequence */ |

439 | n/a | unsigned int *blocks = NULL; |

440 | n/a | |

441 | n/a | /* Bail out if an exception is set */ |

442 | n/a | if (PyErr_Occurred()) |

443 | n/a | goto exitError; |

444 | n/a | |

445 | n/a | /* Bypass optimization when the lnotab table is too complex */ |

446 | n/a | assert(PyBytes_Check(lnotab_obj)); |

447 | n/a | lnotab = (unsigned char*)PyBytes_AS_STRING(lnotab_obj); |

448 | n/a | tabsiz = PyBytes_GET_SIZE(lnotab_obj); |

449 | n/a | assert(tabsiz == 0 || Py_REFCNT(lnotab_obj) == 1); |

450 | n/a | if (memchr(lnotab, 255, tabsiz) != NULL) { |

451 | n/a | /* 255 values are used for multibyte bytecode instructions */ |

452 | n/a | goto exitUnchanged; |

453 | n/a | } |

454 | n/a | /* Note: -128 and 127 special values for line number delta are ok, |

455 | n/a | the peephole optimizer doesn't modify line numbers. */ |

456 | n/a | |

457 | n/a | assert(PyBytes_Check(code)); |

458 | n/a | codelen = PyBytes_GET_SIZE(code); |

459 | n/a | assert(codelen % sizeof(_Py_CODEUNIT) == 0); |

460 | n/a | |

461 | n/a | /* Make a modifiable copy of the code string */ |

462 | n/a | codestr = (_Py_CODEUNIT *)PyMem_Malloc(codelen); |

463 | n/a | if (codestr == NULL) { |

464 | n/a | PyErr_NoMemory(); |

465 | n/a | goto exitError; |

466 | n/a | } |

467 | n/a | memcpy(codestr, PyBytes_AS_STRING(code), codelen); |

468 | n/a | codelen /= sizeof(_Py_CODEUNIT); |

469 | n/a | |

470 | n/a | blocks = markblocks(codestr, codelen); |

471 | n/a | if (blocks == NULL) |

472 | n/a | goto exitError; |

473 | n/a | assert(PyList_Check(consts)); |

474 | n/a | |

475 | n/a | CONST_STACK_CREATE(); |

476 | n/a | |

477 | n/a | for (i=find_op(codestr, 0) ; i<codelen ; i=nexti) { |

478 | n/a | opcode = _Py_OPCODE(codestr[i]); |

479 | n/a | op_start = i; |

480 | n/a | while (op_start >= 1 && _Py_OPCODE(codestr[op_start-1]) == EXTENDED_ARG) { |

481 | n/a | op_start--; |

482 | n/a | } |

483 | n/a | |

484 | n/a | nexti = i + 1; |

485 | n/a | while (nexti < codelen && _Py_OPCODE(codestr[nexti]) == EXTENDED_ARG) |

486 | n/a | nexti++; |

487 | n/a | nextop = nexti < codelen ? _Py_OPCODE(codestr[nexti]) : 0; |

488 | n/a | |

489 | n/a | if (!in_consts) { |

490 | n/a | CONST_STACK_RESET(); |

491 | n/a | } |

492 | n/a | in_consts = 0; |

493 | n/a | |

494 | n/a | switch (opcode) { |

495 | n/a | /* Replace UNARY_NOT POP_JUMP_IF_FALSE |

496 | n/a | with POP_JUMP_IF_TRUE */ |

497 | n/a | case UNARY_NOT: |

498 | n/a | if (nextop != POP_JUMP_IF_FALSE |

499 | n/a | || !ISBASICBLOCK(blocks, op_start, i + 1)) |

500 | n/a | break; |

501 | n/a | fill_nops(codestr, op_start, i + 1); |

502 | n/a | codestr[nexti] = PACKOPARG(POP_JUMP_IF_TRUE, _Py_OPARG(codestr[nexti])); |

503 | n/a | break; |

504 | n/a | |

505 | n/a | /* not a is b --> a is not b |

506 | n/a | not a in b --> a not in b |

507 | n/a | not a is not b --> a is b |

508 | n/a | not a not in b --> a in b |

509 | n/a | */ |

510 | n/a | case COMPARE_OP: |

511 | n/a | j = get_arg(codestr, i); |

512 | n/a | if (j < 6 || j > 9 || |

513 | n/a | nextop != UNARY_NOT || |

514 | n/a | !ISBASICBLOCK(blocks, op_start, i + 1)) |

515 | n/a | break; |

516 | n/a | codestr[i] = PACKOPARG(opcode, j^1); |

517 | n/a | fill_nops(codestr, i + 1, nexti + 1); |

518 | n/a | break; |

519 | n/a | |

520 | n/a | /* Skip over LOAD_CONST trueconst |

521 | n/a | POP_JUMP_IF_FALSE xx. This improves |

522 | n/a | "while 1" performance. */ |

523 | n/a | case LOAD_CONST: |

524 | n/a | CONST_STACK_PUSH_OP(i); |

525 | n/a | if (nextop != POP_JUMP_IF_FALSE || |

526 | n/a | !ISBASICBLOCK(blocks, op_start, i + 1) || |

527 | n/a | !PyObject_IsTrue(PyList_GET_ITEM(consts, get_arg(codestr, i)))) |

528 | n/a | break; |

529 | n/a | fill_nops(codestr, op_start, nexti + 1); |

530 | n/a | CONST_STACK_POP(1); |

531 | n/a | break; |

532 | n/a | |

533 | n/a | /* Try to fold tuples of constants (includes a case for lists |

534 | n/a | and sets which are only used for "in" and "not in" tests). |

535 | n/a | Skip over BUILD_SEQN 1 UNPACK_SEQN 1. |

536 | n/a | Replace BUILD_SEQN 2 UNPACK_SEQN 2 with ROT2. |

537 | n/a | Replace BUILD_SEQN 3 UNPACK_SEQN 3 with ROT3 ROT2. */ |

538 | n/a | case BUILD_TUPLE: |

539 | n/a | case BUILD_LIST: |

540 | n/a | case BUILD_SET: |

541 | n/a | j = get_arg(codestr, i); |

542 | n/a | if (j > 0 && CONST_STACK_LEN() >= j) { |

543 | n/a | h = lastn_const_start(codestr, op_start, j); |

544 | n/a | if ((opcode == BUILD_TUPLE && |

545 | n/a | ISBASICBLOCK(blocks, h, op_start)) || |

546 | n/a | ((opcode == BUILD_LIST || opcode == BUILD_SET) && |

547 | n/a | ((nextop==COMPARE_OP && |

548 | n/a | (_Py_OPARG(codestr[nexti]) == PyCmp_IN || |

549 | n/a | _Py_OPARG(codestr[nexti]) == PyCmp_NOT_IN)) || |

550 | n/a | nextop == GET_ITER) && ISBASICBLOCK(blocks, h, i + 1))) { |

551 | n/a | h = fold_tuple_on_constants(codestr, h, i + 1, opcode, |

552 | n/a | consts, CONST_STACK_LASTN(j), j); |

553 | n/a | if (h >= 0) { |

554 | n/a | CONST_STACK_POP(j); |

555 | n/a | CONST_STACK_PUSH_OP(h); |

556 | n/a | } |

557 | n/a | break; |

558 | n/a | } |

559 | n/a | } |

560 | n/a | if (nextop != UNPACK_SEQUENCE || |

561 | n/a | !ISBASICBLOCK(blocks, op_start, i + 1) || |

562 | n/a | j != get_arg(codestr, nexti) || |

563 | n/a | opcode == BUILD_SET) |

564 | n/a | break; |

565 | n/a | if (j < 2) { |

566 | n/a | fill_nops(codestr, op_start, nexti + 1); |

567 | n/a | } else if (j == 2) { |

568 | n/a | codestr[op_start] = PACKOPARG(ROT_TWO, 0); |

569 | n/a | fill_nops(codestr, op_start + 1, nexti + 1); |

570 | n/a | CONST_STACK_RESET(); |

571 | n/a | } else if (j == 3) { |

572 | n/a | codestr[op_start] = PACKOPARG(ROT_THREE, 0); |

573 | n/a | codestr[op_start + 1] = PACKOPARG(ROT_TWO, 0); |

574 | n/a | fill_nops(codestr, op_start + 2, nexti + 1); |

575 | n/a | CONST_STACK_RESET(); |

576 | n/a | } |

577 | n/a | break; |

578 | n/a | |

579 | n/a | /* Fold binary ops on constants. |

580 | n/a | LOAD_CONST c1 LOAD_CONST c2 BINOP --> LOAD_CONST binop(c1,c2) */ |

581 | n/a | case BINARY_POWER: |

582 | n/a | case BINARY_MULTIPLY: |

583 | n/a | case BINARY_TRUE_DIVIDE: |

584 | n/a | case BINARY_FLOOR_DIVIDE: |

585 | n/a | case BINARY_MODULO: |

586 | n/a | case BINARY_ADD: |

587 | n/a | case BINARY_SUBTRACT: |

588 | n/a | case BINARY_SUBSCR: |

589 | n/a | case BINARY_LSHIFT: |

590 | n/a | case BINARY_RSHIFT: |

591 | n/a | case BINARY_AND: |

592 | n/a | case BINARY_XOR: |

593 | n/a | case BINARY_OR: |

594 | n/a | if (CONST_STACK_LEN() < 2) |

595 | n/a | break; |

596 | n/a | h = lastn_const_start(codestr, op_start, 2); |

597 | n/a | if (ISBASICBLOCK(blocks, h, op_start)) { |

598 | n/a | h = fold_binops_on_constants(codestr, h, i + 1, opcode, |

599 | n/a | consts, CONST_STACK_LASTN(2)); |

600 | n/a | if (h >= 0) { |

601 | n/a | CONST_STACK_POP(2); |

602 | n/a | CONST_STACK_PUSH_OP(h); |

603 | n/a | } |

604 | n/a | } |

605 | n/a | break; |

606 | n/a | |

607 | n/a | /* Fold unary ops on constants. |

608 | n/a | LOAD_CONST c1 UNARY_OP --> LOAD_CONST unary_op(c) */ |

609 | n/a | case UNARY_NEGATIVE: |

610 | n/a | case UNARY_INVERT: |

611 | n/a | case UNARY_POSITIVE: |

612 | n/a | if (CONST_STACK_LEN() < 1) |

613 | n/a | break; |

614 | n/a | h = lastn_const_start(codestr, op_start, 1); |

615 | n/a | if (ISBASICBLOCK(blocks, h, op_start)) { |

616 | n/a | h = fold_unaryops_on_constants(codestr, h, i + 1, opcode, |

617 | n/a | consts, *CONST_STACK_LASTN(1)); |

618 | n/a | if (h >= 0) { |

619 | n/a | CONST_STACK_POP(1); |

620 | n/a | CONST_STACK_PUSH_OP(h); |

621 | n/a | } |

622 | n/a | } |

623 | n/a | break; |

624 | n/a | |

625 | n/a | /* Simplify conditional jump to conditional jump where the |

626 | n/a | result of the first test implies the success of a similar |

627 | n/a | test or the failure of the opposite test. |

628 | n/a | Arises in code like: |

629 | n/a | "if a and b:" |

630 | n/a | "if a or b:" |

631 | n/a | "a and b or c" |

632 | n/a | "(a and b) and c" |

633 | n/a | x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_FALSE_OR_POP z |

634 | n/a | --> x:JUMP_IF_FALSE_OR_POP z |

635 | n/a | x:JUMP_IF_FALSE_OR_POP y y:JUMP_IF_TRUE_OR_POP z |

636 | n/a | --> x:POP_JUMP_IF_FALSE y+1 |

637 | n/a | where y+1 is the instruction following the second test. |

638 | n/a | */ |

639 | n/a | case JUMP_IF_FALSE_OR_POP: |

640 | n/a | case JUMP_IF_TRUE_OR_POP: |

641 | n/a | h = get_arg(codestr, i) / sizeof(_Py_CODEUNIT); |

642 | n/a | tgt = find_op(codestr, h); |

643 | n/a | |

644 | n/a | j = _Py_OPCODE(codestr[tgt]); |

645 | n/a | if (CONDITIONAL_JUMP(j)) { |

646 | n/a | /* NOTE: all possible jumps here are absolute. */ |

647 | n/a | if (JUMPS_ON_TRUE(j) == JUMPS_ON_TRUE(opcode)) { |

648 | n/a | /* The second jump will be taken iff the first is. |

649 | n/a | The current opcode inherits its target's |

650 | n/a | stack effect */ |

651 | n/a | h = set_arg(codestr, i, get_arg(codestr, tgt)); |

652 | n/a | } else { |

653 | n/a | /* The second jump is not taken if the first is (so |

654 | n/a | jump past it), and all conditional jumps pop their |

655 | n/a | argument when they're not taken (so change the |

656 | n/a | first jump to pop its argument when it's taken). */ |

657 | n/a | h = set_arg(codestr, i, (tgt + 1) * sizeof(_Py_CODEUNIT)); |

658 | n/a | j = opcode == JUMP_IF_TRUE_OR_POP ? |

659 | n/a | POP_JUMP_IF_TRUE : POP_JUMP_IF_FALSE; |

660 | n/a | } |

661 | n/a | |

662 | n/a | if (h >= 0) { |

663 | n/a | nexti = h; |

664 | n/a | codestr[nexti] = PACKOPARG(j, _Py_OPARG(codestr[nexti])); |

665 | n/a | break; |

666 | n/a | } |

667 | n/a | } |

668 | n/a | /* Intentional fallthrough */ |

669 | n/a | |

670 | n/a | /* Replace jumps to unconditional jumps */ |

671 | n/a | case POP_JUMP_IF_FALSE: |

672 | n/a | case POP_JUMP_IF_TRUE: |

673 | n/a | case FOR_ITER: |

674 | n/a | case JUMP_FORWARD: |

675 | n/a | case JUMP_ABSOLUTE: |

676 | n/a | case CONTINUE_LOOP: |

677 | n/a | case SETUP_LOOP: |

678 | n/a | case SETUP_EXCEPT: |

679 | n/a | case SETUP_FINALLY: |

680 | n/a | case SETUP_WITH: |

681 | n/a | case SETUP_ASYNC_WITH: |

682 | n/a | h = GETJUMPTGT(codestr, i); |

683 | n/a | tgt = find_op(codestr, h); |

684 | n/a | /* Replace JUMP_* to a RETURN into just a RETURN */ |

685 | n/a | if (UNCONDITIONAL_JUMP(opcode) && |

686 | n/a | _Py_OPCODE(codestr[tgt]) == RETURN_VALUE) { |

687 | n/a | codestr[op_start] = PACKOPARG(RETURN_VALUE, 0); |

688 | n/a | fill_nops(codestr, op_start + 1, i + 1); |

689 | n/a | } else if (UNCONDITIONAL_JUMP(_Py_OPCODE(codestr[tgt]))) { |

690 | n/a | j = GETJUMPTGT(codestr, tgt); |

691 | n/a | if (opcode == JUMP_FORWARD) { /* JMP_ABS can go backwards */ |

692 | n/a | opcode = JUMP_ABSOLUTE; |

693 | n/a | } else if (!ABSOLUTE_JUMP(opcode)) { |

694 | n/a | if ((Py_ssize_t)j < i + 1) { |

695 | n/a | break; /* No backward relative jumps */ |

696 | n/a | } |

697 | n/a | j -= i + 1; /* Calc relative jump addr */ |

698 | n/a | } |

699 | n/a | j *= sizeof(_Py_CODEUNIT); |

700 | n/a | copy_op_arg(codestr, op_start, opcode, j, i + 1); |

701 | n/a | } |

702 | n/a | break; |

703 | n/a | |

704 | n/a | /* Remove unreachable ops after RETURN */ |

705 | n/a | case RETURN_VALUE: |

706 | n/a | h = i + 1; |

707 | n/a | while (h < codelen && ISBASICBLOCK(blocks, i, h)) { |

708 | n/a | h++; |

709 | n/a | } |

710 | n/a | if (h > i + 1) { |

711 | n/a | fill_nops(codestr, i + 1, h); |

712 | n/a | nexti = find_op(codestr, h); |

713 | n/a | } |

714 | n/a | break; |

715 | n/a | } |

716 | n/a | } |

717 | n/a | |

718 | n/a | /* Fixup lnotab */ |

719 | n/a | for (i = 0, nops = 0; i < codelen; i++) { |

720 | n/a | assert(i - nops <= INT_MAX); |

721 | n/a | /* original code offset => new code offset */ |

722 | n/a | blocks[i] = i - nops; |

723 | n/a | if (_Py_OPCODE(codestr[i]) == NOP) |

724 | n/a | nops++; |

725 | n/a | } |

726 | n/a | cum_orig_offset = 0; |

727 | n/a | last_offset = 0; |

728 | n/a | for (i=0 ; i < tabsiz ; i+=2) { |

729 | n/a | unsigned int offset_delta, new_offset; |

730 | n/a | cum_orig_offset += lnotab[i]; |

731 | n/a | assert(cum_orig_offset % sizeof(_Py_CODEUNIT) == 0); |

732 | n/a | new_offset = blocks[cum_orig_offset / sizeof(_Py_CODEUNIT)] * |

733 | n/a | sizeof(_Py_CODEUNIT); |

734 | n/a | offset_delta = new_offset - last_offset; |

735 | n/a | assert(offset_delta <= 255); |

736 | n/a | lnotab[i] = (unsigned char)offset_delta; |

737 | n/a | last_offset = new_offset; |

738 | n/a | } |

739 | n/a | |

740 | n/a | /* Remove NOPs and fixup jump targets */ |

741 | n/a | for (op_start = i = h = 0; i < codelen; i++, op_start = i) { |

742 | n/a | j = _Py_OPARG(codestr[i]); |

743 | n/a | while (_Py_OPCODE(codestr[i]) == EXTENDED_ARG) { |

744 | n/a | i++; |

745 | n/a | j = j<<8 | _Py_OPARG(codestr[i]); |

746 | n/a | } |

747 | n/a | opcode = _Py_OPCODE(codestr[i]); |

748 | n/a | switch (opcode) { |

749 | n/a | case NOP:continue; |

750 | n/a | |

751 | n/a | case JUMP_ABSOLUTE: |

752 | n/a | case CONTINUE_LOOP: |

753 | n/a | case POP_JUMP_IF_FALSE: |

754 | n/a | case POP_JUMP_IF_TRUE: |

755 | n/a | case JUMP_IF_FALSE_OR_POP: |

756 | n/a | case JUMP_IF_TRUE_OR_POP: |

757 | n/a | j = blocks[j / sizeof(_Py_CODEUNIT)] * sizeof(_Py_CODEUNIT); |

758 | n/a | break; |

759 | n/a | |

760 | n/a | case FOR_ITER: |

761 | n/a | case JUMP_FORWARD: |

762 | n/a | case SETUP_LOOP: |

763 | n/a | case SETUP_EXCEPT: |

764 | n/a | case SETUP_FINALLY: |

765 | n/a | case SETUP_WITH: |

766 | n/a | case SETUP_ASYNC_WITH: |

767 | n/a | j = blocks[j / sizeof(_Py_CODEUNIT) + i + 1] - blocks[i] - 1; |

768 | n/a | j *= sizeof(_Py_CODEUNIT); |

769 | n/a | break; |

770 | n/a | } |

771 | n/a | nexti = i - op_start + 1; |

772 | n/a | if (instrsize(j) > nexti) |

773 | n/a | goto exitUnchanged; |

774 | n/a | /* If instrsize(j) < nexti, we'll emit EXTENDED_ARG 0 */ |

775 | n/a | write_op_arg(codestr + h, opcode, j, nexti); |

776 | n/a | h += nexti; |

777 | n/a | } |

778 | n/a | assert(h + (Py_ssize_t)nops == codelen); |

779 | n/a | |

780 | n/a | CONST_STACK_DELETE(); |

781 | n/a | PyMem_Free(blocks); |

782 | n/a | code = PyBytes_FromStringAndSize((char *)codestr, h * sizeof(_Py_CODEUNIT)); |

783 | n/a | PyMem_Free(codestr); |

784 | n/a | return code; |

785 | n/a | |

786 | n/a | exitError: |

787 | n/a | code = NULL; |

788 | n/a | |

789 | n/a | exitUnchanged: |

790 | n/a | Py_XINCREF(code); |

791 | n/a | CONST_STACK_DELETE(); |

792 | n/a | PyMem_Free(blocks); |

793 | n/a | PyMem_Free(codestr); |

794 | n/a | return code; |

795 | n/a | } |