Commit 81c739e

GH-120024: Tidy up case generator code a bit. (GH-122780)
1 parent 0d9c123 commit 81c739e

11 files changed: +175 −103 lines changed
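
The diffs below are mostly mechanical tidy-ups of the cases-generator sources: long signatures and calls are wrapped with one argument per line and a trailing comma, top-level definitions are separated by two blank lines, and spacing around slices and keyword defaults is normalized. A minimal sketch of the wrapping convention, using hypothetical names rather than code from this commit:

def add_instruction_sketch(
    where: str,
    name: str,
    parts: list[str],
    instructions: dict[str, tuple[str, list[str]]],
) -> None:
    # One parameter per line with a trailing comma, mirroring how
    # add_instruction() and make_uop() are reformatted in analyzer.py below.
    instructions[name] = (where, parts)

table: dict[str, tuple[str, list[str]]] = {}
add_instruction_sketch("bytecodes.c", "LOAD_FAST", ["_LOAD_FAST"], table)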

‎Python/generated_cases.c.h

−3 (0 additions & 3 deletions)
Some generated files are not rendered by default.

‎Tools/cases_generator/analyzer.py

+63 −25 (63 additions & 25 deletions)
@@ -62,7 +62,6 @@ def infallible(self) -> bool:
         return not self.error_with_pop and not self.error_without_pop
 
 
-
 SKIP_PROPERTIES = Properties(
     escapes=False,
     error_with_pop=False,
@@ -99,7 +98,6 @@ def properties(self) -> Properties:
 
 
 class Flush:
-
     @property
     def properties(self) -> Properties:
         return SKIP_PROPERTIES
@@ -112,6 +110,7 @@ def name(self) -> str:
     def size(self) -> int:
         return 0
 
+
 @dataclass
 class StackItem:
     name: str
@@ -133,6 +132,7 @@ def is_array(self) -> bool:
     def get_size(self) -> str:
         return self.size if self.size else "1"
 
+
 @dataclass
 class StackEffect:
     inputs: list[StackItem]
@@ -150,6 +150,7 @@ class CacheEntry:
     def __str__(self) -> str:
         return f"{self.name}/{self.size}"
 
+
 @dataclass
 class Uop:
     name: str
@@ -163,7 +164,7 @@ class Uop:
     _size: int = -1
     implicitly_created: bool = False
     replicated = 0
-    replicates : "Uop | None" = None
+    replicates: "Uop | None" = None
 
     def dump(self, indent: str) -> None:
         print(
@@ -308,19 +309,26 @@ def override_error(
     )
 
 
-def convert_stack_item(item: parser.StackEffect, replace_op_arg_1: str | None) -> StackItem:
+def convert_stack_item(
+    item: parser.StackEffect, replace_op_arg_1: str | None
+) -> StackItem:
     cond = item.cond
     if replace_op_arg_1 and OPARG_AND_1.match(item.cond):
         cond = replace_op_arg_1
-    return StackItem(
-        item.name, item.type, cond, item.size
-    )
+    return StackItem(item.name, item.type, cond, item.size)
+
 
-def analyze_stack(op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None) -> StackEffect:
+def analyze_stack(
+    op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | None = None
+) -> StackEffect:
     inputs: list[StackItem] = [
-        convert_stack_item(i, replace_op_arg_1) for i in op.inputs if isinstance(i, parser.StackEffect)
+        convert_stack_item(i, replace_op_arg_1)
+        for i in op.inputs
+        if isinstance(i, parser.StackEffect)
+    ]
+    outputs: list[StackItem] = [
+        convert_stack_item(i, replace_op_arg_1) for i in op.outputs
     ]
-    outputs: list[StackItem] = [convert_stack_item(i, replace_op_arg_1) for i in op.outputs]
     # Mark variables with matching names at the base of the stack as "peek"
     modified = False
     for input, output in zip(inputs, outputs):
@@ -331,9 +339,11 @@ def analyze_stack(op: parser.InstDef | parser.Pseudo, replace_op_arg_1: str | No
     if isinstance(op, parser.InstDef):
         output_names = [out.name for out in outputs]
         for input in inputs:
-            if (variable_used(op, input.name) or
-                variable_used(op, "DECREF_INPUTS") or
-                (not input.peek and input.name in output_names)):
+            if (
+                variable_used(op, input.name)
+                or variable_used(op, "DECREF_INPUTS")
+                or (not input.peek and input.name in output_names)
+            ):
                 input.used = True
         for output in outputs:
             if variable_used(op, output.name):
@@ -359,9 +369,9 @@ def analyze_deferred_refs(node: parser.InstDef) -> dict[lexer.Token, str | None]
     def find_assignment_target(idx: int) -> list[lexer.Token]:
         """Find the tokens that make up the left-hand side of an assignment"""
         offset = 1
-        for tkn in reversed(node.block.tokens[:idx-1]):
+        for tkn in reversed(node.block.tokens[: idx - 1]):
             if tkn.kind == "SEMI" or tkn.kind == "LBRACE" or tkn.kind == "RBRACE":
-                return node.block.tokens[idx-offset:idx-1]
+                return node.block.tokens[idx - offset : idx - 1]
             offset += 1
         return []
 
@@ -370,42 +380,54 @@ def find_assignment_target(idx: int) -> list[lexer.Token]:
         if tkn.kind != "IDENTIFIER" or tkn.text != "PyStackRef_FromPyObjectNew":
             continue
 
-        if idx == 0 or node.block.tokens[idx-1].kind != "EQUALS":
+        if idx == 0 or node.block.tokens[idx - 1].kind != "EQUALS":
             raise analysis_error("Expected '=' before PyStackRef_FromPyObjectNew", tkn)
 
         lhs = find_assignment_target(idx)
         if len(lhs) == 0:
-            raise analysis_error("PyStackRef_FromPyObjectNew() must be assigned to an output", tkn)
+            raise analysis_error(
+                "PyStackRef_FromPyObjectNew() must be assigned to an output", tkn
+            )
 
-        if lhs[0].kind == "TIMES" or any(t.kind == "ARROW" or t.kind == "LBRACKET" for t in lhs[1:]):
+        if lhs[0].kind == "TIMES" or any(
+            t.kind == "ARROW" or t.kind == "LBRACKET" for t in lhs[1:]
+        ):
            # Don't handle: *ptr = ..., ptr->field = ..., or ptr[field] = ...
            # Assume that they are visible to the GC.
            refs[tkn] = None
            continue
 
         if len(lhs) != 1 or lhs[0].kind != "IDENTIFIER":
-            raise analysis_error("PyStackRef_FromPyObjectNew() must be assigned to an output", tkn)
+            raise analysis_error(
+                "PyStackRef_FromPyObjectNew() must be assigned to an output", tkn
+            )
 
         name = lhs[0].text
         if not any(var.name == name for var in node.outputs):
-            raise analysis_error(f"PyStackRef_FromPyObjectNew() must be assigned to an output, not '{name}'", tkn)
+            raise analysis_error(
+                f"PyStackRef_FromPyObjectNew() must be assigned to an output, not '{name}'",
+                tkn,
+            )
 
         refs[tkn] = name
 
     return refs
 
+
 def variable_used(node: parser.InstDef, name: str) -> bool:
     """Determine whether a variable with a given name is used in a node."""
     return any(
         token.kind == "IDENTIFIER" and token.text == name for token in node.block.tokens
     )
 
+
 def oparg_used(node: parser.InstDef) -> bool:
     """Determine whether `oparg` is used in a node."""
     return any(
         token.kind == "IDENTIFIER" and token.text == "oparg" for token in node.tokens
     )
 
+
 def tier_variable(node: parser.InstDef) -> int | None:
     """Determine whether a tier variable is used in a node."""
     for token in node.tokens:
@@ -416,6 +438,7 @@ def tier_variable(node: parser.InstDef) -> int | None:
                 return int(token.text[-1])
     return None
 
+
 def has_error_with_pop(op: parser.InstDef) -> bool:
     return (
         variable_used(op, "ERROR_IF")
@@ -424,6 +447,7 @@ def has_error_with_pop(op: parser.InstDef) -> bool:
         or variable_used(op, "resume_with_error")
     )
 
+
 def has_error_without_pop(op: parser.InstDef) -> bool:
     return (
         variable_used(op, "ERROR_NO_POP")
@@ -606,8 +630,10 @@ def stack_effect_only_peeks(instr: parser.InstDef) -> bool:
         for s, other in zip(stack_inputs, instr.outputs)
     )
 
+
 OPARG_AND_1 = re.compile("\\(*oparg *& *1")
 
+
 def effect_depends_on_oparg_1(op: parser.InstDef) -> bool:
     for effect in op.inputs:
         if isinstance(effect, parser.CacheEffect):
@@ -623,6 +649,7 @@ def effect_depends_on_oparg_1(op: parser.InstDef) -> bool:
             return True
     return False
 
+
 def compute_properties(op: parser.InstDef) -> Properties:
     has_free = (
         variable_used(op, "PyCell_New")
@@ -667,7 +694,12 @@ def compute_properties(op: parser.InstDef) -> Properties:
     )
 
 
-def make_uop(name: str, op: parser.InstDef, inputs: list[parser.InputEffect], uops: dict[str, Uop]) -> Uop:
+def make_uop(
+    name: str,
+    op: parser.InstDef,
+    inputs: list[parser.InputEffect],
+    uops: dict[str, Uop],
+) -> Uop:
     result = Uop(
         name=name,
         context=op.context,
@@ -685,7 +717,9 @@ def make_uop(name: str, op: parser.InstDef, inputs: list[parser.InputEffect], uo
         properties = compute_properties(op)
         if properties.oparg:
             # May not need oparg anymore
-            properties.oparg = any(token.text == "oparg" for token in op.block.tokens)
+            properties.oparg = any(
+                token.text == "oparg" for token in op.block.tokens
+            )
         rep = Uop(
             name=name_x,
             context=op.context,
@@ -736,8 +770,10 @@ def add_op(op: parser.InstDef, uops: dict[str, Uop]) -> None:
 
 
 def add_instruction(
-    where: lexer.Token, name: str, parts: list[Part],
-    instructions: dict[str, Instruction]
+    where: lexer.Token,
+    name: str,
+    parts: list[Part],
+    instructions: dict[str, Instruction],
 ) -> None:
     instructions[name] = Instruction(where, name, parts, None)
 
@@ -781,7 +817,9 @@ def add_macro(
                     parts.append(Flush())
                 else:
                     if part.name not in uops:
-                        raise analysis_error(f"No Uop named {part.name}", macro.tokens[0])
+                        raise analysis_error(
+                            f"No Uop named {part.name}", macro.tokens[0]
+                        )
                     parts.append(uops[part.name])
             case parser.CacheEffect():
                 parts.append(Skip(part.size))

‎Tools/cases_generator/generators_common.py

+3 −4 (3 additions & 4 deletions)
@@ -58,12 +58,13 @@ def emit_to(out: CWriter, tkn_iter: Iterator[Token], end: str) -> None:
             parens -= 1
         out.emit(tkn)
 
+
 ReplacementFunctionType = Callable[
     [Token, Iterator[Token], Uop, Stack, Instruction | None], None
 ]
 
-class Emitter:
 
+class Emitter:
     out: CWriter
     _replacers: dict[str, ReplacementFunctionType]
 
@@ -176,7 +177,6 @@ def decref_inputs(
             else:
                 self.out.emit(f"PyStackRef_CLOSE({var.name});\n")
 
-
     def sync_sp(
         self,
         tkn: Token,
@@ -190,7 +190,6 @@ def sync_sp(
         next(tkn_iter)
         stack.flush(self.out)
 
-
     def check_eval_breaker(
         self,
         tkn: Token,
@@ -227,7 +226,6 @@ def py_stack_ref_from_py_object_new(
         # unused portions of the stack to NULL.
         stack.flush_single_var(self.out, target, uop.stack.outputs)
 
-
     def emit_tokens(
         self,
         uop: Uop,
@@ -248,6 +246,7 @@ def emit_tokens(
     def emit(self, txt: str | Token) -> None:
         self.out.emit(txt)
 
+
 def cflags(p: Properties) -> str:
     flags: list[str] = []
     if p.oparg:

‎Tools/cases_generator/opcode_metadata_generator.py

+1 (1 addition & 0 deletions)
@@ -91,6 +91,7 @@ def emit_stack_effect_function(
 def generate_stack_effect_functions(analysis: Analysis, out: CWriter) -> None:
     popped_data: list[tuple[str, str]] = []
     pushed_data: list[tuple[str, str]] = []
+
     def add(inst: Instruction | PseudoInstruction) -> None:
         stack = get_stack_effect(inst)
         popped = (-stack.base_offset).to_c()

‎Tools/cases_generator/optimizer_generator.py

+7 −6 (7 additions & 6 deletions)
@@ -88,8 +88,8 @@ def emit_default(out: CWriter, uop: Uop) -> None:
             else:
                 out.emit(f"{var.name} = sym_new_not_null(ctx);\n")
 
-class OptimizerEmitter(Emitter):
 
+class OptimizerEmitter(Emitter):
     pass
 
 
@@ -139,7 +139,7 @@ def write_uop(
                     local = locals[var.name]
                 else:
                     local = Local.local(var)
-                out.emit(stack.push(local))
+                stack.push(local)
         out.start_line()
         stack.flush(out, cast_type="_Py_UopsSymbol *", extract_bits=True)
     except StackError as ex:
@@ -161,8 +161,9 @@ def generate_abstract_interpreter(
     out.emit("\n")
     base_uop_names = set([uop.name for uop in base.uops.values()])
     for abstract_uop_name in abstract.uops:
-        assert abstract_uop_name in base_uop_names,\
-            f"All abstract uops should override base uops, but {abstract_uop_name} is not."
+        assert (
+            abstract_uop_name in base_uop_names
+        ), f"All abstract uops should override base uops, but {abstract_uop_name} is not."
 
     for uop in base.uops.values():
         override: Uop | None = None
@@ -192,7 +193,7 @@ def generate_abstract_interpreter(
 
 
 def generate_tier2_abstract_from_files(
-    filenames: list[str], outfilename: str, debug: bool=False
+    filenames: list[str], outfilename: str, debug: bool = False
 ) -> None:
     assert len(filenames) == 2, "Need a base file and an abstract cases file."
     base = analyze_files([filenames[0]])
@@ -211,7 +212,7 @@
 )
 
 
-arg_parser.add_argument("input", nargs='*', help="Abstract interpreter definition file")
+arg_parser.add_argument("input", nargs="*", help="Abstract interpreter definition file")
 
 arg_parser.add_argument(
     "base", nargs="*", help="The base instruction definition file(s)"

0 commit comments
