Commit babdbf9

Do the super/macro analysis at analysis time
1 parent 0f12c40 commit babdbf9

File tree

1 file changed: +95 -61 lines

Tools/cases_generator/generate_cases.py

Lines changed: 95 additions & 61 deletions
@@ -6,6 +6,7 @@

 import argparse
 import contextlib
+import dataclasses
 import os
 import re
 import sys
@@ -151,6 +152,71 @@ def write_body(self, f: typing.TextIO, ndent: str, dedent: int) -> None:
                 f.write(line)


+@dataclasses.dataclass
+class SuperComponent:
+    instr: Instruction
+    input_mapping: typing.Dict[str, parser.StackEffect]
+    output_mapping: typing.Dict[str, parser.StackEffect]
+
+
+class SuperInstruction(parser.Super):
+
+    stack: list[str]
+    initial_sp: int
+    final_sp: int
+    parts: list[SuperComponent]
+
+    def __init__(self, sup: parser.Super):
+        super().__init__(sup.kind, sup.name, sup.ops)
+        self.context = sup.context
+
+    def analyze(self, a: "Analyzer") -> None:
+        components = [a.instrs[name] for name in self.ops]
+        self.stack, self.initial_sp = self.super_macro_analysis(a, components)
+        sp = self.initial_sp
+        self.parts = []
+        for instr in components:
+            input_mapping = {}
+            for ieffect in reversed(instr.input_effects):
+                sp -= 1
+                if ieffect.name != "unused":
+                    input_mapping[self.stack[sp]] = ieffect
+            output_mapping = {}
+            for oeffect in instr.output_effects:
+                if oeffect.name != "unused":
+                    output_mapping[self.stack[sp]] = oeffect
+                sp += 1
+            self.parts.append(SuperComponent(instr, input_mapping, output_mapping))
+        self.final_sp = sp
+
+    def super_macro_analysis(
+        self, a: "Analyzer", components: list[Instruction]
+    ) -> tuple[list[str], int]:
+        """Analyze a super-instruction or macro.
+
+        Print an error if there's a cache effect (which we don't support yet).
+
+        Return the list of variable names and the initial stack pointer.
+        """
+        lowest = current = highest = 0
+        for instr in components:
+            if instr.cache_effects:
+                print(
+                    f"Super-instruction {self.name!r} has cache effects in {instr.name!r}",
+                    file=sys.stderr,
+                )
+                a.errors += 1
+            current -= len(instr.input_effects)
+            lowest = min(lowest, current)
+            current += len(instr.output_effects)
+            highest = max(highest, current)
+        # At this point, 'current' is the net stack effect,
+        # and 'lowest' and 'highest' are the extremes.
+        # Note that 'lowest' may be negative.
+        stack = [f"_tmp_{i+1}" for i in range(highest - lowest)]
+        return stack, -lowest
+
+
 class Analyzer:
     """Parse input, analyze it, and write to output."""

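To make the new stack bookkeeping concrete, here is a minimal standalone sketch of what super_macro_analysis() computes, assuming each component is reduced to a bare (n_inputs, n_outputs) pair rather than the generator's Instruction objects; the function and variable names below are illustrative only, not part of the commit.

def stack_layout(components: list[tuple[int, int]]) -> tuple[list[str], int]:
    # Same bookkeeping as SuperInstruction.super_macro_analysis(): walk the
    # components, tracking the running stack depth and its extremes.
    lowest = current = highest = 0
    for n_inputs, n_outputs in components:
        current -= n_inputs            # the component pops its inputs...
        lowest = min(lowest, current)
        current += n_outputs           # ...then pushes its outputs
        highest = max(highest, current)
    # One temporary name per stack slot ever touched; writing starts at -lowest.
    stack = [f"_tmp_{i+1}" for i in range(highest - lowest)]
    return stack, -lowest

# A LOAD_FAST-like op (0 in, 1 out) followed by a STORE_FAST-like op (1 in, 0 out):
# current goes 0 -> 1 -> 0, so lowest = 0 and highest = 1.
print(stack_layout([(0, 1), (1, 0)]))   # (['_tmp_1'], 0)
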
@@ -166,6 +232,7 @@ def __init__(self, filename: str):

     instrs: dict[str, Instruction]  # Includes ops
     supers: dict[str, parser.Super]  # Includes macros
+    super_instrs: dict[str, SuperInstruction]
     families: dict[str, parser.Family]

     def parse(self) -> None:
@@ -210,6 +277,7 @@ def analyze(self) -> None:
         self.find_predictions()
         self.map_families()
         self.check_families()
+        self.analyze_supers()

     def find_predictions(self) -> None:
         """Find the instructions that need PREDICTED() labels."""
@@ -278,6 +346,14 @@ def check_families(self) -> None:
                     )
                     self.errors += 1

+    def analyze_supers(self) -> None:
+        """Analyze each super instruction."""
+        self.super_instrs = {}
+        for name, sup in self.supers.items():
+            dup = SuperInstruction(sup)
+            dup.analyze(self)
+            self.super_instrs[name] = dup
+
     def write_instructions(self, filename: str) -> None:
         """Write instructions to output file."""
         indent = " " * 8
@@ -304,7 +380,7 @@ def write_instructions(self, filename: str) -> None:
             # Write super-instructions and macros
             n_supers = 0
             n_macros = 0
-            for sup in self.supers.values():
+            for sup in self.super_instrs.values():
                 if sup.kind == "super":
                     n_supers += 1
                 elif sup.kind == "macro":
@@ -318,7 +394,7 @@
         )

     def write_super_macro(
-        self, f: typing.TextIO, sup: parser.Super, indent: str = ""
+        self, f: typing.TextIO, sup: SuperInstruction, indent: str = ""
     ) -> None:

         # TODO: Make write() and block() methods of some Formatter class
@@ -342,76 +418,34 @@ def block(head: str):

         write("")
         with block(f"TARGET({sup.name})"):
-            components = [self.instrs[name] for name in sup.ops]
-            stack, nbelow = self.super_macro_analysis(sup.name, components)
-            sp = nbelow
-
-            for i, var in enumerate(stack):
-                if i < sp:
-                    write(f"PyObject *{var} = PEEK({sp - i});")
+            for i, var in enumerate(sup.stack):
+                if i < sup.initial_sp:
+                    write(f"PyObject *{var} = PEEK({sup.initial_sp - i});")
                 else:
                     write(f"PyObject *{var};")

-            for i, instr in enumerate(components):
+            for i, comp in enumerate(sup.parts):
                 if i > 0 and sup.kind == "super":
                     write(f"NEXTOPARG();")
                     write(f"next_instr++;")

                 with block(""):
-                    instack = stack[sp - len(instr.input_effects) : sp]
-                    for var, ineffect in zip(instack, instr.input_effects):
-                        if ineffect.name != "unused":
-                            write(f"PyObject *{ineffect.name} = {var};")
-                    for outeffect in instr.output_effects:
-                        if outeffect.name != "unused":
-                            write(f"PyObject *{outeffect.name};")
-
-                    instr.write_body(f, indent, dedent=-4)
-
-                    sp -= len(instack)
-                    nout = len(instr.output_effects)
-                    sp += nout
-                    outstack = stack[sp - nout : sp]
-                    for var, outeffect in zip(outstack, instr.output_effects):
-                        if outeffect.name != "unused":
-                            write(f"{var} = {outeffect.name};")
-
-            if sp > nbelow:
-                write(f"STACK_GROW({sp - nbelow});")
-            elif sp < nbelow:
-                write(f"STACK_SHRINK({nbelow - sp});")
-            for i, var in enumerate(reversed(stack[:sp]), 1):
+                    for var, ieffect in comp.input_mapping.items():
+                        write(f"PyObject *{ieffect.name} = {var};")
+                    for oeffect in comp.output_mapping.values():
+                        write(f"PyObject *{oeffect.name};")
+                    comp.instr.write_body(f, indent, dedent=-4)
+                    for var, oeffect in comp.output_mapping.items():
+                        write(f"{var} = {oeffect.name};")
+
+            if sup.final_sp > sup.initial_sp:
+                write(f"STACK_GROW({sup.final_sp - sup.initial_sp});")
+            elif sup.final_sp < sup.initial_sp:
+                write(f"STACK_SHRINK({sup.initial_sp - sup.final_sp});")
+            for i, var in enumerate(reversed(sup.stack[:sup.final_sp]), 1):
                 write(f"POKE({i}, {var});")
             write(f"DISPATCH();")

-    # TODO: Move this into analysis phase
-    def super_macro_analysis(
-        self, name: str, components: list[Instruction]
-    ) -> tuple[list[str], int]:
-        """Analyze a super-instruction or macro.
-
-        Print an error if there's a cache effect (which we don't support yet).
-
-        Return the list of variable names and the initial stack pointer.
-        """
-        lowest = current = highest = 0
-        for instr in components:
-            if instr.cache_effects:
-                print(
-                    f"Super-instruction {name!r} has cache effects in {instr.name!r}",
-                    file=sys.stderr,
-                )
-                self.errors += 1
-            current -= len(instr.input_effects)
-            lowest = min(lowest, current)
-            current += len(instr.output_effects)
-            highest = max(highest, current)
-        # At this point, 'current' is the net stack effect,
-        # and 'lowest' and 'highest' are the extremes.
-        # Note that 'lowest' may be negative.
-        stack = [f"_tmp_{i+1}" for i in range(highest - lowest)]
-        return stack, -lowest
-

 def always_exits(block: parser.Block) -> bool:
     """Determine whether a block always ends in a return/goto/etc."""

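For orientation, a rough sketch of how the phases line up after this change, using only the Analyzer methods visible in the diff; the import path and file names are assumptions for illustration, not taken from the commit.

# Hypothetical driver; module import path and file names are illustrative only.
import sys

from generate_cases import Analyzer

a = Analyzer("bytecodes.c")                    # assumed input file name
a.parse()                                      # build instrs, supers, families
a.analyze()                                    # now also runs analyze_supers()
if a.errors:
    sys.exit(f"Found {a.errors} errors")
a.write_instructions("generated_cases.c.h")    # assumed output file name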