Support specifying a list of ops to generate delay model data points for.

The downstream logic to incorporate these data points into an actual delay model will be in a follow-up.

PiperOrigin-RevId: 639885269
richmckeever authored and copybara-github committed Jun 3, 2024
1 parent d3971b2 commit eff2607
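
The new --op_include_list flag restricts characterization to the named ops; kIdentity is always added to the set, and an empty list keeps every op in the samples textproto. A rough, illustrative invocation of the updated run_timing_characterization.py wrapper follows (op names, paths, and the exact entry point are hypothetical, and other required flags may be omitted):

    run_timing_characterization \
      --yosys_path=/usr/local/bin/yosys \
      --sta_path=/usr/local/bin/sta \
      --synth_libs=/path/to/cells.lib \
      --samples_path=/path/to/samples.textproto \
      --out_path=/tmp/delay_points.textproto \
      --op_include_list=kAdd,kUMul

The wrapper forwards --op_include_list to the client together with --samples_path, so the filter only takes effect when a samples file is supplied.
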
Showing 2 changed files with 104 additions and 59 deletions.
105 changes: 69 additions & 36 deletions xls/synthesis/timing_characterization_client.py
@@ -35,14 +35,26 @@

FLAGS = flags.FLAGS
_MAX_PS = flags.DEFINE_integer(
'max_ps', 15000, 'Maximum picoseconds delay to test.')
'max_ps', 15000, 'Maximum picoseconds delay to test.'
)
_MIN_PS = flags.DEFINE_integer(
'min_ps', 20, 'Minimum picoseconds delay to test.')
'min_ps', 20, 'Minimum picoseconds delay to test.'
)
_CHECKPOINT_PATH = flags.DEFINE_string(
'checkpoint_path', '', 'Path at which to load and save checkpoints. ' +
'Checkpoints will not be kept if unspecified.')
'checkpoint_path',
'',
'Path at which to load and save checkpoints. Checkpoints will not be kept'
' if unspecified.',
)
_SAMPLES_PATH = flags.DEFINE_string(
'samples_path', '', 'Path at which to load samples textproto.')
'samples_path', '', 'Path at which to load samples textproto.'
)
_OP_INCLUDE_LIST = flags.DEFINE_list(
'op_include_list',
[],
'Names of ops from samples textproto to generate data points for. If empty,'
' all of them are included. Note that kIdentity is always included.',
)

ENUM2NAME_MAP = dict((op.enum_name, op.name) for op in OPS)

@@ -101,22 +113,23 @@ def _search_for_fmax_and_synth(
# the binary search. Just use the max_frequency_hz of the first response
# (whether it passes or fails).
if response.insensitive_to_target_freq and response.max_frequency_hz > 0:
logging.info(
'USING (@min %2.1fps).', response_ps
)
logging.info('USING (@min %2.1fps).', response_ps)
best_result = response
break

if response.slack_ps >= 0:
if response.max_frequency_hz > 0:
logging.info(
'PASS at %.1fps (slack %dps @min %2.1fps)',
current_ps, response.slack_ps, response_ps
current_ps,
response.slack_ps,
response_ps,
)
else:
logging.error('PASS but no maximum frequency determined.')
logging.error('ERROR: this occurs when '
'an operator is optimized to a constant.')
logging.error(
'ERROR: this occurs when an operator is optimized to a constant.'
)
logging.error('Source Verilog:\n%s', request.module_text)
sys.exit()
high_ps = current_ps
@@ -126,7 +139,9 @@
if response.max_frequency_hz:
logging.info(
'FAIL at %.1fps (slack %dps @min %2.1fps).',
current_ps, response.slack_ps, response_ps
current_ps,
response.slack_ps,
response_ps,
)
else:
# This shouldn't happen
@@ -145,20 +160,22 @@ def _search_for_fmax_and_synth(
1e12 / best_result.max_frequency_hz,
)
else:
logging.error(
'INTERNAL ERROR: no passing run.'
)
logging.error('INTERNAL ERROR: no passing run.')
sys.exit()
return best_result


def _synthesize_ir(stub: synthesis_service_pb2_grpc.SynthesisServiceStub,
results: delay_model_pb2.DataPoints,
data_points: Dict[str, Set[str]], ir_text: str,
op: str, result_bit_count: int,
operand_bit_counts: Sequence[int],
operand_element_counts: Dict[int, int],
specialization: delay_model_pb2.SpecializationKind) -> None:
def _synthesize_ir(
stub: synthesis_service_pb2_grpc.SynthesisServiceStub,
results: delay_model_pb2.DataPoints,
data_points: Dict[str, Set[str]],
ir_text: str,
op: str,
result_bit_count: int,
operand_bit_counts: Sequence[int],
operand_element_counts: Dict[int, int],
specialization: delay_model_pb2.SpecializationKind,
) -> None:
"""Synthesizes the given IR text and checkpoint resulting data points."""
if op not in data_points:
data_points[op] = set()
@@ -176,11 +193,16 @@ def _synthesize_ir(stub: synthesis_service_pb2_grpc.SynthesisServiceStub,
return
data_points[op].add(key)

logging.info('Running %s with %d / %s', op, result_bit_count,
', '.join([str(x) for x in operand_bit_counts]))
logging.info(
'Running %s with %d / %s',
op,
result_bit_count,
', '.join([str(x) for x in operand_bit_counts]),
)
module_name = 'main'
mod_generator_result = op_module_generator.generate_verilog_module(
module_name, ir_text)
module_name, ir_text
)

op_comment = '// op: ' + op + ' \n'
verilog_text = op_comment + mod_generator_result.verilog_text
@@ -254,20 +276,28 @@ def _run_point(
# TODO(tcal): complete handling for specialization == HAS_LITERAL_OPERAND
logging.info('types: %s : %s', res_type, ' '.join(opnd_types))
literal_operand = None
repeated_operand = 1 if (
specialization == delay_model_pb2.OPERANDS_IDENTICAL) else None
repeated_operand = (
1 if (specialization == delay_model_pb2.OPERANDS_IDENTICAL) else None
)
ir_text = op_module_generator.generate_ir_package(
op_name, res_type, (opnd_types), attr, literal_operand, repeated_operand
)
logging.info('ir_text:\n%s\n', ir_text)
_synthesize_ir(
stub, results, data_points, ir_text, op, res_bit_count,
list(point.operand_widths), opnd_element_counts, specialization
stub,
results,
data_points,
ir_text,
op,
res_bit_count,
list(point.operand_widths),
opnd_element_counts,
specialization,
)


def init_data(
checkpoint_path: str
checkpoint_path: str,
) -> Tuple[Dict[str, Set[str]], delay_model_pb2.DataPoints]:
"""Return new state, loading data from a checkpoint, if available."""
data_points = {}
@@ -279,8 +309,9 @@ def init_data(
op = data_point.operation
if op.op not in data_points:
data_points[op.op] = set()
key = ', '.join([str(op.bit_count)] +
[str(x.bit_count) for x in op.operands])
key = ', '.join(
[str(op.bit_count)] + [str(x.bit_count) for x in op.operands]
)
if op.specialization:
key = key + ' ' + str(op.specialization)
data_points[op.op].add(key)
@@ -294,13 +325,15 @@ def run_characterization(
data_points, data_points_proto = init_data(_CHECKPOINT_PATH.value)
samples_file = _SAMPLES_PATH.value
op_samples_list = delay_model_pb2.OpSamplesList()
op_include_list = set()
if _OP_INCLUDE_LIST.value:
op_include_list.update(['kIdentity'] + _OP_INCLUDE_LIST.value)
with gfile.open(samples_file, 'r') as f:
op_samples_list = text_format.Parse(f.read(), op_samples_list)
for op_samples in op_samples_list.op_samples:
for point in op_samples.samples:
_run_point(op_samples,
point,
data_points_proto, data_points, stub)
if not op_include_list or op_samples.op in op_include_list:
for point in op_samples.samples:
_run_point(op_samples, point, data_points_proto, data_points, stub)

print('# proto-file: xls/delay_model/delay_model.proto')
print('# proto-message: xls.delay_model.DataPoints')
58 changes: 35 additions & 23 deletions xls/tools/run_timing_characterization.py
@@ -15,16 +15,16 @@

"""Runs timing characterization to generate XLS delay models using Yosys and OpenSTA.
There are two modes:
There are two modes:
If --openroad_path is supplied, then scripts, tooling,
and libraries are found in the OpenROAD installation.
The set of PDKs used is hardcoded (sky130, asap7, and nangate45).
Timing characterization is run for all these PDKs.
If --openroad_path is supplied, then scripts, tooling,
and libraries are found in the OpenROAD installation.
The set of PDKs used is hardcoded (sky130, asap7, and nangate45).
Timing characterization is run for all these PDKs.
If --openroad_path is NOT supplied, then paths to
Yosys, openSTA, synthesis library, and (if different from
synthesis librarty) STA libraries must be provided.
If --openroad_path is NOT supplied, then paths to
Yosys, openSTA, synthesis library, and (if different from
synthesis librarty) STA libraries must be provided.
"""

# Assume that https://github.com/The-OpenROAD-Project/OpenROAD-flow-scripts are
@@ -34,18 +34,22 @@
import os
import subprocess
import time

from typing import List

from absl import app
from absl import flags
from absl import logging

import portpicker

_SAMPLES_PATH = flags.DEFINE_string(
'samples_path', None, 'Path to proto providing sample points.'
)
_OP_INCLUDE_LIST = flags.DEFINE_list(
'op_include_list',
[],
'Names of ops from samples textproto to generate data points for. If empty,'
' all of them are included. Note that kIdentity is always included',
)
_BAZEL_BIN_PATH = flags.DEFINE_string(
'bazel_bin_path', None, 'Root directory of bazel-bin'
)
@@ -70,12 +74,12 @@
'synth_libs', None, 'Path to synthesis library or libraries'
)
_STA_LIBS = flags.DEFINE_string(
'sta_libs', None, 'Path to static timing library or libraries; '
'only needed if different from synth_libs'
)
_OUT_PATH = flags.DEFINE_string(
'out_path', None, 'Path for output text proto'
'sta_libs',
None,
'Path to static timing library or libraries; '
'only needed if different from synth_libs',
)
_OUT_PATH = flags.DEFINE_string('out_path', None, 'Path for output text proto')

# The options below are used when bazel_bin_path is NOT specified
_CLIENT = flags.DEFINE_string(
@@ -118,21 +122,23 @@ def _do_config_task(config: WorkerConfig):
config.yosys_bin = f'{config.openroad_path}/tools/install/yosys/bin/yosys'
config.sta_bin = f'{config.openroad_path}/tools/install/OpenROAD/bin/sta'
config.client_checkpoint_file = (
f'../../{config.target}_checkpoint.textproto')
f'../../{config.target}_checkpoint.textproto'
)
else:
if not _YOSYS_PATH.value:
raise app.UsageError(
'Must provide either --openroad_path or --yosys_path.')
'Must provide either --openroad_path or --yosys_path.'
)
config.yosys_bin = os.path.realpath(_YOSYS_PATH.value)

if not _STA_PATH.value:
raise app.UsageError(
'Must provide either --openroad_path or --sta_path.')
raise app.UsageError('Must provide either --openroad_path or --sta_path.')
config.sta_bin = os.path.realpath(_STA_PATH.value)

if not _SYNTH_LIBS.value:
raise app.UsageError(
'Must provide either --openroad_path or --synth_libs.')
'Must provide either --openroad_path or --synth_libs.'
)
synth_libs = _SYNTH_LIBS.value
assert synth_libs is not None
config.synthesis_libraries = synth_libs.split()
@@ -150,7 +156,8 @@ def _do_config_task(config: WorkerConfig):
config.client_checkpoint_file = out_path
else:
raise app.UsageError(
'If not using --openroad_path, then must provide --out_path.')
'If not using --openroad_path, then must provide --out_path.'
)

if not _SAMPLES_PATH.value:
if _QUICK_RUN.value:
@@ -175,8 +182,9 @@ def _do_config_task(config: WorkerConfig):

print('server bin path:', config.server_bin)
print('client bin path:', config.client_bin)
print('output checkpoint path:',
os.path.realpath(config.client_checkpoint_file))
print(
'output checkpoint path:', os.path.realpath(config.client_checkpoint_file)
)

if not os.path.isfile(config.yosys_bin):
raise app.UsageError(f'Yosys tools not found with {config.yosys_bin}')
@@ -205,6 +213,9 @@ def _do_config_task(config: WorkerConfig):
if _SAMPLES_PATH.value:
config.samples_path = os.path.realpath(_SAMPLES_PATH.value)
config.client_extra_args.append(f'--samples_path={config.samples_path}')
config.client_extra_args.append(
'--op_include_list=' + ','.join(_OP_INCLUDE_LIST.value)
)


def _do_config_asap7(config: WorkerConfig):
@@ -362,5 +373,6 @@ def main(_):
_do_worker_task(config)
print('Finish')


if __name__ == '__main__':
app.run(main)
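
For the OpenROAD-based mode described in the run_timing_characterization.py docstring above, a minimal sketch of an invocation might look like this (the checkout path and op names are hypothetical, and additional flags may be required):

    run_timing_characterization \
      --openroad_path=/path/to/OpenROAD-flow-scripts \
      --samples_path=/path/to/samples.textproto \
      --op_include_list=kAdd,kUMul

In this mode the Yosys and OpenSTA binaries are taken from the OpenROAD installation, the hardcoded sky130, asap7, and nangate45 PDKs are all characterized, and each target writes a <target>_checkpoint.textproto checkpoint.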
