Coverage for klayout_pex / kpex_cli.py: 71%
533 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-24 15:46 +0000
1#! /usr/bin/env python3
2#
3# --------------------------------------------------------------------------------
4# SPDX-FileCopyrightText: 2024-2025 Martin Jan Köhler and Harald Pretl
5# Johannes Kepler University, Institute for Integrated Circuits.
6#
7# This file is part of KPEX
8# (see https://github.com/iic-jku/klayout-pex).
9#
10# This program is free software: you can redistribute it and/or modify
11# it under the terms of the GNU General Public License as published by
12# the Free Software Foundation, either version 3 of the License, or
13# (at your option) any later version.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License
21# along with this program. If not, see <http://www.gnu.org/licenses/>.
22# SPDX-License-Identifier: GPL-3.0-or-later
23# --------------------------------------------------------------------------------
24#
26import argparse
27from datetime import datetime
28from enum import StrEnum
29import logging
30import os
31import os.path
32from pathlib import Path
33import rich.console
34import rich.markdown
35import rich.text
36from rich_argparse import RichHelpFormatter
37import shlex
38import shutil
39import sys
40from typing import *
42import klayout.db as kdb
43import klayout.rdb as rdb
45from .common.path_validation import validate_files, FileValidationResult
46from .env import EnvVar, Env
47from .extraction_engine import ExtractionEngine
48from .fastercap.fastercap_input_builder import FasterCapInputBuilder
49from .fastercap.fastercap_model_generator import FasterCapModelGenerator
50from .fastercap.fastercap_runner import run_fastercap, fastercap_parse_capacitance_matrix
51from .fastcap.fastcap_runner import run_fastcap, fastcap_parse_capacitance_matrix
52from .klayout.lvs_runner import LVSRunner
53from .klayout.lvsdb_extractor import KLayoutExtractionContext, KLayoutExtractedLayerInfo
54from .klayout.netlist_expander import NetlistExpander
55from .klayout.netlist_csv import NetlistCSVWriter
56from .klayout.netlist_printer import NetlistPrinter
57from .klayout.netlist_reducer import NetlistReducer
58from .klayout.repair_rdb import repair_rdb
59from .log import (
60 LogLevel,
61 set_log_level,
62 register_additional_handler,
63 deregister_additional_handler,
64 # console,
65 # debug,
66 info,
67 warning,
68 subproc,
69 error,
70 rule
71)
72from .magic.magic_ext_file_parser import parse_magic_pex_run
73from .magic.magic_runner import (
74 MagicPEXMode,
75 MagicShortMode,
76 MagicMergeMode,
77 run_magic,
78 prepare_magic_script,
79)
80from .magic.magic_log_analyzer import MagicLogAnalyzer
81from .pdk_config import PDK, PDKConfig
82from .rcx25.extractor import RCX25Extractor, ExtractionResults
83from .rcx25.netlist_expander import RCX25NetlistExpander
84from .rcx25.pex_mode import PEXMode
85from .tech_info import TechInfo
86from .util.multiple_choice import MultipleChoicePattern
87from .util.argparse_helpers import render_enum_help, true_or_false
88from .version import __version__
91# ------------------------------------------------------------------------------------
93PROGRAM_NAME = "kpex"
96class ArgumentValidationError(Exception):
97 pass
100class InputMode(StrEnum):
101 LVSDB = "lvsdb"
102 GDS = "gds"
105class KpexCLI:
106 @staticmethod
107 def parse_args(arg_list: List[str],
108 env: Env) -> argparse.Namespace:
109 # epilog = f"See '{PROGRAM_NAME} <subcommand> -h' for help on subcommand"
110 epilog = EnvVar.help_epilog_table()
111 epilog_md = rich.console.Group(
112 rich.text.Text('Environmental variables:', style='argparse.groups'),
113 rich.markdown.Markdown(epilog, style='argparse.text')
114 )
115 main_parser = argparse.ArgumentParser(description=f"{PROGRAM_NAME}: "
116 f"KLayout-integrated Parasitic Extraction Tool",
117 epilog=epilog_md,
118 add_help=False,
119 formatter_class=RichHelpFormatter)
121 group_special = main_parser.add_argument_group("Special options")
122 group_special.add_argument("--help", "-h", action='help', help="show this help message and exit")
123 group_special.add_argument("--version", "-v", action='version', version=f'{PROGRAM_NAME} {__version__}')
124 group_special.add_argument("--log_level", dest='log_level', default='subprocess',
125 help=render_enum_help(topic='log_level', enum_cls=LogLevel))
126 group_special.add_argument("--threads", dest='num_threads', type=int,
127 default=os.cpu_count() * 4,
128 help="number of threads (e.g. for FasterCap) (default is %(default)s)")
130 group_pex = main_parser.add_argument_group("Parasitic Extraction Setup")
132 all_pdk_choices = list(PDK) + list(PDK.legacy_aliases().keys())
134 group_pex.add_argument("--pdk", dest="pdk", required=True,
135 type=PDK.from_string, choices=all_pdk_choices,
136 help=render_enum_help(topic='pdk', enum_cls=PDK))
138 group_pex.add_argument("--out_dir", dest="output_dir_base_path", default="output",
139 help="Run directory path (default is '%(default)s')")
141 group_pex.add_argument("--out_spice", "-o", dest="output_spice_path", default=None,
142 help="Optional additional SPICE output path (default is none)")
144 group_pex_input = main_parser.add_argument_group("Parasitic Extraction Input",
145 description="Either LVS is run, or an existing LVSDB is used")
146 group_pex_input.add_argument("--gds", "-g", dest="gds_path", default=None,
147 help="GDS path (for LVS)")
148 group_pex_input.add_argument("--schematic", "-s", dest="schematic_path",
149 help="Schematic SPICE netlist path (for LVS). "
150 "If none given, a dummy schematic will be created")
151 group_pex_input.add_argument("--lvsdb", "-l", dest="lvsdb_path", default=None,
152 help="KLayout LVSDB path (bypass LVS)")
153 group_pex_input.add_argument("--cell", "-c", dest="cell_name", default=None,
154 help="Cell (default is the top cell)")
156 group_pex_input.add_argument("--cache-lvs", dest="cache_lvs",
157 type=true_or_false, default=True,
158 help="Used cached LVSDB (for given input GDS) (default is %(default)s)")
159 group_pex_input.add_argument("--cache-dir", dest="cache_dir_path", default=None,
160 help="Path for cached LVSDB (default is .kpex_cache within --out_dir)")
161 group_pex_input.add_argument("--lvs-verbose", dest="klayout_lvs_verbose",
162 type=true_or_false, default=False,
163 help="Verbose KLayout LVS output (default is %(default)s)")
165 group_pex_options = main_parser.add_argument_group("Parasitic Extraction Options")
166 group_pex_options.add_argument("--blackbox", dest="blackbox_devices",
167 type=true_or_false, default=False, # TODO: in the future this should be True by default
168 help="Blackbox devices like MIM/MOM caps, as they are handled by SPICE models "
169 "(default is %(default)s for testing now)")
170 group_pex_options.add_argument("--fastercap", dest="run_fastercap",
171 action='store_true', default=False,
172 help="Run FasterCap engine (default is %(default)s)")
173 group_pex_options.add_argument("--fastcap", dest="run_fastcap",
174 action='store_true', default=False,
175 help="Run FastCap2 engine (default is %(default)s)")
176 group_pex_options.add_argument("--magic", dest="run_magic",
177 action='store_true', default=False,
178 help="Run MAGIC engine (default is %(default)s)")
179 group_pex_options.add_argument("--2.5D", dest="run_2_5D",
180 action='store_true', default=False,
181 help="Run 2.5D analytical engine (default is %(default)s)")
183 group_fastercap = main_parser.add_argument_group("FasterCap options")
184 group_fastercap.add_argument("--k_void", "-k", dest="k_void",
185 type=float, default=3.9,
186 help="Dielectric constant of void (default is %(default)s)")
188 # TODO: reflect that these are also now used by KPEX/2.5D engine!
189 group_fastercap.add_argument("--delaunay_amax", "-a", dest="delaunay_amax",
190 type=float, default=50,
191 help="Delaunay triangulation maximum area (default is %(default)s)")
192 group_fastercap.add_argument("--delaunay_b", "-b", dest="delaunay_b",
193 type=float, default=0.5,
194 help="Delaunay triangulation b (default is %(default)s)")
195 group_fastercap.add_argument("--geo_check", dest="geometry_check",
196 type=true_or_false, default=False,
197 help=f"Validate geometries before passing to FasterCap "
198 f"(default is False)")
199 group_fastercap.add_argument("--diel", dest="dielectric_filter",
200 type=str, default="all",
201 help=f"Comma separated list of dielectric filter patterns. "
202 f"Allowed patterns are: (none, all, -dielname1, +dielname2) "
203 f"(default is %(default)s)")
205 group_fastercap.add_argument("--tolerance", dest="fastercap_tolerance",
206 type=float, default=0.05,
207 help="FasterCap -aX error tolerance (default is %(default)s)")
208 group_fastercap.add_argument("--d_coeff", dest="fastercap_d_coeff",
209 type=float, default=0.5,
210 help=f"FasterCap -d direct potential interaction coefficient to mesh refinement "
211 f"(default is %(default)s)")
212 group_fastercap.add_argument("--mesh", dest="fastercap_mesh_refinement_value",
213 type=float, default=0.5,
214 help="FasterCap -m Mesh relative refinement value (default is %(default)s)")
215 group_fastercap.add_argument("--ooc", dest="fastercap_ooc_condition",
216 type=float, default=2,
217 help="FasterCap -f out-of-core free memory to link memory condition "
218 "(0 = don't go OOC, default is %(default)s)")
219 group_fastercap.add_argument("--auto_precond", dest="fastercap_auto_preconditioner",
220 type=true_or_false, default=True,
221 help=f"FasterCap -ap Automatic preconditioner usage (default is %(default)s)")
222 group_fastercap.add_argument("--galerkin", dest="fastercap_galerkin_scheme",
223 action='store_true', default=False,
224 help=f"FasterCap -g Use Galerkin scheme (default is %(default)s)")
225 group_fastercap.add_argument("--jacobi", dest="fastercap_jacobi_preconditioner",
226 action='store_true', default=False,
227 help="FasterCap -pj Use Jacobi preconditioner (default is %(default)s)")
229 group_magic = main_parser.add_argument_group("MAGIC options")
231 default_magicrc_path = env.default_magicrc_path
232 if default_magicrc_path:
233 magicrc_help = f"Path to magicrc configuration file (default is '{default_magicrc_path}')"
234 else:
235 magicrc_help = "Path to magicrc configuration file "\
236 "(default not available, PDK and PDK_ROOT must be set!)"
238 group_magic.add_argument('--magicrc', dest='magicrc_path', default=default_magicrc_path,
239 help=magicrc_help)
240 group_magic.add_argument("--magic_mode", dest='magic_pex_mode',
241 default=MagicPEXMode.DEFAULT, type=MagicPEXMode, choices=list(MagicPEXMode),
242 help=render_enum_help(topic='magic_mode', enum_cls=MagicPEXMode))
243 group_magic.add_argument("--magic_cthresh", dest="magic_cthresh",
244 type=float, default=0.01,
245 help="Threshold (in fF) for ignored parasitic capacitances (default is %(default)s). "
246 "(MAGIC command: ext2spice cthresh <value>)")
247 group_magic.add_argument("--magic_rthresh", dest="magic_rthresh",
248 type=int, default=100,
249 help="Threshold (in Ω) for ignored parasitic resistances (default is %(default)s). "
250 "(MAGIC command: ext2spice rthresh <value>)")
251 group_magic.add_argument("--magic_tolerance", dest="magic_tolerance",
252 type=float, default=1,
253 help="Set ratio between resistor and device tolerance (default is %(default)s). "
254 "(MAGIC command: extresist tolerance <value>)")
255 group_magic.add_argument("--magic_halo", dest="magic_halo",
256 type=float, default=None,
257 help="Custom sidewall halo distance (in µm) "
258 "(MAGIC command: extract halo <value>) (default is no custom halo)")
259 group_magic.add_argument("--magic_short", dest='magic_short_mode',
260 default=MagicShortMode.DEFAULT, type=MagicShortMode, choices=list(MagicShortMode),
261 help=render_enum_help(topic='magic_short', enum_cls=MagicShortMode))
262 group_magic.add_argument("--magic_merge", dest='magic_merge_mode',
263 default=MagicMergeMode.DEFAULT, type=MagicMergeMode, choices=list(MagicMergeMode),
264 help=render_enum_help(topic='magic_merge', enum_cls=MagicMergeMode))
266 group_25d = main_parser.add_argument_group("2.5D options")
267 group_25d.add_argument("--mode", dest='pex_mode',
268 default=PEXMode.DEFAULT, type=PEXMode, choices=list(PEXMode),
269 help=render_enum_help(topic='mode', enum_cls=PEXMode))
270 group_25d.add_argument("--halo", dest="halo",
271 type=float, default=None,
272 help="Custom sidewall halo distance (in µm) to override tech info "
273 "(default is no custom halo)")
274 group_25d.add_argument("--scale", dest="scale_ratio_to_fit_halo",
275 type=true_or_false, default=True,
276 help=f"Scale fringe ratios, so that halo distance is 100%% (default is %(default)s)")
278 if arg_list is None:
279 arg_list = sys.argv[1:]
280 args = main_parser.parse_args(arg_list)
282 # environmental variables and their defaults
283 args.fastcap_exe_path = env[EnvVar.FASTCAP_EXE]
284 args.fastercap_exe_path = env[EnvVar.FASTERCAP_EXE]
285 args.klayout_exe_path = env[EnvVar.KLAYOUT_EXE]
286 args.magic_exe_path = env[EnvVar.MAGIC_EXE]
288 return args
290 @staticmethod
291 def validate_args(args: argparse.Namespace):
292 found_errors = False
294 pdk_config: PDKConfig = args.pdk.config
295 args.tech_pbjson_path = pdk_config.tech_pb_json_path
296 args.lvs_script_path = pdk_config.pex_lvs_script_path
298 def input_file_stem(path: str):
299 # could be *.gds, or *.gds.gz, so remove all extensions
300 return os.path.basename(path).split(sep='.')[0]
302 if not os.path.isfile(args.klayout_exe_path):
303 path = shutil.which(args.klayout_exe_path)
304 if not path:
305 error(f"Can't locate KLayout executable at {args.klayout_exe_path}")
306 found_errors = True
308 if not os.path.isfile(args.tech_pbjson_path):
309 error(f"Can't read technology file at path {args.tech_pbjson_path}")
310 found_errors = True
312 if not os.path.isfile(args.lvs_script_path):
313 error(f"Can't locate LVS script path at {args.lvs_script_path}")
314 found_errors = True
316 rule('Input Layout')
318 # check engines VS input possiblities
319 match (args.run_magic, args.run_fastcap, args.run_fastercap, args.run_2_5D,
320 args.gds_path, args.lvsdb_path):
321 case (True, _, _, _, None, _):
322 error(f"Running PEX engine MAGIC requires --gds (--lvsdb not possible)")
323 found_errors = True
324 case (False, False, False, False, _, _): # at least one engine must be activated
325 error("No PEX engines activated")
326 engine_help = """
327 | Argument | Description |
328 | ------------ | ------------------------------- |
329 | --2.5D | Run KPEX/2.5D analytical engine |
330 | --fastercap | Run KPEX/FastCap 3D engine |
331 | --fastercap | Run KPEX/FasterCap 3D engine |
332 | --magic | Run MAGIC wrapper engine |
333 """
334 subproc(f"\n\nPlease activate one or more engines using the arguments:")
335 rich.print(rich.markdown.Markdown(engine_help, style='argparse.text'))
336 found_errors = True
337 case (_, _, _, _, None, None):
338 error(f"Neither GDS nor LVSDB was provided")
339 found_errors = True
341 # check if we find magicrc
342 if args.run_magic:
343 if args.magicrc_path is None:
344 error(f"magicrc not available, requires any those:\n"
345 f"\t• set environmental variables PDK_ROOT / PDK\n"
346 f"\t• pass argument --magicrc")
347 found_errors = True
348 else:
349 result = validate_files([args.magicrc_path])
350 for f in result.failures:
351 error(f"Invalid magicrc: {f.reason} at {str(f.path)}")
352 found_errors = True
354 # input mode: LVS or existing LVSDB?
355 if args.gds_path:
356 info(f"GDS input file passed, running in LVS mode")
357 args.input_mode = InputMode.GDS
358 if not os.path.isfile(args.gds_path):
359 error(f"Can't read GDS file (LVS input) at path {args.gds_path}")
360 found_errors = True
361 else:
362 args.layout = kdb.Layout()
363 args.layout.read(args.gds_path)
365 top_cells = args.layout.top_cells()
367 if args.cell_name: # explicit user-specified cell name
368 args.effective_cell_name = args.cell_name
370 found_cell: Optional[kdb.Cell] = None
371 for cell in args.layout.cells('*'):
372 if cell.name == args.effective_cell_name:
373 found_cell = cell
374 break
375 if not found_cell:
376 error(f"Could not find cell {args.cell_name} in GDS {args.gds_path}")
377 found_errors = True
379 is_only_top_cell = len(top_cells) == 1 and top_cells[0].name == args.cell_name
380 if is_only_top_cell:
381 info(f"Found cell {args.cell_name} in GDS {args.gds_path} (only top cell)")
382 else: # there are other cells => extract the top cell to a tmp layout
383 run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}"
384 args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id)
385 os.makedirs(args.output_dir_path, exist_ok=True)
386 args.effective_gds_path = os.path.join(args.output_dir_path,
387 f"{args.cell_name}_exported.gds.gz")
388 info(f"Found cell {args.cell_name} in GDS {args.gds_path}, "
389 f"but it is not the only top cell, "
390 f"so layout is exported to: {args.effective_gds_path}")
392 found_cell.write(args.effective_gds_path)
393 else: # find top cell
394 if len(top_cells) == 1:
395 args.effective_cell_name = top_cells[0].name
396 info(f"No explicit top cell specified, using top cell '{args.effective_cell_name}'")
397 else:
398 args.effective_cell_name = 'TOP'
399 error(f"Could not determine the default top cell in GDS {args.gds_path}, "
400 f"there are multiple: {', '.join([c.name for c in top_cells])}. "
401 f"Use --cell to specify the cell")
402 found_errors = True
404 if not hasattr(args, 'effective_gds_path'):
405 args.effective_gds_path = args.gds_path
406 elif args.lvsdb_path is not None:
407 info(f"LVSDB input file passed, bypassing LVS")
408 args.input_mode = InputMode.LVSDB
409 if not os.path.isfile(args.lvsdb_path):
410 error(f"Can't read KLayout LVSDB file at path {args.lvsdb_path}")
411 found_errors = True
412 else:
413 lvsdb = kdb.LayoutVsSchematic()
414 lvsdb.read(args.lvsdb_path)
415 top_cell: kdb.Cell = lvsdb.internal_top_cell()
416 args.effective_cell_name = top_cell.name
418 if hasattr(args, 'effective_cell_name'):
419 run_dir_id: str
420 match args.input_mode:
421 case InputMode.GDS:
422 run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}"
423 case InputMode.LVSDB:
424 run_dir_id = f"{input_file_stem(args.lvsdb_path)}__{args.effective_cell_name}"
425 case _:
426 raise NotImplementedError(f"Unknown input mode {args.input_mode}")
428 args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id)
429 os.makedirs(args.output_dir_path, exist_ok=True)
430 if args.input_mode == InputMode.GDS:
431 if args.schematic_path:
432 args.effective_schematic_path = args.schematic_path
433 if not os.path.isfile(args.schematic_path):
434 error(f"Can't read schematic (LVS input) at path {args.schematic_path}")
435 found_errors = True
436 else:
437 info(f"LVS input schematic not specified (argument --schematic), using dummy schematic")
438 args.effective_schematic_path = os.path.join(args.output_dir_path,
439 f"{args.effective_cell_name}_dummy_schematic.spice")
440 with open(args.effective_schematic_path, 'w', encoding='utf-8') as f:
441 f.writelines([
442 f".subckt {args.effective_cell_name} VDD VSS\n",
443 '.ends\n',
444 '.end\n'
445 ])
447 try:
448 args.log_level = LogLevel[args.log_level.upper()]
449 except KeyError:
450 error(f"Requested log level {args.log_level.lower()} does not exist, "
451 f"{render_enum_help(topic='log_level', enum_cls=LogLevel, print_default=False)}")
452 found_errors = True
454 try:
455 pattern_string: str = args.dielectric_filter
456 args.dielectric_filter = MultipleChoicePattern(pattern=pattern_string)
457 except ValueError as e:
458 error("Failed to parse --diel arg", e)
459 found_errors = True
461 if args.cache_dir_path is None:
462 args.cache_dir_path = os.path.join(args.output_dir_base_path, '.kpex_cache')
464 if found_errors:
465 raise ArgumentValidationError("Argument validation failed")
467 def create_netlist_printer(self,
468 args: argparse.Namespace,
469 extraction_engine: ExtractionEngine):
470 printer = NetlistPrinter(extraction_engine=extraction_engine,
471 pdk=args.pdk)
472 return printer
474 def build_fastercap_input(self,
475 args: argparse.Namespace,
476 pex_context: KLayoutExtractionContext,
477 tech_info: TechInfo) -> str:
478 rule('Process stackup')
479 fastercap_input_builder = FasterCapInputBuilder(pex_context=pex_context,
480 tech_info=tech_info,
481 k_void=args.k_void,
482 delaunay_amax=args.delaunay_amax,
483 delaunay_b=args.delaunay_b)
484 gen: FasterCapModelGenerator = fastercap_input_builder.build()
486 rule('FasterCap Input File Generation')
487 faster_cap_input_dir_path = os.path.join(args.output_dir_path, 'FasterCap_Input_Files')
488 os.makedirs(faster_cap_input_dir_path, exist_ok=True)
490 lst_file = gen.write_fastcap(output_dir_path=faster_cap_input_dir_path, prefix='FasterCap_Input_')
492 rule('STL File Generation')
493 geometry_dir_path = os.path.join(args.output_dir_path, 'Geometries')
494 os.makedirs(geometry_dir_path, exist_ok=True)
495 gen.dump_stl(output_dir_path=geometry_dir_path, prefix='')
497 if args.geometry_check:
498 rule('Geometry Validation')
499 gen.check()
501 return lst_file
504 def run_fastercap_extraction(self,
505 args: argparse.Namespace,
506 pex_context: KLayoutExtractionContext,
507 lst_file: str):
508 rule('FasterCap Execution')
509 info(f"Configure number of OpenMP threads (environmental variable OMP_NUM_THREADS) as {args.num_threads}")
510 os.environ['OMP_NUM_THREADS'] = f"{args.num_threads}"
512 log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Output.txt")
513 raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Raw.csv")
514 avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Avg.csv")
515 expanded_netlist_path = os.path.join(args.output_dir_path,
516 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.cir")
517 expanded_netlist_csv_path = os.path.join(args.output_dir_path,
518 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.csv")
519 reduced_netlist_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Reduced_Netlist.cir")
521 run_fastercap(exe_path=args.fastercap_exe_path,
522 lst_file_path=lst_file,
523 log_path=log_path,
524 tolerance=args.fastercap_tolerance,
525 d_coeff=args.fastercap_d_coeff,
526 mesh_refinement_value=args.fastercap_mesh_refinement_value,
527 ooc_condition=args.fastercap_ooc_condition,
528 auto_preconditioner=args.fastercap_auto_preconditioner,
529 galerkin_scheme=args.fastercap_galerkin_scheme,
530 jacobi_preconditioner=args.fastercap_jacobi_preconditioner)
532 cap_matrix = fastercap_parse_capacitance_matrix(log_path)
533 cap_matrix.write_csv(raw_csv_path)
535 cap_matrix = cap_matrix.averaged_off_diagonals()
536 cap_matrix.write_csv(avg_csv_path)
538 netlist_expander = NetlistExpander()
539 expanded_netlist = netlist_expander.expand(
540 extracted_netlist=pex_context.lvsdb.netlist(),
541 top_cell_name=pex_context.annotated_top_cell.name,
542 cap_matrix=cap_matrix,
543 blackbox_devices=args.blackbox_devices
544 )
546 # create a nice CSV for reports, useful for spreadsheets
547 netlist_csv_writer = NetlistCSVWriter()
548 netlist_csv_writer.write_csv(netlist=expanded_netlist,
549 top_cell_name=pex_context.annotated_top_cell.name,
550 output_path=expanded_netlist_csv_path)
552 rule("Extended netlist (CSV format):")
553 with open(expanded_netlist_csv_path, 'r') as f:
554 for line in f.readlines():
555 subproc(line[:-1]) # abusing subproc, simply want verbatim
556 rule()
558 info(f"Wrote expanded netlist CSV to: {expanded_netlist_csv_path}")
560 netlist_printer = self.create_netlist_printer(args, ExtractionEngine.FASTERCAP)
561 netlist_printer.write(expanded_netlist, expanded_netlist_path)
562 info(f"Wrote expanded netlist to: {expanded_netlist_path}")
564 # FIXME: should this be already reduced?
565 if args.output_spice_path:
566 netlist_printer.write(expanded_netlist, args.output_spice_path)
567 info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")
569 netlist_reducer = NetlistReducer()
570 reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
571 top_cell_name=pex_context.annotated_top_cell.name)
572 netlist_printer.write(reduced_netlist, reduced_netlist_path)
573 info(f"Wrote reduced netlist to: {reduced_netlist_path}")
575 self._fastercap_extracted_csv_path = expanded_netlist_csv_path
577 def run_magic_extraction(self,
578 args: argparse.Namespace):
579 if args.input_mode != InputMode.GDS:
580 error(f"MAGIC engine only works with GDS input mode"
581 f" (currently {args.input_mode})")
582 return
584 magic_run_dir = os.path.join(args.output_dir_path, f"magic_{args.magic_pex_mode}")
585 magic_log_path = os.path.join(magic_run_dir,
586 f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Output.txt")
587 magic_script_path = os.path.join(magic_run_dir,
588 f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Script.tcl")
590 output_netlist_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}.pex.spice")
591 report_db_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}_MAGIC_report.rdb.gz")
593 os.makedirs(magic_run_dir, exist_ok=True)
595 prepare_magic_script(gds_path=args.effective_gds_path,
596 cell_name=args.effective_cell_name,
597 run_dir_path=magic_run_dir,
598 script_path=magic_script_path,
599 output_netlist_path=output_netlist_path,
600 pex_mode=args.magic_pex_mode,
601 c_threshold=args.magic_cthresh,
602 r_threshold=args.magic_rthresh,
603 tolerance=args.magic_tolerance,
604 halo=args.magic_halo,
605 short_mode=args.magic_short_mode,
606 merge_mode=args.magic_merge_mode)
608 run_magic(exe_path=args.magic_exe_path,
609 magicrc_path=args.magicrc_path,
610 script_path=magic_script_path,
611 log_path=magic_log_path)
613 magic_pex_run = parse_magic_pex_run(Path(magic_run_dir))
615 layout = kdb.Layout()
616 layout.read(args.effective_gds_path)
618 report = rdb.ReportDatabase('')
619 magic_log_analyzer = MagicLogAnalyzer(magic_pex_run=magic_pex_run,
620 report=report,
621 dbu=layout.dbu)
622 magic_log_analyzer.analyze()
623 report.save(report_db_path)
625 rule("Paths")
626 subproc(f"Report DB saved at: {report_db_path}")
627 subproc(f"SPICE netlist saved at: {output_netlist_path}")
629 if os.path.exists(output_netlist_path):
630 if args.output_spice_path and os.path.exists(output_netlist_path):
631 shutil.copy(output_netlist_path, args.output_spice_path)
632 info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")
634 rule("MAGIC PEX SPICE netlist")
635 with open(output_netlist_path, 'r') as f:
636 subproc(f.read())
637 rule()
639 def run_fastcap_extraction(self,
640 args: argparse.Namespace,
641 pex_context: KLayoutExtractionContext,
642 lst_file: str):
643 rule('FastCap2 Execution')
645 log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Output.txt")
646 raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Raw.csv")
647 avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Avg.csv")
648 expanded_netlist_path = os.path.join(args.output_dir_path,
649 f"{args.effective_cell_name}_FastCap2_Expanded_Netlist.cir")
650 reduced_netlist_path = os.path.join(args.output_dir_path,
651 f"{args.effective_cell_name}_FastCap2_Reduced_Netlist.cir")
653 run_fastcap(exe_path=args.fastcap_exe_path,
654 lst_file_path=lst_file,
655 log_path=log_path)
657 cap_matrix = fastcap_parse_capacitance_matrix(log_path)
658 cap_matrix.write_csv(raw_csv_path)
660 cap_matrix = cap_matrix.averaged_off_diagonals()
661 cap_matrix.write_csv(avg_csv_path)
663 netlist_expander = NetlistExpander()
664 expanded_netlist = netlist_expander.expand(
665 extracted_netlist=pex_context.lvsdb.netlist(),
666 top_cell_name=pex_context.annotated_top_cell.name,
667 cap_matrix=cap_matrix,
668 blackbox_devices=args.blackbox_devices
669 )
671 netlist_printer = self.create_netlist_printer(args, ExtractionEngine.FASTCAP2)
672 netlist_printer.write(expanded_netlist, expanded_netlist_path)
673 info(f"Wrote expanded netlist to: {expanded_netlist_path}")
675 # FIXME: should this be already reduced?
676 if args.output_spice_path:
677 netlist_printer.write(expanded_netlist, args.output_spice_path)
678 info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")
680 netlist_reducer = NetlistReducer()
681 reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
682 top_cell_name=pex_context.annotated_top_cell.name)
683 netlist_printer.write(reduced_netlist, reduced_netlist_path)
685 info(f"Wrote reduced netlist to: {reduced_netlist_path}")
687 def run_kpex_2_5d_engine(self,
688 args: argparse.Namespace,
689 pex_context: KLayoutExtractionContext,
690 tech_info: TechInfo,
691 report_path: str,
692 netlist_csv_path: Optional[str],
693 expanded_netlist_path: Optional[str]):
694 # TODO: make this separatly configurable
695 # for now we use 0
696 args.rcx25d_delaunay_amax = 0
697 args.rcx25d_delaunay_b = 0.5
699 extractor = RCX25Extractor(pex_context=pex_context,
700 pex_mode=args.pex_mode,
701 delaunay_amax=args.rcx25d_delaunay_amax,
702 delaunay_b=args.rcx25d_delaunay_b,
703 scale_ratio_to_fit_halo=args.scale_ratio_to_fit_halo,
704 tech_info=tech_info,
705 report_path=report_path)
706 extraction_results = extractor.extract()
708 if netlist_csv_path is not None:
709 # TODO: merge this with klayout_pex/klayout/netlist_csv.py
711 with open(netlist_csv_path, 'w', encoding='utf-8') as f:
712 summary = extraction_results.summarize()
714 f.write('Device;Net1;Net2;Capacitance [fF];Resistance [Ω]\n')
715 for idx, (key, cap_value) in enumerate(sorted(summary.capacitances.items())):
716 f.write(f"C{idx + 1};{key.net1};{key.net2};{round(cap_value, 3)};\n")
717 for idx, (key, res_value) in enumerate(sorted(summary.resistances.items())):
718 f.write(f"R{idx + 1};{key.net1};{key.net2};;{round(res_value, 3)}\n")
720 rule('kpex/2.5D extracted netlist (CSV format)')
721 with open(netlist_csv_path, 'r') as f:
722 for line in f.readlines():
723 subproc(line[:-1]) # abusing subproc, simply want verbatim
725 rule('Extracted netlist CSV')
726 subproc(f"{netlist_csv_path}")
728 if expanded_netlist_path is not None:
729 rule('kpex/2.5D extracted netlist (SPICE format)')
730 netlist_expander = RCX25NetlistExpander()
731 expanded_netlist = netlist_expander.expand(
732 extracted_netlist=pex_context.lvsdb.netlist(),
733 top_cell_name=pex_context.annotated_top_cell.name,
734 extraction_results=extraction_results,
735 blackbox_devices=args.blackbox_devices
736 )
738 netlist_printer = self.create_netlist_printer(args, ExtractionEngine.K25D)
739 netlist_printer.write(expanded_netlist, expanded_netlist_path)
740 subproc(f"Wrote expanded netlist to: {expanded_netlist_path}")
742 # FIXME: should this be already reduced?
743 if args.output_spice_path:
744 netlist_printer.write(expanded_netlist, args.output_spice_path)
745 info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")
747 # NOTE: there was a KLayout bug that some of the categories were lost,
748 # so that the marker browser could not load the report file
749 try:
750 report = rdb.ReportDatabase('')
751 report.load(report_path) # try loading rdb
752 except Exception as e:
753 rule("Repair broken marker DB")
754 warning(f"Detected KLayout bug: RDB can't be loaded due to exception {e}")
755 repair_rdb(report_path)
757 return extraction_results
    def setup_logging(self, args: argparse.Namespace):
        """Configure file logging for this CLI run.

        Two log files are maintained: a *plain* one (no formatter, verbatim
        messages) and a *formatted* one (timestamp + level prefix).  Both are
        first created in ``args.output_dir_base_path`` — the final, cell-specific
        ``args.output_dir_path`` is only derived during ``validate_args()`` —
        and afterwards moved into the final output directory.

        Finally the global log level is set from ``args.log_level``.
        """
        def register_log_file_handler(log_path: str,
                                      formatter: Optional[logging.Formatter]) -> logging.Handler:
            # Create a FileHandler that also captures SUBPROCESS-level records
            # and attach it to the project-wide logging machinery.
            handler = logging.FileHandler(log_path)
            handler.setLevel(LogLevel.SUBPROCESS)
            if formatter:
                handler.setFormatter(formatter)
            register_additional_handler(handler)
            return handler

        def reregister_log_file_handler(handler: logging.Handler,
                                        log_path: str,
                                        formatter: Optional[logging.Formatter]):
            # Detach and close the preliminary handler, move its log file into
            # the final output directory (backing up a pre-existing log there,
            # with a timestamp derived from that file's ctime), then register
            # a fresh handler on the moved file.
            deregister_additional_handler(handler)
            handler.flush()
            handler.close()
            os.makedirs(args.output_dir_path, exist_ok=True)
            new_path = os.path.join(args.output_dir_path, os.path.basename(log_path))
            if os.path.exists(new_path):
                ctime = os.path.getctime(new_path)
                dt = datetime.fromtimestamp(ctime)
                timestamp = dt.strftime('%Y-%m-%d_%H-%M-%S')
                # [:-4] strips the trailing '.log' before appending the suffix
                backup_path = f"{new_path[:-4]}_{timestamp}.bak.log"
                shutil.move(new_path, backup_path)
            log_path = shutil.move(log_path, new_path)  # shutil.move returns the destination path
            register_log_file_handler(log_path, formatter)

        # Setup preliminary logger in the base output directory.
        cli_log_path_plain = os.path.join(args.output_dir_base_path, f"kpex_plain.log")
        cli_log_path_formatted = os.path.join(args.output_dir_base_path, f"kpex.log")
        formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s')
        file_handler_plain = register_log_file_handler(cli_log_path_plain, None)
        file_handler_formatted = register_log_file_handler(cli_log_path_formatted, formatter)
        try:
            self.validate_args(args)
        except ArgumentValidationError:
            # Validation failed: if the final output dir was already derived,
            # still relocate the logs there so the failure is documented,
            # then exit with an error status.
            if hasattr(args, 'output_dir_path'):
                reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
                reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)
            sys.exit(1)
        reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
        reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)

        set_log_level(args.log_level)
804 @staticmethod
805 def modification_date(filename: str) -> datetime:
806 t = os.path.getmtime(filename)
807 return datetime.fromtimestamp(t)
809 def create_lvsdb(self, args: argparse.Namespace) -> kdb.LayoutVsSchematic:
810 lvsdb = kdb.LayoutVsSchematic()
812 match args.input_mode:
813 case InputMode.LVSDB:
814 lvsdb.read(args.lvsdb_path)
815 case InputMode.GDS:
816 lvs_log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_lvs.log")
817 lvsdb_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}.lvsdb.gz")
818 lvsdb_cache_path = os.path.join(args.cache_dir_path, args.pdk,
819 os.path.splitroot(os.path.abspath(args.gds_path))[-1],
820 f"{args.effective_cell_name}.lvsdb.gz")
822 lvs_needed = True
824 if args.cache_lvs:
825 if not os.path.exists(lvsdb_cache_path):
826 info(f"Cache miss: extracted LVSDB does not exist")
827 subproc(lvsdb_cache_path)
828 elif self.modification_date(lvsdb_cache_path) <= self.modification_date(args.gds_path):
829 info(f"Cache miss: extracted LVSDB is older than the input GDS")
830 subproc(lvsdb_cache_path)
831 else:
832 warning(f"Cache hit: Reusing cached LVSDB")
833 subproc(lvsdb_cache_path)
834 lvs_needed = False
836 if lvs_needed:
837 lvs_runner = LVSRunner()
838 lvs_runner.run_klayout_lvs(exe_path=args.klayout_exe_path,
839 lvs_script=args.lvs_script_path,
840 gds_path=args.effective_gds_path,
841 schematic_path=args.effective_schematic_path,
842 log_path=lvs_log_path,
843 lvsdb_path=lvsdb_path,
844 verbose=args.klayout_lvs_verbose)
845 if args.cache_lvs:
846 cache_dir_path = os.path.dirname(lvsdb_cache_path)
847 if not os.path.exists(cache_dir_path):
848 os.makedirs(cache_dir_path, exist_ok=True)
849 shutil.copy(lvsdb_path, lvsdb_cache_path)
851 lvsdb.read(lvsdb_path)
852 return lvsdb
    def main(self, argv: List[str]):
        """CLI entry point: parse arguments, set up logging, and drive the
        requested extraction engines (MAGIC, FasterCap/FastCap, kpex/2.5D).

        :param argv: full command line including the program name (``argv[0]``
                     is skipped when parsing).
        """
        # Echo the command line, except for trivial -v/-h invocations.
        if '-v' not in argv and \
           '--version' not in argv and \
           '-h' not in argv and \
           '--help' not in argv:
            rule('Command line arguments')
            subproc(' '.join(map(shlex.quote, sys.argv)))

        env = Env.from_os_environ()
        args = self.parse_args(arg_list=argv[1:], env=env)

        os.makedirs(args.output_dir_base_path, exist_ok=True)
        self.setup_logging(args)

        tech_info = TechInfo.from_json(args.tech_pbjson_path,
                                       dielectric_filter=args.dielectric_filter)

        # Optional CLI override of the technology's side halo.
        if args.halo is not None:
            tech_info.tech.process_parasitics.side_halo = args.halo

        if args.run_magic:
            rule('MAGIC')
            self.run_magic_extraction(args)

        # no need to run LVS etc if only running magic engine
        if not (args.run_fastcap or args.run_fastercap or args.run_2_5D):
            return

        rule('Prepare LVSDB')
        lvsdb = self.create_lvsdb(args)

        pex_context = KLayoutExtractionContext.prepare_extraction(top_cell=args.effective_cell_name,
                                                                  lvsdb=lvsdb,
                                                                  tech=tech_info,
                                                                  blackbox_devices=args.blackbox_devices)
        rule('Non-empty layers in LVS database')
        for gds_pair, layer_info in pex_context.extracted_layers.items():
            names = [l.lvs_layer_name for l in layer_info.source_layers]
            info(f"{gds_pair} -> ({' '.join(names)})")

        # Dump the annotated and the internal layouts for inspection.
        gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_extracted.oas")
        pex_context.annotated_layout.write(gds_path)

        gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_internal.oas")
        pex_context.lvsdb.internal_layout().write(gds_path)

        def dump_layers(cell: str,
                        layers: List[KLayoutExtractedLayerInfo],
                        layout_dump_path: str):
            # Write the given extracted layers into a fresh single-cell layout
            # (same DBU as the LVSDB's internal layout) for debugging.
            layout = kdb.Layout()
            layout.dbu = lvsdb.internal_layout().dbu

            top_cell = layout.create_cell(cell)
            for ulyr in layers:
                li = kdb.LayerInfo(*ulyr.gds_pair)
                li.name = ulyr.lvs_layer_name
                layer = layout.insert_layer(li)
                layout.insert(top_cell.cell_index(), layer, ulyr.region.dup())

            layout.write(layout_dump_path)

        if len(pex_context.unnamed_layers) >= 1:
            layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_unnamed_LVS_layers.gds.gz")
            dump_layers(cell=args.effective_cell_name,
                        layers=pex_context.unnamed_layers,
                        layout_dump_path=layout_dump_path)

        if len(pex_context.extracted_layers) >= 1:
            layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_nonempty_LVS_layers.gds.gz")
            nonempty_layers = [l \
                               for layers in pex_context.extracted_layers.values() \
                               for l in layers.source_layers]
            dump_layers(cell=args.effective_cell_name,
                        layers=nonempty_layers,
                        layout_dump_path=layout_dump_path)
        else:
            # Nothing extracted at all: abort, PEX would be meaningless.
            error("No extracted layers found")
            sys.exit(1)

        if args.run_fastcap or args.run_fastercap:
            # Both engines share the same FasterCap input deck.
            lst_file = self.build_fastercap_input(args=args,
                                                  pex_context=pex_context,
                                                  tech_info=tech_info)
            if args.run_fastercap:
                self.run_fastercap_extraction(args=args,
                                              pex_context=pex_context,
                                              lst_file=lst_file)
            if args.run_fastcap:
                self.run_fastcap_extraction(args=args,
                                            pex_context=pex_context,
                                            lst_file=lst_file)

        if args.run_2_5D:
            rule("kpex/2.5D PEX Engine")
            report_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_k25d_pex_report.rdb.gz")
            netlist_csv_path = os.path.abspath(os.path.join(args.output_dir_path,
                                                            f"{args.effective_cell_name}_k25d_pex_netlist.csv"))
            netlist_spice_path = os.path.abspath(os.path.join(args.output_dir_path,
                                                              f"{args.effective_cell_name}_k25d_pex_netlist.spice"))

            self._rcx25_extraction_results = self.run_kpex_2_5d_engine(  # NOTE: store for test case
                args=args,
                pex_context=pex_context,
                tech_info=tech_info,
                report_path=report_path,
                netlist_csv_path=netlist_csv_path,
                expanded_netlist_path=netlist_spice_path
            )

            self._rcx25_extracted_csv_path = netlist_csv_path
965 @property
966 def rcx25_extraction_results(self) -> ExtractionResults:
967 if not hasattr(self, '_rcx25_extraction_results'):
968 raise Exception('rcx25_extraction_results is not initialized, was run_kpex_2_5d_engine called?')
969 return self._rcx25_extraction_results
971 @property
972 def rcx25_extracted_csv_path(self) -> str:
973 if not hasattr(self, '_rcx25_extracted_csv_path'):
974 raise Exception('rcx25_extracted_csv_path is not initialized, was run_kpex_2_5d_engine called?')
975 return self._rcx25_extracted_csv_path
977 @property
978 def fastercap_extracted_csv_path(self) -> str:
979 if not hasattr(self, '_fastercap_extracted_csv_path'):
980 raise Exception('fastercap_extracted_csv_path is not initialized, was run_fastercap_extraction called?')
981 return self._fastercap_extracted_csv_path
if __name__ == "__main__":
    # Script entry point: run the CLI against the process's command line.
    KpexCLI().main(sys.argv)