Coverage for klayout_pex / kpex_cli.py: 71%
553 statements
« prev ^ index » next coverage.py v7.13.4, created at 2026-03-02 17:12 +0000
1#! /usr/bin/env python3
2#
3# --------------------------------------------------------------------------------
4# SPDX-FileCopyrightText: 2024-2025 Martin Jan Köhler and Harald Pretl
5# Johannes Kepler University, Institute for Integrated Circuits.
6#
7# This file is part of KPEX
8# (see https://github.com/iic-jku/klayout-pex).
9#
10# This program is free software: you can redistribute it and/or modify
11# it under the terms of the GNU General Public License as published by
12# the Free Software Foundation, either version 3 of the License, or
13# (at your option) any later version.
14#
15# This program is distributed in the hope that it will be useful,
16# but WITHOUT ANY WARRANTY; without even the implied warranty of
17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18# GNU General Public License for more details.
19#
20# You should have received a copy of the GNU General Public License
21# along with this program. If not, see <http://www.gnu.org/licenses/>.
22# SPDX-License-Identifier: GPL-3.0-or-later
23# --------------------------------------------------------------------------------
24#
26import argparse
27from datetime import datetime
28from enum import StrEnum
29from functools import cached_property
30import logging
31import os
32import os.path
33from pathlib import Path
34import rich.console
35import rich.markdown
36import rich.text
37from rich_argparse import RichHelpFormatter
38import shlex
39import shutil
40import sys
41from typing import *
43import klayout.db as kdb
44import klayout.rdb as rdb
46from .common.path_validation import validate_files, FileValidationResult
47from .env import EnvVar, Env
48from .extraction_engine import ExtractionEngine
49from .fastercap.fastercap_input_builder import FasterCapInputBuilder
50from .fastercap.fastercap_model_generator import FasterCapModelGenerator
51from .fastercap.fastercap_runner import run_fastercap, fastercap_parse_capacitance_matrix
52from .fastcap.fastcap_runner import run_fastcap, fastcap_parse_capacitance_matrix
53from .klayout.lvs_runner import LVSRunner
54from .klayout.lvsdb_extractor import KLayoutExtractionContext, KLayoutExtractedLayerInfo
55from .klayout.netlist_expander import NetlistExpander
56from .klayout.netlist_csv import NetlistCSVWriter
57from .klayout.netlist_printer import NetlistPrinter
58from .klayout.netlist_reducer import NetlistReducer
59from .klayout.repair_rdb import repair_rdb
60from .log import (
61 LogLevel,
62 set_log_level,
63 register_additional_handler,
64 deregister_additional_handler,
65 # console,
66 # debug,
67 info,
68 warning,
69 subproc,
70 error,
71 rule
72)
73from .magic.magic_ext_file_parser import parse_magic_pex_run
74from .magic.magic_runner import (
75 MagicPEXMode,
76 MagicShortMode,
77 MagicMergeMode,
78 run_magic,
79 prepare_magic_script,
80)
81from .magic.magic_log_analyzer import MagicLogAnalyzer
82from .pdk_config import PDKConfig
83from .rcx25.extractor import RCX25Extractor, ExtractionResults
84from .rcx25.netlist_expander import RCX25NetlistExpander
85from .rcx25.pex_mode import PEXMode
86from .tech_info import TechInfo
87from .util.multiple_choice import MultipleChoicePattern
88from .util.argparse_helpers import render_enum_help, true_or_false
89from .version import __version__
# ------------------------------------------------------------------------------------

# Program name shown in --help, --version and error/log messages.
PROGRAM_NAME = "kpex"
class ArgumentValidationError(Exception):
    """Raised by KpexCLI.validate_args() after one or more CLI arguments failed validation."""
    pass
class InputMode(StrEnum):
    """How the layout input is provided to the PEX flow."""
    LVSDB = "lvsdb"  # reuse an existing KLayout LVS database (bypasses the LVS run)
    GDS = "gds"      # take a GDS layout and run LVS first
# TODO: this should be externally configurable
class PDK(StrEnum):
    """Supported process design kits (PDKs)."""
    GF180MCUD = 'gf180mcuD'
    IHP_SG13G2 = 'ihp_sg13g2'
    SKY130A = 'sky130A'

    @cached_property
    def config(self) -> PDKConfig:
        """Return the PDKConfig (LVS script path + tech protobuf JSON path) for this PDK.

        Raises:
            NotImplementedError: for an enum case without a registered LVS script.
        """
        # NOTE: installation paths of resources in the distribution wheel differs from source repo
        base_dir = os.path.dirname(os.path.realpath(__file__))

        # NOTE: .git can be dir (standalone clone), or file (in case of submodule)
        if os.path.exists(os.path.join(base_dir, '..', '.git')):  # in source repo
            base_dir = os.path.dirname(base_dir)
            tech_pb_json_dir = os.path.join(base_dir, 'klayout_pex_protobuf')
        else:  # site-packages/klayout_pex -> site-packages/klayout_pex_protobuf
            tech_pb_json_dir = os.path.join(os.path.dirname(base_dir), 'klayout_pex_protobuf')

        # Only the LVS script filename differs per PDK; the rest of the config is uniform.
        lvs_script_name_by_pdk = {
            PDK.GF180MCUD: 'gf180mcu.lvs',
            PDK.IHP_SG13G2: 'sg13g2.lvs',
            PDK.SKY130A: 'sky130.lvs',
        }
        try:
            lvs_script_name = lvs_script_name_by_pdk[self]
        except KeyError:
            raise NotImplementedError(f"Unhandled enum case {self}") from None

        return PDKConfig(
            name=self,
            pex_lvs_script_path=os.path.join(base_dir, 'pdk', self, 'libs.tech', 'kpex', lvs_script_name),
            tech_pb_json_path=os.path.join(tech_pb_json_dir, f"{self}_tech.pb.json")
        )
148class KpexCLI:
149 @staticmethod
150 def parse_args(arg_list: List[str],
151 env: Env) -> argparse.Namespace:
152 # epilog = f"See '{PROGRAM_NAME} <subcommand> -h' for help on subcommand"
153 epilog = EnvVar.help_epilog_table()
154 epilog_md = rich.console.Group(
155 rich.text.Text('Environmental variables:', style='argparse.groups'),
156 rich.markdown.Markdown(epilog, style='argparse.text')
157 )
158 main_parser = argparse.ArgumentParser(description=f"{PROGRAM_NAME}: "
159 f"KLayout-integrated Parasitic Extraction Tool",
160 epilog=epilog_md,
161 add_help=False,
162 formatter_class=RichHelpFormatter)
164 group_special = main_parser.add_argument_group("Special options")
165 group_special.add_argument("--help", "-h", action='help', help="show this help message and exit")
166 group_special.add_argument("--version", "-v", action='version', version=f'{PROGRAM_NAME} {__version__}')
167 group_special.add_argument("--log_level", dest='log_level', default='subprocess',
168 help=render_enum_help(topic='log_level', enum_cls=LogLevel))
169 group_special.add_argument("--threads", dest='num_threads', type=int,
170 default=os.cpu_count() * 4,
171 help="number of threads (e.g. for FasterCap) (default is %(default)s)")
173 group_pex = main_parser.add_argument_group("Parasitic Extraction Setup")
174 group_pex.add_argument("--pdk", dest="pdk", required=True,
175 type=PDK, choices=list(PDK),
176 help=render_enum_help(topic='pdk', enum_cls=PDK))
178 group_pex.add_argument("--out_dir", dest="output_dir_base_path", default="output",
179 help="Run directory path (default is '%(default)s')")
181 group_pex.add_argument("--out_spice", "-o", dest="output_spice_path", default=None,
182 help="Optional additional SPICE output path (default is none)")
184 group_pex_input = main_parser.add_argument_group("Parasitic Extraction Input",
185 description="Either LVS is run, or an existing LVSDB is used")
186 group_pex_input.add_argument("--gds", "-g", dest="gds_path", default=None,
187 help="GDS path (for LVS)")
188 group_pex_input.add_argument("--schematic", "-s", dest="schematic_path",
189 help="Schematic SPICE netlist path (for LVS). "
190 "If none given, a dummy schematic will be created")
191 group_pex_input.add_argument("--lvsdb", "-l", dest="lvsdb_path", default=None,
192 help="KLayout LVSDB path (bypass LVS)")
193 group_pex_input.add_argument("--cell", "-c", dest="cell_name", default=None,
194 help="Cell (default is the top cell)")
196 group_pex_input.add_argument("--cache-lvs", dest="cache_lvs",
197 type=true_or_false, default=True,
198 help="Used cached LVSDB (for given input GDS) (default is %(default)s)")
199 group_pex_input.add_argument("--cache-dir", dest="cache_dir_path", default=None,
200 help="Path for cached LVSDB (default is .kpex_cache within --out_dir)")
201 group_pex_input.add_argument("--lvs-verbose", dest="klayout_lvs_verbose",
202 type=true_or_false, default=False,
203 help="Verbose KLayout LVS output (default is %(default)s)")
205 group_pex_options = main_parser.add_argument_group("Parasitic Extraction Options")
206 group_pex_options.add_argument("--blackbox", dest="blackbox_devices",
207 type=true_or_false, default=False, # TODO: in the future this should be True by default
208 help="Blackbox devices like MIM/MOM caps, as they are handled by SPICE models "
209 "(default is %(default)s for testing now)")
210 group_pex_options.add_argument("--fastercap", dest="run_fastercap",
211 action='store_true', default=False,
212 help="Run FasterCap engine (default is %(default)s)")
213 group_pex_options.add_argument("--fastcap", dest="run_fastcap",
214 action='store_true', default=False,
215 help="Run FastCap2 engine (default is %(default)s)")
216 group_pex_options.add_argument("--magic", dest="run_magic",
217 action='store_true', default=False,
218 help="Run MAGIC engine (default is %(default)s)")
219 group_pex_options.add_argument("--2.5D", dest="run_2_5D",
220 action='store_true', default=False,
221 help="Run 2.5D analytical engine (default is %(default)s)")
223 group_fastercap = main_parser.add_argument_group("FasterCap options")
224 group_fastercap.add_argument("--k_void", "-k", dest="k_void",
225 type=float, default=3.9,
226 help="Dielectric constant of void (default is %(default)s)")
228 # TODO: reflect that these are also now used by KPEX/2.5D engine!
229 group_fastercap.add_argument("--delaunay_amax", "-a", dest="delaunay_amax",
230 type=float, default=50,
231 help="Delaunay triangulation maximum area (default is %(default)s)")
232 group_fastercap.add_argument("--delaunay_b", "-b", dest="delaunay_b",
233 type=float, default=0.5,
234 help="Delaunay triangulation b (default is %(default)s)")
235 group_fastercap.add_argument("--geo_check", dest="geometry_check",
236 type=true_or_false, default=False,
237 help=f"Validate geometries before passing to FasterCap "
238 f"(default is False)")
239 group_fastercap.add_argument("--diel", dest="dielectric_filter",
240 type=str, default="all",
241 help=f"Comma separated list of dielectric filter patterns. "
242 f"Allowed patterns are: (none, all, -dielname1, +dielname2) "
243 f"(default is %(default)s)")
245 group_fastercap.add_argument("--tolerance", dest="fastercap_tolerance",
246 type=float, default=0.05,
247 help="FasterCap -aX error tolerance (default is %(default)s)")
248 group_fastercap.add_argument("--d_coeff", dest="fastercap_d_coeff",
249 type=float, default=0.5,
250 help=f"FasterCap -d direct potential interaction coefficient to mesh refinement "
251 f"(default is %(default)s)")
252 group_fastercap.add_argument("--mesh", dest="fastercap_mesh_refinement_value",
253 type=float, default=0.5,
254 help="FasterCap -m Mesh relative refinement value (default is %(default)s)")
255 group_fastercap.add_argument("--ooc", dest="fastercap_ooc_condition",
256 type=float, default=2,
257 help="FasterCap -f out-of-core free memory to link memory condition "
258 "(0 = don't go OOC, default is %(default)s)")
259 group_fastercap.add_argument("--auto_precond", dest="fastercap_auto_preconditioner",
260 type=true_or_false, default=True,
261 help=f"FasterCap -ap Automatic preconditioner usage (default is %(default)s)")
262 group_fastercap.add_argument("--galerkin", dest="fastercap_galerkin_scheme",
263 action='store_true', default=False,
264 help=f"FasterCap -g Use Galerkin scheme (default is %(default)s)")
265 group_fastercap.add_argument("--jacobi", dest="fastercap_jacobi_preconditioner",
266 action='store_true', default=False,
267 help="FasterCap -pj Use Jacobi preconditioner (default is %(default)s)")
269 group_magic = main_parser.add_argument_group("MAGIC options")
271 default_magicrc_path = env.default_magicrc_path
272 if default_magicrc_path:
273 magicrc_help = f"Path to magicrc configuration file (default is '{default_magicrc_path}')"
274 else:
275 magicrc_help = "Path to magicrc configuration file "\
276 "(default not available, PDK and PDK_ROOT must be set!)"
278 group_magic.add_argument('--magicrc', dest='magicrc_path', default=default_magicrc_path,
279 help=magicrc_help)
280 group_magic.add_argument("--magic_mode", dest='magic_pex_mode',
281 default=MagicPEXMode.DEFAULT, type=MagicPEXMode, choices=list(MagicPEXMode),
282 help=render_enum_help(topic='magic_mode', enum_cls=MagicPEXMode))
283 group_magic.add_argument("--magic_cthresh", dest="magic_cthresh",
284 type=float, default=0.01,
285 help="Threshold (in fF) for ignored parasitic capacitances (default is %(default)s). "
286 "(MAGIC command: ext2spice cthresh <value>)")
287 group_magic.add_argument("--magic_rthresh", dest="magic_rthresh",
288 type=int, default=100,
289 help="Threshold (in Ω) for ignored parasitic resistances (default is %(default)s). "
290 "(MAGIC command: ext2spice rthresh <value>)")
291 group_magic.add_argument("--magic_tolerance", dest="magic_tolerance",
292 type=float, default=1,
293 help="Set ratio between resistor and device tolerance (default is %(default)s). "
294 "(MAGIC command: extresist tolerance <value>)")
295 group_magic.add_argument("--magic_halo", dest="magic_halo",
296 type=float, default=None,
297 help="Custom sidewall halo distance (in µm) "
298 "(MAGIC command: extract halo <value>) (default is no custom halo)")
299 group_magic.add_argument("--magic_short", dest='magic_short_mode',
300 default=MagicShortMode.DEFAULT, type=MagicShortMode, choices=list(MagicShortMode),
301 help=render_enum_help(topic='magic_short', enum_cls=MagicShortMode))
302 group_magic.add_argument("--magic_merge", dest='magic_merge_mode',
303 default=MagicMergeMode.DEFAULT, type=MagicMergeMode, choices=list(MagicMergeMode),
304 help=render_enum_help(topic='magic_merge', enum_cls=MagicMergeMode))
306 group_25d = main_parser.add_argument_group("2.5D options")
307 group_25d.add_argument("--mode", dest='pex_mode',
308 default=PEXMode.DEFAULT, type=PEXMode, choices=list(PEXMode),
309 help=render_enum_help(topic='mode', enum_cls=PEXMode))
310 group_25d.add_argument("--halo", dest="halo",
311 type=float, default=None,
312 help="Custom sidewall halo distance (in µm) to override tech info "
313 "(default is no custom halo)")
314 group_25d.add_argument("--scale", dest="scale_ratio_to_fit_halo",
315 type=true_or_false, default=True,
316 help=f"Scale fringe ratios, so that halo distance is 100%% (default is %(default)s)")
318 if arg_list is None:
319 arg_list = sys.argv[1:]
320 args = main_parser.parse_args(arg_list)
322 # environmental variables and their defaults
323 args.fastcap_exe_path = env[EnvVar.FASTCAP_EXE]
324 args.fastercap_exe_path = env[EnvVar.FASTERCAP_EXE]
325 args.klayout_exe_path = env[EnvVar.KLAYOUT_EXE]
326 args.magic_exe_path = env[EnvVar.MAGIC_EXE]
328 return args
330 @staticmethod
331 def validate_args(args: argparse.Namespace):
332 found_errors = False
334 pdk_config: PDKConfig = args.pdk.config
335 args.tech_pbjson_path = pdk_config.tech_pb_json_path
336 args.lvs_script_path = pdk_config.pex_lvs_script_path
338 def input_file_stem(path: str):
339 # could be *.gds, or *.gds.gz, so remove all extensions
340 return os.path.basename(path).split(sep='.')[0]
342 if not os.path.isfile(args.klayout_exe_path):
343 path = shutil.which(args.klayout_exe_path)
344 if not path:
345 error(f"Can't locate KLayout executable at {args.klayout_exe_path}")
346 found_errors = True
348 if not os.path.isfile(args.tech_pbjson_path):
349 error(f"Can't read technology file at path {args.tech_pbjson_path}")
350 found_errors = True
352 if not os.path.isfile(args.lvs_script_path):
353 error(f"Can't locate LVS script path at {args.lvs_script_path}")
354 found_errors = True
356 rule('Input Layout')
358 # check engines VS input possiblities
359 match (args.run_magic, args.run_fastcap, args.run_fastercap, args.run_2_5D,
360 args.gds_path, args.lvsdb_path):
361 case (True, _, _, _, None, _):
362 error(f"Running PEX engine MAGIC requires --gds (--lvsdb not possible)")
363 found_errors = True
364 case (False, False, False, False, _, _): # at least one engine must be activated
365 error("No PEX engines activated")
366 engine_help = """
367 | Argument | Description |
368 | ------------ | ------------------------------- |
369 | --2.5D | Run KPEX/2.5D analytical engine |
370 | --fastercap | Run KPEX/FastCap 3D engine |
371 | --fastercap | Run KPEX/FasterCap 3D engine |
372 | --magic | Run MAGIC wrapper engine |
373 """
374 subproc(f"\n\nPlease activate one or more engines using the arguments:")
375 rich.print(rich.markdown.Markdown(engine_help, style='argparse.text'))
376 found_errors = True
377 case (_, _, _, _, None, None):
378 error(f"Neither GDS nor LVSDB was provided")
379 found_errors = True
381 # check if we find magicrc
382 if args.run_magic:
383 if args.magicrc_path is None:
384 error(f"magicrc not available, requires any those:\n"
385 f"\t• set environmental variables PDK_ROOT / PDK\n"
386 f"\t• pass argument --magicrc")
387 found_errors = True
388 else:
389 result = validate_files([args.magicrc_path])
390 for f in result.failures:
391 error(f"Invalid magicrc: {f.reason} at {str(f.path)}")
392 found_errors = True
394 # input mode: LVS or existing LVSDB?
395 if args.gds_path:
396 info(f"GDS input file passed, running in LVS mode")
397 args.input_mode = InputMode.GDS
398 if not os.path.isfile(args.gds_path):
399 error(f"Can't read GDS file (LVS input) at path {args.gds_path}")
400 found_errors = True
401 else:
402 args.layout = kdb.Layout()
403 args.layout.read(args.gds_path)
405 top_cells = args.layout.top_cells()
407 if args.cell_name: # explicit user-specified cell name
408 args.effective_cell_name = args.cell_name
410 found_cell: Optional[kdb.Cell] = None
411 for cell in args.layout.cells('*'):
412 if cell.name == args.effective_cell_name:
413 found_cell = cell
414 break
415 if not found_cell:
416 error(f"Could not find cell {args.cell_name} in GDS {args.gds_path}")
417 found_errors = True
419 is_only_top_cell = len(top_cells) == 1 and top_cells[0].name == args.cell_name
420 if is_only_top_cell:
421 info(f"Found cell {args.cell_name} in GDS {args.gds_path} (only top cell)")
422 else: # there are other cells => extract the top cell to a tmp layout
423 run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}"
424 args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id)
425 os.makedirs(args.output_dir_path, exist_ok=True)
426 args.effective_gds_path = os.path.join(args.output_dir_path,
427 f"{args.cell_name}_exported.gds.gz")
428 info(f"Found cell {args.cell_name} in GDS {args.gds_path}, "
429 f"but it is not the only top cell, "
430 f"so layout is exported to: {args.effective_gds_path}")
432 found_cell.write(args.effective_gds_path)
433 else: # find top cell
434 if len(top_cells) == 1:
435 args.effective_cell_name = top_cells[0].name
436 info(f"No explicit top cell specified, using top cell '{args.effective_cell_name}'")
437 else:
438 args.effective_cell_name = 'TOP'
439 error(f"Could not determine the default top cell in GDS {args.gds_path}, "
440 f"there are multiple: {', '.join([c.name for c in top_cells])}. "
441 f"Use --cell to specify the cell")
442 found_errors = True
444 if not hasattr(args, 'effective_gds_path'):
445 args.effective_gds_path = args.gds_path
446 elif args.lvsdb_path is not None:
447 info(f"LVSDB input file passed, bypassing LVS")
448 args.input_mode = InputMode.LVSDB
449 if not os.path.isfile(args.lvsdb_path):
450 error(f"Can't read KLayout LVSDB file at path {args.lvsdb_path}")
451 found_errors = True
452 else:
453 lvsdb = kdb.LayoutVsSchematic()
454 lvsdb.read(args.lvsdb_path)
455 top_cell: kdb.Cell = lvsdb.internal_top_cell()
456 args.effective_cell_name = top_cell.name
458 if hasattr(args, 'effective_cell_name'):
459 run_dir_id: str
460 match args.input_mode:
461 case InputMode.GDS:
462 run_dir_id = f"{input_file_stem(args.gds_path)}__{args.effective_cell_name}"
463 case InputMode.LVSDB:
464 run_dir_id = f"{input_file_stem(args.lvsdb_path)}__{args.effective_cell_name}"
465 case _:
466 raise NotImplementedError(f"Unknown input mode {args.input_mode}")
468 args.output_dir_path = os.path.join(args.output_dir_base_path, run_dir_id)
469 os.makedirs(args.output_dir_path, exist_ok=True)
470 if args.input_mode == InputMode.GDS:
471 if args.schematic_path:
472 args.effective_schematic_path = args.schematic_path
473 if not os.path.isfile(args.schematic_path):
474 error(f"Can't read schematic (LVS input) at path {args.schematic_path}")
475 found_errors = True
476 else:
477 info(f"LVS input schematic not specified (argument --schematic), using dummy schematic")
478 args.effective_schematic_path = os.path.join(args.output_dir_path,
479 f"{args.effective_cell_name}_dummy_schematic.spice")
480 with open(args.effective_schematic_path, 'w', encoding='utf-8') as f:
481 f.writelines([
482 f".subckt {args.effective_cell_name} VDD VSS\n",
483 '.ends\n',
484 '.end\n'
485 ])
487 try:
488 args.log_level = LogLevel[args.log_level.upper()]
489 except KeyError:
490 error(f"Requested log level {args.log_level.lower()} does not exist, "
491 f"{render_enum_help(topic='log_level', enum_cls=LogLevel, print_default=False)}")
492 found_errors = True
494 try:
495 pattern_string: str = args.dielectric_filter
496 args.dielectric_filter = MultipleChoicePattern(pattern=pattern_string)
497 except ValueError as e:
498 error("Failed to parse --diel arg", e)
499 found_errors = True
501 if args.cache_dir_path is None:
502 args.cache_dir_path = os.path.join(args.output_dir_base_path, '.kpex_cache')
504 if found_errors:
505 raise ArgumentValidationError("Argument validation failed")
507 def create_netlist_printer(self,
508 args: argparse.Namespace,
509 extraction_engine: ExtractionEngine):
510 printer = NetlistPrinter(extraction_engine=extraction_engine,
511 pdk=args.pdk)
512 return printer
514 def build_fastercap_input(self,
515 args: argparse.Namespace,
516 pex_context: KLayoutExtractionContext,
517 tech_info: TechInfo) -> str:
518 rule('Process stackup')
519 fastercap_input_builder = FasterCapInputBuilder(pex_context=pex_context,
520 tech_info=tech_info,
521 k_void=args.k_void,
522 delaunay_amax=args.delaunay_amax,
523 delaunay_b=args.delaunay_b)
524 gen: FasterCapModelGenerator = fastercap_input_builder.build()
526 rule('FasterCap Input File Generation')
527 faster_cap_input_dir_path = os.path.join(args.output_dir_path, 'FasterCap_Input_Files')
528 os.makedirs(faster_cap_input_dir_path, exist_ok=True)
530 lst_file = gen.write_fastcap(output_dir_path=faster_cap_input_dir_path, prefix='FasterCap_Input_')
532 rule('STL File Generation')
533 geometry_dir_path = os.path.join(args.output_dir_path, 'Geometries')
534 os.makedirs(geometry_dir_path, exist_ok=True)
535 gen.dump_stl(output_dir_path=geometry_dir_path, prefix='')
537 if args.geometry_check:
538 rule('Geometry Validation')
539 gen.check()
541 return lst_file
    def run_fastercap_extraction(self,
                                 args: argparse.Namespace,
                                 pex_context: KLayoutExtractionContext,
                                 lst_file: str):
        """Run FasterCap on the prepared input, then expand/reduce the netlist.

        Pipeline: run FasterCap → parse its capacitance matrix from the log →
        write raw + off-diagonal-averaged CSV matrices → expand the LVS netlist
        with the extracted capacitances → write CSV/SPICE outputs → write a
        reduced netlist. Stores the expanded-netlist CSV path on self.
        """
        rule('FasterCap Execution')
        info(f"Configure number of OpenMP threads (environmental variable OMP_NUM_THREADS) as {args.num_threads}")
        os.environ['OMP_NUM_THREADS'] = f"{args.num_threads}"

        # all output artifacts live in the run directory, prefixed with the cell name
        log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Output.txt")
        raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Raw.csv")
        avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Result_Matrix_Avg.csv")
        expanded_netlist_path = os.path.join(args.output_dir_path,
                                             f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.cir")
        expanded_netlist_csv_path = os.path.join(args.output_dir_path,
                                                 f"{args.effective_cell_name}_FasterCap_Expanded_Netlist.csv")
        reduced_netlist_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FasterCap_Reduced_Netlist.cir")

        run_fastercap(exe_path=args.fastercap_exe_path,
                      lst_file_path=lst_file,
                      log_path=log_path,
                      tolerance=args.fastercap_tolerance,
                      d_coeff=args.fastercap_d_coeff,
                      mesh_refinement_value=args.fastercap_mesh_refinement_value,
                      ooc_condition=args.fastercap_ooc_condition,
                      auto_preconditioner=args.fastercap_auto_preconditioner,
                      galerkin_scheme=args.fastercap_galerkin_scheme,
                      jacobi_preconditioner=args.fastercap_jacobi_preconditioner)

        # the capacitance matrix is scraped from the FasterCap log output
        cap_matrix = fastercap_parse_capacitance_matrix(log_path)
        cap_matrix.write_csv(raw_csv_path)

        cap_matrix = cap_matrix.averaged_off_diagonals()
        cap_matrix.write_csv(avg_csv_path)

        netlist_expander = NetlistExpander()
        expanded_netlist = netlist_expander.expand(
            extracted_netlist=pex_context.lvsdb.netlist(),
            top_cell_name=pex_context.annotated_top_cell.name,
            cap_matrix=cap_matrix,
            blackbox_devices=args.blackbox_devices
        )

        # create a nice CSV for reports, useful for spreadsheets
        netlist_csv_writer = NetlistCSVWriter()
        netlist_csv_writer.write_csv(netlist=expanded_netlist,
                                     top_cell_name=pex_context.annotated_top_cell.name,
                                     output_path=expanded_netlist_csv_path)

        rule("Extended netlist (CSV format):")
        with open(expanded_netlist_csv_path, 'r') as f:
            for line in f.readlines():
                subproc(line[:-1])  # abusing subproc, simply want verbatim
        rule()

        info(f"Wrote expanded netlist CSV to: {expanded_netlist_csv_path}")

        netlist_printer = self.create_netlist_printer(args, ExtractionEngine.FASTERCAP)
        netlist_printer.write(expanded_netlist, expanded_netlist_path)
        info(f"Wrote expanded netlist to: {expanded_netlist_path}")

        # FIXME: should this be already reduced?
        if args.output_spice_path:
            netlist_printer.write(expanded_netlist, args.output_spice_path)
            info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")

        netlist_reducer = NetlistReducer()
        reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
                                                 top_cell_name=pex_context.annotated_top_cell.name)
        netlist_printer.write(reduced_netlist, reduced_netlist_path)
        info(f"Wrote reduced netlist to: {reduced_netlist_path}")

        # remembered for later consumers (e.g. reporting); only set when this engine ran
        self._fastercap_extracted_csv_path = expanded_netlist_csv_path
617 def run_magic_extraction(self,
618 args: argparse.Namespace):
619 if args.input_mode != InputMode.GDS:
620 error(f"MAGIC engine only works with GDS input mode"
621 f" (currently {args.input_mode})")
622 return
624 magic_run_dir = os.path.join(args.output_dir_path, f"magic_{args.magic_pex_mode}")
625 magic_log_path = os.path.join(magic_run_dir,
626 f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Output.txt")
627 magic_script_path = os.path.join(magic_run_dir,
628 f"{args.effective_cell_name}_MAGIC_{args.magic_pex_mode}_Script.tcl")
630 output_netlist_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}.pex.spice")
631 report_db_path = os.path.join(magic_run_dir, f"{args.effective_cell_name}_MAGIC_report.rdb.gz")
633 os.makedirs(magic_run_dir, exist_ok=True)
635 prepare_magic_script(gds_path=args.effective_gds_path,
636 cell_name=args.effective_cell_name,
637 run_dir_path=magic_run_dir,
638 script_path=magic_script_path,
639 output_netlist_path=output_netlist_path,
640 pex_mode=args.magic_pex_mode,
641 c_threshold=args.magic_cthresh,
642 r_threshold=args.magic_rthresh,
643 tolerance=args.magic_tolerance,
644 halo=args.magic_halo,
645 short_mode=args.magic_short_mode,
646 merge_mode=args.magic_merge_mode)
648 run_magic(exe_path=args.magic_exe_path,
649 magicrc_path=args.magicrc_path,
650 script_path=magic_script_path,
651 log_path=magic_log_path)
653 magic_pex_run = parse_magic_pex_run(Path(magic_run_dir))
655 layout = kdb.Layout()
656 layout.read(args.effective_gds_path)
658 report = rdb.ReportDatabase('')
659 magic_log_analyzer = MagicLogAnalyzer(magic_pex_run=magic_pex_run,
660 report=report,
661 dbu=layout.dbu)
662 magic_log_analyzer.analyze()
663 report.save(report_db_path)
665 rule("Paths")
666 subproc(f"Report DB saved at: {report_db_path}")
667 subproc(f"SPICE netlist saved at: {output_netlist_path}")
669 if os.path.exists(output_netlist_path):
670 if args.output_spice_path and os.path.exists(output_netlist_path):
671 shutil.copy(output_netlist_path, args.output_spice_path)
672 info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")
674 rule("MAGIC PEX SPICE netlist")
675 with open(output_netlist_path, 'r') as f:
676 subproc(f.read())
677 rule()
    def run_fastcap_extraction(self,
                               args: argparse.Namespace,
                               pex_context: KLayoutExtractionContext,
                               lst_file: str):
        """Run FastCap2 on the prepared input, then expand/reduce the netlist.

        Same pipeline as run_fastercap_extraction, but invokes FastCap2 and
        does not produce a CSV report of the expanded netlist.
        """
        rule('FastCap2 Execution')

        # all output artifacts live in the run directory, prefixed with the cell name
        log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Output.txt")
        raw_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Raw.csv")
        avg_csv_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_FastCap2_Result_Matrix_Avg.csv")
        expanded_netlist_path = os.path.join(args.output_dir_path,
                                             f"{args.effective_cell_name}_FastCap2_Expanded_Netlist.cir")
        reduced_netlist_path = os.path.join(args.output_dir_path,
                                            f"{args.effective_cell_name}_FastCap2_Reduced_Netlist.cir")

        run_fastcap(exe_path=args.fastcap_exe_path,
                    lst_file_path=lst_file,
                    log_path=log_path)

        # the capacitance matrix is scraped from the FastCap2 log output
        cap_matrix = fastcap_parse_capacitance_matrix(log_path)
        cap_matrix.write_csv(raw_csv_path)

        cap_matrix = cap_matrix.averaged_off_diagonals()
        cap_matrix.write_csv(avg_csv_path)

        netlist_expander = NetlistExpander()
        expanded_netlist = netlist_expander.expand(
            extracted_netlist=pex_context.lvsdb.netlist(),
            top_cell_name=pex_context.annotated_top_cell.name,
            cap_matrix=cap_matrix,
            blackbox_devices=args.blackbox_devices
        )

        netlist_printer = self.create_netlist_printer(args, ExtractionEngine.FASTCAP2)
        netlist_printer.write(expanded_netlist, expanded_netlist_path)
        info(f"Wrote expanded netlist to: {expanded_netlist_path}")

        # FIXME: should this be already reduced?
        if args.output_spice_path:
            netlist_printer.write(expanded_netlist, args.output_spice_path)
            info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")

        netlist_reducer = NetlistReducer()
        reduced_netlist = netlist_reducer.reduce(netlist=expanded_netlist,
                                                 top_cell_name=pex_context.annotated_top_cell.name)
        netlist_printer.write(reduced_netlist, reduced_netlist_path)

        info(f"Wrote reduced netlist to: {reduced_netlist_path}")
    def run_kpex_2_5d_engine(self,
                             args: argparse.Namespace,
                             pex_context: KLayoutExtractionContext,
                             tech_info: TechInfo,
                             report_path: str,
                             netlist_csv_path: Optional[str],
                             expanded_netlist_path: Optional[str]):
        """Run the built-in 2.5D analytical extraction engine.

        Writes a marker report database to report_path, and optionally a CSV
        summary (netlist_csv_path) and an expanded SPICE netlist
        (expanded_netlist_path) of the extracted R/C values.

        Returns:
            The ExtractionResults produced by the RCX25Extractor.
        """
        # TODO: make this separatly configurable
        # for now we use 0
        # NOTE: stored on args (not locals) — other code may read these back
        args.rcx25d_delaunay_amax = 0
        args.rcx25d_delaunay_b = 0.5

        extractor = RCX25Extractor(pex_context=pex_context,
                                   pex_mode=args.pex_mode,
                                   delaunay_amax=args.rcx25d_delaunay_amax,
                                   delaunay_b=args.rcx25d_delaunay_b,
                                   scale_ratio_to_fit_halo=args.scale_ratio_to_fit_halo,
                                   tech_info=tech_info,
                                   report_path=report_path)
        extraction_results = extractor.extract()

        if netlist_csv_path is not None:
            # TODO: merge this with klayout_pex/klayout/netlist_csv.py

            # semicolon-separated CSV: one row per extracted capacitor/resistor
            with open(netlist_csv_path, 'w', encoding='utf-8') as f:
                summary = extraction_results.summarize()

                f.write('Device;Net1;Net2;Capacitance [fF];Resistance [Ω]\n')
                for idx, (key, cap_value) in enumerate(sorted(summary.capacitances.items())):
                    f.write(f"C{idx + 1};{key.net1};{key.net2};{round(cap_value, 3)};\n")
                for idx, (key, res_value) in enumerate(sorted(summary.resistances.items())):
                    f.write(f"R{idx + 1};{key.net1};{key.net2};;{round(res_value, 3)}\n")

            rule('kpex/2.5D extracted netlist (CSV format)')
            with open(netlist_csv_path, 'r') as f:
                for line in f.readlines():
                    subproc(line[:-1])  # abusing subproc, simply want verbatim

            rule('Extracted netlist CSV')
            subproc(f"{netlist_csv_path}")

        if expanded_netlist_path is not None:
            rule('kpex/2.5D extracted netlist (SPICE format)')
            netlist_expander = RCX25NetlistExpander()
            expanded_netlist = netlist_expander.expand(
                extracted_netlist=pex_context.lvsdb.netlist(),
                top_cell_name=pex_context.annotated_top_cell.name,
                extraction_results=extraction_results,
                blackbox_devices=args.blackbox_devices
            )

            netlist_printer = self.create_netlist_printer(args, ExtractionEngine.K25D)
            netlist_printer.write(expanded_netlist, expanded_netlist_path)
            subproc(f"Wrote expanded netlist to: {expanded_netlist_path}")

            # FIXME: should this be already reduced?
            if args.output_spice_path:
                netlist_printer.write(expanded_netlist, args.output_spice_path)
                info(f"Copied expanded SPICE netlist to: {args.output_spice_path}")

        # NOTE: there was a KLayout bug that some of the categories were lost,
        #       so that the marker browser could not load the report file
        try:
            report = rdb.ReportDatabase('')
            report.load(report_path)  # try loading rdb
        except Exception as e:
            # deliberate broad catch: any load failure triggers the repair path
            rule("Repair broken marker DB")
            warning(f"Detected KLayout bug: RDB can't be loaded due to exception {e}")
            repair_rdb(report_path)

        return extraction_results
799 def setup_logging(self, args: argparse.Namespace):
800 def register_log_file_handler(log_path: str,
801 formatter: Optional[logging.Formatter]) -> logging.Handler:
802 handler = logging.FileHandler(log_path)
803 handler.setLevel(LogLevel.SUBPROCESS)
804 if formatter:
805 handler.setFormatter(formatter)
806 register_additional_handler(handler)
807 return handler
809 def reregister_log_file_handler(handler: logging.Handler,
810 log_path: str,
811 formatter: Optional[logging.Formatter]):
812 deregister_additional_handler(handler)
813 handler.flush()
814 handler.close()
815 os.makedirs(args.output_dir_path, exist_ok=True)
816 new_path = os.path.join(args.output_dir_path, os.path.basename(log_path))
817 if os.path.exists(new_path):
818 ctime = os.path.getctime(new_path)
819 dt = datetime.fromtimestamp(ctime)
820 timestamp = dt.strftime('%Y-%m-%d_%H-%M-%S')
821 backup_path = f"{new_path[:-4]}_{timestamp}.bak.log"
822 shutil.move(new_path, backup_path)
823 log_path = shutil.move(log_path, new_path)
824 register_log_file_handler(log_path, formatter)
826 # setup preliminary logger
827 cli_log_path_plain = os.path.join(args.output_dir_base_path, f"kpex_plain.log")
828 cli_log_path_formatted = os.path.join(args.output_dir_base_path, f"kpex.log")
829 formatter = logging.Formatter('[%(asctime)s] [%(levelname)s] %(message)s')
830 file_handler_plain = register_log_file_handler(cli_log_path_plain, None)
831 file_handler_formatted = register_log_file_handler(cli_log_path_formatted, formatter)
832 try:
833 self.validate_args(args)
834 except ArgumentValidationError:
835 if hasattr(args, 'output_dir_path'):
836 reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
837 reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)
838 sys.exit(1)
839 reregister_log_file_handler(file_handler_plain, cli_log_path_plain, None)
840 reregister_log_file_handler(file_handler_formatted, cli_log_path_formatted, formatter)
842 set_log_level(args.log_level)
844 @staticmethod
845 def modification_date(filename: str) -> datetime:
846 t = os.path.getmtime(filename)
847 return datetime.fromtimestamp(t)
849 def create_lvsdb(self, args: argparse.Namespace) -> kdb.LayoutVsSchematic:
850 lvsdb = kdb.LayoutVsSchematic()
852 match args.input_mode:
853 case InputMode.LVSDB:
854 lvsdb.read(args.lvsdb_path)
855 case InputMode.GDS:
856 lvs_log_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_lvs.log")
857 lvsdb_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}.lvsdb.gz")
858 lvsdb_cache_path = os.path.join(args.cache_dir_path, args.pdk,
859 os.path.splitroot(os.path.abspath(args.gds_path))[-1],
860 f"{args.effective_cell_name}.lvsdb.gz")
862 lvs_needed = True
864 if args.cache_lvs:
865 if not os.path.exists(lvsdb_cache_path):
866 info(f"Cache miss: extracted LVSDB does not exist")
867 subproc(lvsdb_cache_path)
868 elif self.modification_date(lvsdb_cache_path) <= self.modification_date(args.gds_path):
869 info(f"Cache miss: extracted LVSDB is older than the input GDS")
870 subproc(lvsdb_cache_path)
871 else:
872 warning(f"Cache hit: Reusing cached LVSDB")
873 subproc(lvsdb_cache_path)
874 lvs_needed = False
876 if lvs_needed:
877 lvs_runner = LVSRunner()
878 lvs_runner.run_klayout_lvs(exe_path=args.klayout_exe_path,
879 lvs_script=args.lvs_script_path,
880 gds_path=args.effective_gds_path,
881 schematic_path=args.effective_schematic_path,
882 log_path=lvs_log_path,
883 lvsdb_path=lvsdb_path,
884 verbose=args.klayout_lvs_verbose)
885 if args.cache_lvs:
886 cache_dir_path = os.path.dirname(lvsdb_cache_path)
887 if not os.path.exists(cache_dir_path):
888 os.makedirs(cache_dir_path, exist_ok=True)
889 shutil.copy(lvsdb_path, lvsdb_cache_path)
891 lvsdb.read(lvsdb_path)
892 return lvsdb
    def main(self, argv: List[str]):
        """Main CLI entry point: parse arguments, prepare the LVS database and
        extraction context, dump diagnostic layouts, then run the requested
        PEX engines (MAGIC, FasterCap/FastCap, kpex 2.5D).

        argv is the full argument vector including the program name
        (argv[0] is skipped when parsing).
        """
        # Echo the invocation, except for pure version/help runs
        if '-v' not in argv and \
           '--version' not in argv and \
           '-h' not in argv and \
           '--help' not in argv:
            rule('Command line arguments')
            subproc(' '.join(map(shlex.quote, sys.argv)))

        env = Env.from_os_environ()
        args = self.parse_args(arg_list=argv[1:], env=env)

        # Base output dir must exist before logging starts writing into it
        os.makedirs(args.output_dir_base_path, exist_ok=True)
        self.setup_logging(args)

        tech_info = TechInfo.from_json(args.tech_pbjson_path,
                                       dielectric_filter=args.dielectric_filter)

        # CLI --halo overrides the tech file's side halo
        if args.halo is not None:
            tech_info.tech.process_parasitics.side_halo = args.halo

        if args.run_magic:
            rule('MAGIC')
            self.run_magic_extraction(args)

        # no need to run LVS etc if only running magic engine
        if not (args.run_fastcap or args.run_fastercap or args.run_2_5D):
            return

        rule('Prepare LVSDB')
        lvsdb = self.create_lvsdb(args)

        pex_context = KLayoutExtractionContext.prepare_extraction(top_cell=args.effective_cell_name,
                                                                  lvsdb=lvsdb,
                                                                  tech=tech_info,
                                                                  blackbox_devices=args.blackbox_devices)
        rule('Non-empty layers in LVS database')
        for gds_pair, layer_info in pex_context.extracted_layers.items():
            names = [l.lvs_layer_name for l in layer_info.source_layers]
            info(f"{gds_pair} -> ({' '.join(names)})")

        # Dump the annotated and internal layouts for debugging/inspection
        gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_extracted.oas")
        pex_context.annotated_layout.write(gds_path)

        gds_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_l2n_internal.oas")
        pex_context.lvsdb.internal_layout().write(gds_path)

        def dump_layers(cell: str,
                        layers: List[KLayoutExtractedLayerInfo],
                        layout_dump_path: str):
            # Write the given extracted layers into a fresh single-cell layout file
            layout = kdb.Layout()
            layout.dbu = lvsdb.internal_layout().dbu  # keep the LVSDB's database unit

            top_cell = layout.create_cell(cell)
            for ulyr in layers:
                li = kdb.LayerInfo(*ulyr.gds_pair)
                li.name = ulyr.lvs_layer_name
                layer = layout.insert_layer(li)
                layout.insert(top_cell.cell_index(), layer, ulyr.region.dup())

            layout.write(layout_dump_path)

        # Dump layers the LVS deck produced without a name (diagnostic aid)
        if len(pex_context.unnamed_layers) >= 1:
            layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_unnamed_LVS_layers.gds.gz")
            dump_layers(cell=args.effective_cell_name,
                        layers=pex_context.unnamed_layers,
                        layout_dump_path=layout_dump_path)

        # Dump all non-empty extracted layers; without any, extraction cannot proceed
        if len(pex_context.extracted_layers) >= 1:
            layout_dump_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_nonempty_LVS_layers.gds.gz")
            nonempty_layers = [l \
                               for layers in pex_context.extracted_layers.values() \
                               for l in layers.source_layers]
            dump_layers(cell=args.effective_cell_name,
                        layers=nonempty_layers,
                        layout_dump_path=layout_dump_path)
        else:
            error("No extracted layers found")
            sys.exit(1)

        # FasterCap and FastCap share the same input (.lst) file
        if args.run_fastcap or args.run_fastercap:
            lst_file = self.build_fastercap_input(args=args,
                                                  pex_context=pex_context,
                                                  tech_info=tech_info)
            if args.run_fastercap:
                self.run_fastercap_extraction(args=args,
                                              pex_context=pex_context,
                                              lst_file=lst_file)
            if args.run_fastcap:
                self.run_fastcap_extraction(args=args,
                                            pex_context=pex_context,
                                            lst_file=lst_file)

        if args.run_2_5D:
            rule("kpex/2.5D PEX Engine")
            report_path = os.path.join(args.output_dir_path, f"{args.effective_cell_name}_k25d_pex_report.rdb.gz")
            netlist_csv_path = os.path.abspath(os.path.join(args.output_dir_path,
                                                            f"{args.effective_cell_name}_k25d_pex_netlist.csv"))
            netlist_spice_path = os.path.abspath(os.path.join(args.output_dir_path,
                                                              f"{args.effective_cell_name}_k25d_pex_netlist.spice"))

            self._rcx25_extraction_results = self.run_kpex_2_5d_engine(  # NOTE: store for test case
                args=args,
                pex_context=pex_context,
                tech_info=tech_info,
                report_path=report_path,
                netlist_csv_path=netlist_csv_path,
                expanded_netlist_path=netlist_spice_path
            )

            self._rcx25_extracted_csv_path = netlist_csv_path
1005 @property
1006 def rcx25_extraction_results(self) -> ExtractionResults:
1007 if not hasattr(self, '_rcx25_extraction_results'):
1008 raise Exception('rcx25_extraction_results is not initialized, was run_kpex_2_5d_engine called?')
1009 return self._rcx25_extraction_results
1011 @property
1012 def rcx25_extracted_csv_path(self) -> str:
1013 if not hasattr(self, '_rcx25_extracted_csv_path'):
1014 raise Exception('rcx25_extracted_csv_path is not initialized, was run_kpex_2_5d_engine called?')
1015 return self._rcx25_extracted_csv_path
1017 @property
1018 def fastercap_extracted_csv_path(self) -> str:
1019 if not hasattr(self, '_fastercap_extracted_csv_path'):
1020 raise Exception('fastercap_extracted_csv_path is not initialized, was run_fastercap_extraction called?')
1021 return self._fastercap_extracted_csv_path
# Script entry point: run the CLI with the process's real argument vector.
if __name__ == "__main__":
    cli = KpexCLI()
    cli.main(sys.argv)