From 2324a74aaa0acf63f2a4b98799667f9f793bb312 Mon Sep 17 00:00:00 2001 From: martinjankoehler Date: Thu, 5 Dec 2024 19:44:08 +0000 Subject: [PATCH] deploy: 173b153606d78ca45594f28d387c452235939f70 --- pycov/class_index.html | 795 ------- pycov/coverage_html_cb_497bf287.js | 733 ------ pycov/coverage_html_cb_6fb7b396.js | 733 ------ pycov/favicon_32_cb_58284776.png | Bin 1732 -> 0 bytes pycov/function_index.html | 2083 ----------------- pycov/index.html | 398 ---- pycov/keybd_closed_cb_ce680311.png | Bin 9004 -> 0 bytes pycov/status.json | 1 - pycov/style_cb_718ce007.css | 337 --- pycov/style_cb_8e611ae1.css | 337 --- pycov/z_143e04ff0a847ff6___init___py.html | 120 - ..._143e04ff0a847ff6_argparse_helpers_py.html | 149 -- ...z_143e04ff0a847ff6_multiple_choice_py.html | 167 -- ...3ca105fd3f6_process_parasitics_pb2_py.html | 151 -- ...1e5163ca105fd3f6_process_stack_pb2_py.html | 155 -- pycov/z_1e5163ca105fd3f6_tech_pb2_py.html | 141 -- ...1ea3ee988f9971_fastcap_runner_test_py.html | 150 -- pycov/z_2a6b66cd9c831353___init___py.html | 124 - pycov/z_2a6b66cd9c831353_lvs_runner_py.html | 194 -- ...z_2a6b66cd9c831353_lvsdb_extractor_py.html | 374 --- pycov/z_2a6b66cd9c831353_netlist_csv_py.html | 156 -- ..._2a6b66cd9c831353_netlist_expander_py.html | 245 -- ...z_2a6b66cd9c831353_netlist_reducer_py.html | 160 -- pycov/z_2a6b66cd9c831353_repair_rdb_py.html | 234 -- ...8ddab25760f5a7_fastcap_runner_test_py.html | 150 -- pycov/z_2dc81a3a091b1002_rcx25_test_py.html | 409 ---- pycov/z_2ea764e3f741ac46___init___py.html | 124 - pycov/z_2ea764e3f741ac46_lvs_runner_py.html | 194 -- ...z_2ea764e3f741ac46_lvsdb_extractor_py.html | 374 --- pycov/z_2ea764e3f741ac46_netlist_csv_py.html | 156 -- ..._2ea764e3f741ac46_netlist_expander_py.html | 245 -- ...z_2ea764e3f741ac46_netlist_reducer_py.html | 160 -- pycov/z_2ea764e3f741ac46_repair_rdb_py.html | 234 -- pycov/z_31e83241eddb0cfa___init___py.html | 120 - pycov/z_31e83241eddb0cfa_kpex_cli_py.html | 853 ------- pycov/z_31e83241eddb0cfa_tech_info_py.html | 382 --- pycov/z_31e83241eddb0cfa_version_py.html | 121 - pycov/z_45b499fe6cab3296___init___py.html | 120 - ...296_fastercap_model_generator_test_py.html | 201 -- ...99fe6cab3296_fastercap_runner_test_py.html | 149 -- pycov/z_4832265eea321c21___init___py.html | 120 - pycov/z_4832265eea321c21_magic_runner_py.html | 250 -- pycov/z_48f13015c956926b___init___py.html | 120 - ...26b_fastercap_model_generator_test_py.html | 201 -- ...3015c956926b_fastercap_runner_test_py.html | 149 -- pycov/z_4c73bcae445d81c6___init___py.html | 120 - ...ae445d81c6_capacitance_matrix_test_py.html | 167 -- pycov/z_52482777700ec44a___init___py.html | 120 - ..._52482777700ec44a_argparse_helpers_py.html | 149 -- ...z_52482777700ec44a_multiple_choice_py.html | 167 -- pycov/z_588bb9d7e9b47fd3___init___py.html | 120 - ...88bb9d7e9b47fd3_extraction_results_py.html | 261 --- pycov/z_588bb9d7e9b47fd3_extractor_py.html | 840 ------- pycov/z_5aed77b868240c56___init___py.html | 141 -- pycov/z_5aed77b868240c56_logger_py.html | 280 --- ...z_5f30060c77e65d78_lvs_runner_test_py.html | 160 -- ...060c77e65d78_netlist_expander_test_py.html | 182 -- ...0060c77e65d78_netlist_reducer_test_py.html | 167 -- pycov/z_741a08911aeaedad___init___py.html | 120 - ...911aeaedad_capacitance_matrix_test_py.html | 167 -- ...77067d4aa92_process_parasitics_pb2_py.html | 151 -- ...8d81377067d4aa92_process_stack_pb2_py.html | 155 -- pycov/z_8d81377067d4aa92_tech_pb2_py.html | 141 -- pycov/z_95258413a42419dc_rcx25_test_py.html | 409 ---- 
...z_9747eacc0c5fa802_lvs_runner_test_py.html | 160 -- ...eacc0c5fa802_netlist_expander_test_py.html | 182 -- ...7eacc0c5fa802_netlist_reducer_test_py.html | 167 -- pycov/z_a44f0ac069e85531___init___py.html | 120 - pycov/z_a5841ccd503d0903___init___py.html | 120 - ...cd503d0903_fastercap_input_builder_py.html | 437 ---- ...503d0903_fastercap_model_generator_py.html | 1136 --------- ..._a5841ccd503d0903_fastercap_runner_py.html | 225 -- pycov/z_b5137d8b20ededf9___init___py.html | 120 - ...5137d8b20ededf9_capacitance_matrix_py.html | 184 -- pycov/z_b7daf585f790d5fa___init___py.html | 120 - ...7daf585f790d5fa_extraction_results_py.html | 261 --- pycov/z_b7daf585f790d5fa_extractor_py.html | 840 ------- pycov/z_b89b04cf284a76bf___init___py.html | 141 -- pycov/z_b89b04cf284a76bf_logger_py.html | 280 --- pycov/z_bb4acdb2528096e4___init___py.html | 120 - pycov/z_bb4acdb2528096e4_magic_runner_py.html | 250 -- pycov/z_c489968eb1a5e358___init___py.html | 120 - ...8eb1a5e358_fastercap_input_builder_py.html | 437 ---- ...b1a5e358_fastercap_model_generator_py.html | 1136 --------- ..._c489968eb1a5e358_fastercap_runner_py.html | 225 -- .../z_e404b588faff9084_fastcap_runner_py.html | 239 -- pycov/z_f40df6a530c8cf33___init___py.html | 120 - ...40df6a530c8cf33_capacitance_matrix_py.html | 184 -- .../z_f568a0cfbd87c836_fastcap_runner_py.html | 239 -- 89 files changed, 24349 deletions(-) delete mode 100644 pycov/class_index.html delete mode 100644 pycov/coverage_html_cb_497bf287.js delete mode 100644 pycov/coverage_html_cb_6fb7b396.js delete mode 100644 pycov/favicon_32_cb_58284776.png delete mode 100644 pycov/function_index.html delete mode 100644 pycov/index.html delete mode 100644 pycov/keybd_closed_cb_ce680311.png delete mode 100644 pycov/status.json delete mode 100644 pycov/style_cb_718ce007.css delete mode 100644 pycov/style_cb_8e611ae1.css delete mode 100644 pycov/z_143e04ff0a847ff6___init___py.html delete mode 100644 pycov/z_143e04ff0a847ff6_argparse_helpers_py.html delete mode 100644 pycov/z_143e04ff0a847ff6_multiple_choice_py.html delete mode 100644 pycov/z_1e5163ca105fd3f6_process_parasitics_pb2_py.html delete mode 100644 pycov/z_1e5163ca105fd3f6_process_stack_pb2_py.html delete mode 100644 pycov/z_1e5163ca105fd3f6_tech_pb2_py.html delete mode 100644 pycov/z_2a1ea3ee988f9971_fastcap_runner_test_py.html delete mode 100644 pycov/z_2a6b66cd9c831353___init___py.html delete mode 100644 pycov/z_2a6b66cd9c831353_lvs_runner_py.html delete mode 100644 pycov/z_2a6b66cd9c831353_lvsdb_extractor_py.html delete mode 100644 pycov/z_2a6b66cd9c831353_netlist_csv_py.html delete mode 100644 pycov/z_2a6b66cd9c831353_netlist_expander_py.html delete mode 100644 pycov/z_2a6b66cd9c831353_netlist_reducer_py.html delete mode 100644 pycov/z_2a6b66cd9c831353_repair_rdb_py.html delete mode 100644 pycov/z_2a8ddab25760f5a7_fastcap_runner_test_py.html delete mode 100644 pycov/z_2dc81a3a091b1002_rcx25_test_py.html delete mode 100644 pycov/z_2ea764e3f741ac46___init___py.html delete mode 100644 pycov/z_2ea764e3f741ac46_lvs_runner_py.html delete mode 100644 pycov/z_2ea764e3f741ac46_lvsdb_extractor_py.html delete mode 100644 pycov/z_2ea764e3f741ac46_netlist_csv_py.html delete mode 100644 pycov/z_2ea764e3f741ac46_netlist_expander_py.html delete mode 100644 pycov/z_2ea764e3f741ac46_netlist_reducer_py.html delete mode 100644 pycov/z_2ea764e3f741ac46_repair_rdb_py.html delete mode 100644 pycov/z_31e83241eddb0cfa___init___py.html delete mode 100644 pycov/z_31e83241eddb0cfa_kpex_cli_py.html delete mode 100644 
pycov/z_31e83241eddb0cfa_tech_info_py.html delete mode 100644 pycov/z_31e83241eddb0cfa_version_py.html delete mode 100644 pycov/z_45b499fe6cab3296___init___py.html delete mode 100644 pycov/z_45b499fe6cab3296_fastercap_model_generator_test_py.html delete mode 100644 pycov/z_45b499fe6cab3296_fastercap_runner_test_py.html delete mode 100644 pycov/z_4832265eea321c21___init___py.html delete mode 100644 pycov/z_4832265eea321c21_magic_runner_py.html delete mode 100644 pycov/z_48f13015c956926b___init___py.html delete mode 100644 pycov/z_48f13015c956926b_fastercap_model_generator_test_py.html delete mode 100644 pycov/z_48f13015c956926b_fastercap_runner_test_py.html delete mode 100644 pycov/z_4c73bcae445d81c6___init___py.html delete mode 100644 pycov/z_4c73bcae445d81c6_capacitance_matrix_test_py.html delete mode 100644 pycov/z_52482777700ec44a___init___py.html delete mode 100644 pycov/z_52482777700ec44a_argparse_helpers_py.html delete mode 100644 pycov/z_52482777700ec44a_multiple_choice_py.html delete mode 100644 pycov/z_588bb9d7e9b47fd3___init___py.html delete mode 100644 pycov/z_588bb9d7e9b47fd3_extraction_results_py.html delete mode 100644 pycov/z_588bb9d7e9b47fd3_extractor_py.html delete mode 100644 pycov/z_5aed77b868240c56___init___py.html delete mode 100644 pycov/z_5aed77b868240c56_logger_py.html delete mode 100644 pycov/z_5f30060c77e65d78_lvs_runner_test_py.html delete mode 100644 pycov/z_5f30060c77e65d78_netlist_expander_test_py.html delete mode 100644 pycov/z_5f30060c77e65d78_netlist_reducer_test_py.html delete mode 100644 pycov/z_741a08911aeaedad___init___py.html delete mode 100644 pycov/z_741a08911aeaedad_capacitance_matrix_test_py.html delete mode 100644 pycov/z_8d81377067d4aa92_process_parasitics_pb2_py.html delete mode 100644 pycov/z_8d81377067d4aa92_process_stack_pb2_py.html delete mode 100644 pycov/z_8d81377067d4aa92_tech_pb2_py.html delete mode 100644 pycov/z_95258413a42419dc_rcx25_test_py.html delete mode 100644 pycov/z_9747eacc0c5fa802_lvs_runner_test_py.html delete mode 100644 pycov/z_9747eacc0c5fa802_netlist_expander_test_py.html delete mode 100644 pycov/z_9747eacc0c5fa802_netlist_reducer_test_py.html delete mode 100644 pycov/z_a44f0ac069e85531___init___py.html delete mode 100644 pycov/z_a5841ccd503d0903___init___py.html delete mode 100644 pycov/z_a5841ccd503d0903_fastercap_input_builder_py.html delete mode 100644 pycov/z_a5841ccd503d0903_fastercap_model_generator_py.html delete mode 100644 pycov/z_a5841ccd503d0903_fastercap_runner_py.html delete mode 100644 pycov/z_b5137d8b20ededf9___init___py.html delete mode 100644 pycov/z_b5137d8b20ededf9_capacitance_matrix_py.html delete mode 100644 pycov/z_b7daf585f790d5fa___init___py.html delete mode 100644 pycov/z_b7daf585f790d5fa_extraction_results_py.html delete mode 100644 pycov/z_b7daf585f790d5fa_extractor_py.html delete mode 100644 pycov/z_b89b04cf284a76bf___init___py.html delete mode 100644 pycov/z_b89b04cf284a76bf_logger_py.html delete mode 100644 pycov/z_bb4acdb2528096e4___init___py.html delete mode 100644 pycov/z_bb4acdb2528096e4_magic_runner_py.html delete mode 100644 pycov/z_c489968eb1a5e358___init___py.html delete mode 100644 pycov/z_c489968eb1a5e358_fastercap_input_builder_py.html delete mode 100644 pycov/z_c489968eb1a5e358_fastercap_model_generator_py.html delete mode 100644 pycov/z_c489968eb1a5e358_fastercap_runner_py.html delete mode 100644 pycov/z_e404b588faff9084_fastcap_runner_py.html delete mode 100644 pycov/z_f40df6a530c8cf33___init___py.html delete mode 100644 pycov/z_f40df6a530c8cf33_capacitance_matrix_py.html 
delete mode 100644 pycov/z_f568a0cfbd87c836_fastcap_runner_py.html

diff --git a/pycov/class_index.html b/pycov/class_index.html
deleted file mode 100644
index 17c72afc..00000000
--- a/pycov/class_index.html
+++ /dev/null
@@ -1,795 +0,0 @@
[Deleted: the generated coverage.py HTML class index. Page header: "Coverage report: 56%", with a filter box, Files / Functions / Classes navigation, and the footer "coverage.py v7.6.8, created at 2024-12-05 16:38 +0000". Its per-file class coverage table follows.]
File  class  statements  missing  excluded  coverage
build\python_kpex_protobuf\process_parasitics_pb2.py  (no class)  33  21  0  36%
build\python_kpex_protobuf\process_stack_pb2.py  (no class)  37  25  0  32%
build\python_kpex_protobuf\tech_pb2.py  (no class)  23  9  0  61%
kpex\__init__.py  (no class)  0  0  0  100%
kpex\common\__init__.py  (no class)  0  0  0  100%
kpex\common\capacitance_matrix.py  CapacitanceMatrix  33  2  0  94%
kpex\common\capacitance_matrix.py  (no class)  18  0  0  100%
kpex\fastcap\fastcap_runner.py  (no class)  62  28  0  55%
kpex\fastercap\__init__.py  (no class)  0  0  0  100%
kpex\fastercap\fastercap_input_builder.py  FasterCapInputBuilder  153  153  0  0%
kpex\fastercap\fastercap_input_builder.py  (no class)  18  0  0  100%
kpex\fastercap\fastercap_model_generator.py  FasterCapModelBuilder  57  4  0  93%
kpex\fastercap\fastercap_model_generator.py  HDielKey  3  0  0  100%
kpex\fastercap\fastercap_model_generator.py  HCondKey  2  0  0  100%
kpex\fastercap\fastercap_model_generator.py  VKey  0  0  0  100%
kpex\fastercap\fastercap_model_generator.py  Point  3  2  0  33%
kpex\fastercap\fastercap_model_generator.py  Triangle  14  9  0  36%
kpex\fastercap\fastercap_model_generator.py  Edge  2  0  0  100%
kpex\fastercap\fastercap_model_generator.py  FasterCapModelGenerator  453  94  0  79%
kpex\fastercap\fastercap_model_generator.py  (no class)  124  0  0  100%
kpex\fastercap\fastercap_runner.py  (no class)  56  29  0  48%
kpex\klayout\__init__.py  (no class)  1  0  0  100%
kpex\klayout\lvs_runner.py  LVSRunner  18  18  0  0%
kpex\klayout\lvs_runner.py  (no class)  8  0  0  100%
kpex\klayout\lvsdb_extractor.py  KLayoutExtractedLayerInfo  0  0  0  100%
kpex\klayout\lvsdb_extractor.py  KLayoutMergedExtractedLayerInfo  0  0  0  100%
kpex\klayout\lvsdb_extractor.py  KLayoutExtractionContext  93  43  0  54%
kpex\klayout\lvsdb_extractor.py  (no class)  39  0  0  100%
kpex\klayout\netlist_csv.py  NetlistCSVWriter  17  17  0  0%
kpex\klayout\netlist_csv.py  (no class)  6  0  0  100%
kpex\klayout\netlist_expander.py  NetlistExpander  61  2  0  97%
kpex\klayout\netlist_expander.py  (no class)  9  0  0  100%
kpex\klayout\netlist_reducer.py  NetlistReducer  16  4  0  75%
kpex\klayout\netlist_reducer.py  (no class)  6  0  0  100%
kpex\klayout\repair_rdb.py  (no class)  79  66  0  16%
kpex\kpex_cli.py  InputMode  0  0  0  100%
kpex\kpex_cli.py  KpexCLI  353  229  0  35%
kpex\kpex_cli.py  (no class)  59  2  0  97%
kpex\log\__init__.py  (no class)  1  0  0  100%
kpex\log\logger.py  LogLevel  1  1  0  0%
kpex\log\logger.py  LogLevelFormatter  6  0  0  100%
kpex\log\logger.py  LogLevelFilter  6  0  0  100%
kpex\log\logger.py  (no class)  66  3  10  95%
kpex\magic\__init__.py  (no class)  0  0  0  100%
kpex\magic\magic_runner.py  MagicPEXMode  0  0  0  100%
kpex\magic\magic_runner.py  (no class)  44  29  0  34%
kpex\rcx25\__init__.py  (no class)  0  0  0  100%
kpex\rcx25\extraction_results.py  NodeRegion  0  0  0  100%
kpex\rcx25\extraction_results.py  SidewallKey  0  0  0  100%
kpex\rcx25\extraction_results.py  SidewallCap  0  0  0  100%
kpex\rcx25\extraction_results.py  OverlapKey  0  0  0  100%
kpex\rcx25\extraction_results.py  OverlapCap  0  0  0  100%
kpex\rcx25\extraction_results.py  SideOverlapKey  1  1  0  0%
kpex\rcx25\extraction_results.py  SideOverlapCap  1  1  0  0%
kpex\rcx25\extraction_results.py  NetCoupleKey  4  4  0  0%
kpex\rcx25\extraction_results.py  ExtractionSummary  5  5  0  0%
kpex\rcx25\extraction_results.py  CellExtractionResults  4  4  0  0%
kpex\rcx25\extraction_results.py  ExtractionResults  2  2  0  0%
kpex\rcx25\extraction_results.py  (no class)  74  0  0  100%
kpex\rcx25\extractor.py  RCExtractor  253  253  0  0%
kpex\rcx25\extractor.py  RCExtractor.extract_cell.FringeEdgeNeighborhoodVisitor  126  126  0  0%
kpex\rcx25\extractor.py  (no class)  20  0  0  100%
kpex\tech_info.py  TechInfo  97  87  0  10%
kpex\tech_info.py  (no class)  60  0  0  100%
kpex\util\__init__.py  (no class)  0  0  0  100%
kpex\util\argparse_helpers.py  (no class)  20  6  0  70%
kpex\util\multiple_choice.py  MultipleChoicePattern  21  11  0  48%
kpex\util\multiple_choice.py  (no class)  7  0  0  100%
kpex\version.py  (no class)  1  0  0  100%
tests\__init__.py  (no class)  0  0  0  100%
tests\common\__init__.py  (no class)  0  0  0  100%
tests\common\capacitance_matrix_test.py  Test  21  0  0  100%
tests\common\capacitance_matrix_test.py  (no class)  15  0  0  100%
tests\fastcap\fastcap_runner_test.py  Test  12  0  0  100%
tests\fastcap\fastcap_runner_test.py  (no class)  10  0  0  100%
tests\fastercap\__init__.py  (no class)  0  0  0  100%
tests\fastercap\fastercap_model_generator_test.py  (no class)  60  1  0  98%
tests\fastercap\fastercap_runner_test.py  Test  11  0  0  100%
tests\fastercap\fastercap_runner_test.py  (no class)  10  0  0  100%
tests\klayout\lvs_runner_test.py  Test  11  11  0  0%
tests\klayout\lvs_runner_test.py  (no class)  13  0  0  100%
tests\klayout\netlist_expander_test.py  Test  18  0  0  100%
tests\klayout\netlist_expander_test.py  (no class)  19  0  0  100%
tests\klayout\netlist_reducer_test.py  Test  16  0  0  100%
tests\klayout\netlist_reducer_test.py  (no class)  17  0  0  100%
tests\rcx25\rcx25_test.py  (no class)  95  17  0  82%
Total  3004  1319  10  56%
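Each percentage in this table is covered statements divided by total statements, where covered = statements - missing and excluded lines are already removed from the statement count. The short JavaScript sketch below is illustrative only: the helper name coveragePercent and the reuse of the Total row's numbers are assumptions, not part of this patch, though the arithmetic mirrors how the report's own filter code recomputes the footer percentage from numerator/denominator pairs.

function coveragePercent(statements, missing) {
    // covered / statements, expressed as a percentage; empty files count as 100%
    const covered = statements - missing;
    return statements ? (covered * 100 / statements) : 100;
}

// Total row above: 3004 statements, 1319 missing -> 1685 covered
console.log(coveragePercent(3004, 1319).toFixed(0) + "%");  // prints "56%"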
- - - diff --git a/pycov/coverage_html_cb_497bf287.js b/pycov/coverage_html_cb_497bf287.js deleted file mode 100644 index 1a98b600..00000000 --- a/pycov/coverage_html_cb_497bf287.js +++ /dev/null @@ -1,733 +0,0 @@ -// Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 -// For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt - -// Coverage.py HTML report browser code. -/*jslint browser: true, sloppy: true, vars: true, plusplus: true, maxerr: 50, indent: 4 */ -/*global coverage: true, document, window, $ */ - -coverage = {}; - -// General helpers -function debounce(callback, wait) { - let timeoutId = null; - return function(...args) { - clearTimeout(timeoutId); - timeoutId = setTimeout(() => { - callback.apply(this, args); - }, wait); - }; -}; - -function checkVisible(element) { - const rect = element.getBoundingClientRect(); - const viewBottom = Math.max(document.documentElement.clientHeight, window.innerHeight); - const viewTop = 30; - return !(rect.bottom < viewTop || rect.top >= viewBottom); -} - -function on_click(sel, fn) { - const elt = document.querySelector(sel); - if (elt) { - elt.addEventListener("click", fn); - } -} - -// Helpers for table sorting -function getCellValue(row, column = 0) { - const cell = row.cells[column] // nosemgrep: eslint.detect-object-injection - if (cell.childElementCount == 1) { - var child = cell.firstElementChild; - if (child.tagName === "A") { - child = child.firstElementChild; - } - if (child instanceof HTMLDataElement && child.value) { - return child.value; - } - } - return cell.innerText || cell.textContent; -} - -function rowComparator(rowA, rowB, column = 0) { - let valueA = getCellValue(rowA, column); - let valueB = getCellValue(rowB, column); - if (!isNaN(valueA) && !isNaN(valueB)) { - return valueA - valueB; - } - return valueA.localeCompare(valueB, undefined, {numeric: true}); -} - -function sortColumn(th) { - // Get the current sorting direction of the selected header, - // clear state on other headers and then set the new sorting direction. - const currentSortOrder = th.getAttribute("aria-sort"); - [...th.parentElement.cells].forEach(header => header.setAttribute("aria-sort", "none")); - var direction; - if (currentSortOrder === "none") { - direction = th.dataset.defaultSortOrder || "ascending"; - } - else if (currentSortOrder === "ascending") { - direction = "descending"; - } - else { - direction = "ascending"; - } - th.setAttribute("aria-sort", direction); - - const column = [...th.parentElement.cells].indexOf(th) - - // Sort all rows and afterwards append them in order to move them in the DOM. - Array.from(th.closest("table").querySelectorAll("tbody tr")) - .sort((rowA, rowB) => rowComparator(rowA, rowB, column) * (direction === "ascending" ? 1 : -1)) - .forEach(tr => tr.parentElement.appendChild(tr)); - - // Save the sort order for next time. - if (th.id !== "region") { - let th_id = "file"; // Sort by file if we don't have a column id - let current_direction = direction; - const stored_list = localStorage.getItem(coverage.INDEX_SORT_STORAGE); - if (stored_list) { - ({th_id, direction} = JSON.parse(stored_list)) - } - localStorage.setItem(coverage.INDEX_SORT_STORAGE, JSON.stringify({ - "th_id": th.id, - "direction": current_direction - })); - if (th.id !== th_id || document.getElementById("region")) { - // Sort column has changed, unset sorting by function or class. 
- localStorage.setItem(coverage.SORTED_BY_REGION, JSON.stringify({ - "by_region": false, - "region_direction": current_direction - })); - } - } - else { - // Sort column has changed to by function or class, remember that. - localStorage.setItem(coverage.SORTED_BY_REGION, JSON.stringify({ - "by_region": true, - "region_direction": direction - })); - } -} - -// Find all the elements with data-shortcut attribute, and use them to assign a shortcut key. -coverage.assign_shortkeys = function () { - document.querySelectorAll("[data-shortcut]").forEach(element => { - document.addEventListener("keypress", event => { - if (event.target.tagName.toLowerCase() === "input") { - return; // ignore keypress from search filter - } - if (event.key === element.dataset.shortcut) { - element.click(); - } - }); - }); -}; - -// Create the events for the filter box. -coverage.wire_up_filter = function () { - // Populate the filter and hide100 inputs if there are saved values for them. - const saved_filter_value = localStorage.getItem(coverage.FILTER_STORAGE); - if (saved_filter_value) { - document.getElementById("filter").value = saved_filter_value; - } - const saved_hide100_value = localStorage.getItem(coverage.HIDE100_STORAGE); - if (saved_hide100_value) { - document.getElementById("hide100").checked = JSON.parse(saved_hide100_value); - } - - // Cache elements. - const table = document.querySelector("table.index"); - const table_body_rows = table.querySelectorAll("tbody tr"); - const no_rows = document.getElementById("no_rows"); - - // Observe filter keyevents. - const filter_handler = (event => { - // Keep running total of each metric, first index contains number of shown rows - const totals = new Array(table.rows[0].cells.length).fill(0); - // Accumulate the percentage as fraction - totals[totals.length - 1] = { "numer": 0, "denom": 0 }; // nosemgrep: eslint.detect-object-injection - - var text = document.getElementById("filter").value; - // Store filter value - localStorage.setItem(coverage.FILTER_STORAGE, text); - const casefold = (text === text.toLowerCase()); - const hide100 = document.getElementById("hide100").checked; - // Store hide value. - localStorage.setItem(coverage.HIDE100_STORAGE, JSON.stringify(hide100)); - - // Hide / show elements. - table_body_rows.forEach(row => { - var show = false; - // Check the text filter. - for (let column = 0; column < totals.length; column++) { - cell = row.cells[column]; - if (cell.classList.contains("name")) { - var celltext = cell.textContent; - if (casefold) { - celltext = celltext.toLowerCase(); - } - if (celltext.includes(text)) { - show = true; - } - } - } - - // Check the "hide covered" filter. 
- if (show && hide100) { - const [numer, denom] = row.cells[row.cells.length - 1].dataset.ratio.split(" "); - show = (numer !== denom); - } - - if (!show) { - // hide - row.classList.add("hidden"); - return; - } - - // show - row.classList.remove("hidden"); - totals[0]++; - - for (let column = 0; column < totals.length; column++) { - // Accumulate dynamic totals - cell = row.cells[column] // nosemgrep: eslint.detect-object-injection - if (cell.classList.contains("name")) { - continue; - } - if (column === totals.length - 1) { - // Last column contains percentage - const [numer, denom] = cell.dataset.ratio.split(" "); - totals[column]["numer"] += parseInt(numer, 10); // nosemgrep: eslint.detect-object-injection - totals[column]["denom"] += parseInt(denom, 10); // nosemgrep: eslint.detect-object-injection - } - else { - totals[column] += parseInt(cell.textContent, 10); // nosemgrep: eslint.detect-object-injection - } - } - }); - - // Show placeholder if no rows will be displayed. - if (!totals[0]) { - // Show placeholder, hide table. - no_rows.style.display = "block"; - table.style.display = "none"; - return; - } - - // Hide placeholder, show table. - no_rows.style.display = null; - table.style.display = null; - - const footer = table.tFoot.rows[0]; - // Calculate new dynamic sum values based on visible rows. - for (let column = 0; column < totals.length; column++) { - // Get footer cell element. - const cell = footer.cells[column]; // nosemgrep: eslint.detect-object-injection - if (cell.classList.contains("name")) { - continue; - } - - // Set value into dynamic footer cell element. - if (column === totals.length - 1) { - // Percentage column uses the numerator and denominator, - // and adapts to the number of decimal places. - const match = /\.([0-9]+)/.exec(cell.textContent); - const places = match ? match[1].length : 0; - const { numer, denom } = totals[column]; // nosemgrep: eslint.detect-object-injection - cell.dataset.ratio = `${numer} ${denom}`; - // Check denom to prevent NaN if filtered files contain no statements - cell.textContent = denom - ? `${(numer * 100 / denom).toFixed(places)}%` - : `${(100).toFixed(places)}%`; - } - else { - cell.textContent = totals[column]; // nosemgrep: eslint.detect-object-injection - } - } - }); - - document.getElementById("filter").addEventListener("input", debounce(filter_handler)); - document.getElementById("hide100").addEventListener("input", debounce(filter_handler)); - - // Trigger change event on setup, to force filter on page refresh - // (filter value may still be present). - document.getElementById("filter").dispatchEvent(new Event("input")); - document.getElementById("hide100").dispatchEvent(new Event("input")); -}; -coverage.FILTER_STORAGE = "COVERAGE_FILTER_VALUE"; -coverage.HIDE100_STORAGE = "COVERAGE_HIDE100_VALUE"; - -// Set up the click-to-sort columns. 
-coverage.wire_up_sorting = function () { - document.querySelectorAll("[data-sortable] th[aria-sort]").forEach( - th => th.addEventListener("click", e => sortColumn(e.target)) - ); - - // Look for a localStorage item containing previous sort settings: - let th_id = "file", direction = "ascending"; - const stored_list = localStorage.getItem(coverage.INDEX_SORT_STORAGE); - if (stored_list) { - ({th_id, direction} = JSON.parse(stored_list)); - } - let by_region = false, region_direction = "ascending"; - const sorted_by_region = localStorage.getItem(coverage.SORTED_BY_REGION); - if (sorted_by_region) { - ({ - by_region, - region_direction - } = JSON.parse(sorted_by_region)); - } - - const region_id = "region"; - if (by_region && document.getElementById(region_id)) { - direction = region_direction; - } - // If we are in a page that has a column with id of "region", sort on - // it if the last sort was by function or class. - let th; - if (document.getElementById(region_id)) { - th = document.getElementById(by_region ? region_id : th_id); - } - else { - th = document.getElementById(th_id); - } - th.setAttribute("aria-sort", direction === "ascending" ? "descending" : "ascending"); - th.click() -}; - -coverage.INDEX_SORT_STORAGE = "COVERAGE_INDEX_SORT_2"; -coverage.SORTED_BY_REGION = "COVERAGE_SORT_REGION"; - -// Loaded on index.html -coverage.index_ready = function () { - coverage.assign_shortkeys(); - coverage.wire_up_filter(); - coverage.wire_up_sorting(); - - on_click(".button_prev_file", coverage.to_prev_file); - on_click(".button_next_file", coverage.to_next_file); - - on_click(".button_show_hide_help", coverage.show_hide_help); -}; - -// -- pyfile stuff -- - -coverage.LINE_FILTERS_STORAGE = "COVERAGE_LINE_FILTERS"; - -coverage.pyfile_ready = function () { - // If we're directed to a particular line number, highlight the line. - var frag = location.hash; - if (frag.length > 2 && frag[1] === "t") { - document.querySelector(frag).closest(".n").classList.add("highlight"); - coverage.set_sel(parseInt(frag.substr(2), 10)); - } - else { - coverage.set_sel(0); - } - - on_click(".button_toggle_run", coverage.toggle_lines); - on_click(".button_toggle_mis", coverage.toggle_lines); - on_click(".button_toggle_exc", coverage.toggle_lines); - on_click(".button_toggle_par", coverage.toggle_lines); - - on_click(".button_next_chunk", coverage.to_next_chunk_nicely); - on_click(".button_prev_chunk", coverage.to_prev_chunk_nicely); - on_click(".button_top_of_page", coverage.to_top); - on_click(".button_first_chunk", coverage.to_first_chunk); - - on_click(".button_prev_file", coverage.to_prev_file); - on_click(".button_next_file", coverage.to_next_file); - on_click(".button_to_index", coverage.to_index); - - on_click(".button_show_hide_help", coverage.show_hide_help); - - coverage.filters = undefined; - try { - coverage.filters = localStorage.getItem(coverage.LINE_FILTERS_STORAGE); - } catch(err) {} - - if (coverage.filters) { - coverage.filters = JSON.parse(coverage.filters); - } - else { - coverage.filters = {run: false, exc: true, mis: true, par: true}; - } - - for (cls in coverage.filters) { - coverage.set_line_visibilty(cls, coverage.filters[cls]); // nosemgrep: eslint.detect-object-injection - } - - coverage.assign_shortkeys(); - coverage.init_scroll_markers(); - coverage.wire_up_sticky_header(); - - document.querySelectorAll("[id^=ctxs]").forEach( - cbox => cbox.addEventListener("click", coverage.expand_contexts) - ); - - // Rebuild scroll markers when the window height changes. 
- window.addEventListener("resize", coverage.build_scroll_markers); -}; - -coverage.toggle_lines = function (event) { - const btn = event.target.closest("button"); - const category = btn.value - const show = !btn.classList.contains("show_" + category); - coverage.set_line_visibilty(category, show); - coverage.build_scroll_markers(); - coverage.filters[category] = show; - try { - localStorage.setItem(coverage.LINE_FILTERS_STORAGE, JSON.stringify(coverage.filters)); - } catch(err) {} -}; - -coverage.set_line_visibilty = function (category, should_show) { - const cls = "show_" + category; - const btn = document.querySelector(".button_toggle_" + category); - if (btn) { - if (should_show) { - document.querySelectorAll("#source ." + category).forEach(e => e.classList.add(cls)); - btn.classList.add(cls); - } - else { - document.querySelectorAll("#source ." + category).forEach(e => e.classList.remove(cls)); - btn.classList.remove(cls); - } - } -}; - -// Return the nth line div. -coverage.line_elt = function (n) { - return document.getElementById("t" + n)?.closest("p"); -}; - -// Set the selection. b and e are line numbers. -coverage.set_sel = function (b, e) { - // The first line selected. - coverage.sel_begin = b; - // The next line not selected. - coverage.sel_end = (e === undefined) ? b+1 : e; -}; - -coverage.to_top = function () { - coverage.set_sel(0, 1); - coverage.scroll_window(0); -}; - -coverage.to_first_chunk = function () { - coverage.set_sel(0, 1); - coverage.to_next_chunk(); -}; - -coverage.to_prev_file = function () { - window.location = document.getElementById("prevFileLink").href; -} - -coverage.to_next_file = function () { - window.location = document.getElementById("nextFileLink").href; -} - -coverage.to_index = function () { - location.href = document.getElementById("indexLink").href; -} - -coverage.show_hide_help = function () { - const helpCheck = document.getElementById("help_panel_state") - helpCheck.checked = !helpCheck.checked; -} - -// Return a string indicating what kind of chunk this line belongs to, -// or null if not a chunk. -coverage.chunk_indicator = function (line_elt) { - const classes = line_elt?.className; - if (!classes) { - return null; - } - const match = classes.match(/\bshow_\w+\b/); - if (!match) { - return null; - } - return match[0]; -}; - -coverage.to_next_chunk = function () { - const c = coverage; - - // Find the start of the next colored chunk. - var probe = c.sel_end; - var chunk_indicator, probe_line; - while (true) { - probe_line = c.line_elt(probe); - if (!probe_line) { - return; - } - chunk_indicator = c.chunk_indicator(probe_line); - if (chunk_indicator) { - break; - } - probe++; - } - - // There's a next chunk, `probe` points to it. - var begin = probe; - - // Find the end of this chunk. - var next_indicator = chunk_indicator; - while (next_indicator === chunk_indicator) { - probe++; - probe_line = c.line_elt(probe); - next_indicator = c.chunk_indicator(probe_line); - } - c.set_sel(begin, probe); - c.show_selection(); -}; - -coverage.to_prev_chunk = function () { - const c = coverage; - - // Find the end of the prev colored chunk. - var probe = c.sel_begin-1; - var probe_line = c.line_elt(probe); - if (!probe_line) { - return; - } - var chunk_indicator = c.chunk_indicator(probe_line); - while (probe > 1 && !chunk_indicator) { - probe--; - probe_line = c.line_elt(probe); - if (!probe_line) { - return; - } - chunk_indicator = c.chunk_indicator(probe_line); - } - - // There's a prev chunk, `probe` points to its last line. 
- var end = probe+1; - - // Find the beginning of this chunk. - var prev_indicator = chunk_indicator; - while (prev_indicator === chunk_indicator) { - probe--; - if (probe <= 0) { - return; - } - probe_line = c.line_elt(probe); - prev_indicator = c.chunk_indicator(probe_line); - } - c.set_sel(probe+1, end); - c.show_selection(); -}; - -// Returns 0, 1, or 2: how many of the two ends of the selection are on -// the screen right now? -coverage.selection_ends_on_screen = function () { - if (coverage.sel_begin === 0) { - return 0; - } - - const begin = coverage.line_elt(coverage.sel_begin); - const end = coverage.line_elt(coverage.sel_end-1); - - return ( - (checkVisible(begin) ? 1 : 0) - + (checkVisible(end) ? 1 : 0) - ); -}; - -coverage.to_next_chunk_nicely = function () { - if (coverage.selection_ends_on_screen() === 0) { - // The selection is entirely off the screen: - // Set the top line on the screen as selection. - - // This will select the top-left of the viewport - // As this is most likely the span with the line number we take the parent - const line = document.elementFromPoint(0, 0).parentElement; - if (line.parentElement !== document.getElementById("source")) { - // The element is not a source line but the header or similar - coverage.select_line_or_chunk(1); - } - else { - // We extract the line number from the id - coverage.select_line_or_chunk(parseInt(line.id.substring(1), 10)); - } - } - coverage.to_next_chunk(); -}; - -coverage.to_prev_chunk_nicely = function () { - if (coverage.selection_ends_on_screen() === 0) { - // The selection is entirely off the screen: - // Set the lowest line on the screen as selection. - - // This will select the bottom-left of the viewport - // As this is most likely the span with the line number we take the parent - const line = document.elementFromPoint(document.documentElement.clientHeight-1, 0).parentElement; - if (line.parentElement !== document.getElementById("source")) { - // The element is not a source line but the header or similar - coverage.select_line_or_chunk(coverage.lines_len); - } - else { - // We extract the line number from the id - coverage.select_line_or_chunk(parseInt(line.id.substring(1), 10)); - } - } - coverage.to_prev_chunk(); -}; - -// Select line number lineno, or if it is in a colored chunk, select the -// entire chunk -coverage.select_line_or_chunk = function (lineno) { - var c = coverage; - var probe_line = c.line_elt(lineno); - if (!probe_line) { - return; - } - var the_indicator = c.chunk_indicator(probe_line); - if (the_indicator) { - // The line is in a highlighted chunk. - // Search backward for the first line. - var probe = lineno; - var indicator = the_indicator; - while (probe > 0 && indicator === the_indicator) { - probe--; - probe_line = c.line_elt(probe); - if (!probe_line) { - break; - } - indicator = c.chunk_indicator(probe_line); - } - var begin = probe + 1; - - // Search forward for the last line. 
- probe = lineno; - indicator = the_indicator; - while (indicator === the_indicator) { - probe++; - probe_line = c.line_elt(probe); - indicator = c.chunk_indicator(probe_line); - } - - coverage.set_sel(begin, probe); - } - else { - coverage.set_sel(lineno); - } -}; - -coverage.show_selection = function () { - // Highlight the lines in the chunk - document.querySelectorAll("#source .highlight").forEach(e => e.classList.remove("highlight")); - for (let probe = coverage.sel_begin; probe < coverage.sel_end; probe++) { - coverage.line_elt(probe).querySelector(".n").classList.add("highlight"); - } - - coverage.scroll_to_selection(); -}; - -coverage.scroll_to_selection = function () { - // Scroll the page if the chunk isn't fully visible. - if (coverage.selection_ends_on_screen() < 2) { - const element = coverage.line_elt(coverage.sel_begin); - coverage.scroll_window(element.offsetTop - 60); - } -}; - -coverage.scroll_window = function (to_pos) { - window.scroll({top: to_pos, behavior: "smooth"}); -}; - -coverage.init_scroll_markers = function () { - // Init some variables - coverage.lines_len = document.querySelectorAll("#source > p").length; - - // Build html - coverage.build_scroll_markers(); -}; - -coverage.build_scroll_markers = function () { - const temp_scroll_marker = document.getElementById("scroll_marker") - if (temp_scroll_marker) temp_scroll_marker.remove(); - // Don't build markers if the window has no scroll bar. - if (document.body.scrollHeight <= window.innerHeight) { - return; - } - - const marker_scale = window.innerHeight / document.body.scrollHeight; - const line_height = Math.min(Math.max(3, window.innerHeight / coverage.lines_len), 10); - - let previous_line = -99, last_mark, last_top; - - const scroll_marker = document.createElement("div"); - scroll_marker.id = "scroll_marker"; - document.getElementById("source").querySelectorAll( - "p.show_run, p.show_mis, p.show_exc, p.show_exc, p.show_par" - ).forEach(element => { - const line_top = Math.floor(element.offsetTop * marker_scale); - const line_number = parseInt(element.querySelector(".n a").id.substr(1)); - - if (line_number === previous_line + 1) { - // If this solid missed block just make previous mark higher. - last_mark.style.height = `${line_top + line_height - last_top}px`; - } - else { - // Add colored line in scroll_marker block. 
- last_mark = document.createElement("div"); - last_mark.id = `m${line_number}`; - last_mark.classList.add("marker"); - last_mark.style.height = `${line_height}px`; - last_mark.style.top = `${line_top}px`; - scroll_marker.append(last_mark); - last_top = line_top; - } - - previous_line = line_number; - }); - - // Append last to prevent layout calculation - document.body.append(scroll_marker); -}; - -coverage.wire_up_sticky_header = function () { - const header = document.querySelector("header"); - const header_bottom = ( - header.querySelector(".content h2").getBoundingClientRect().top - - header.getBoundingClientRect().top - ); - - function updateHeader() { - if (window.scrollY > header_bottom) { - header.classList.add("sticky"); - } - else { - header.classList.remove("sticky"); - } - } - - window.addEventListener("scroll", updateHeader); - updateHeader(); -}; - -coverage.expand_contexts = function (e) { - var ctxs = e.target.parentNode.querySelector(".ctxs"); - - if (!ctxs.classList.contains("expanded")) { - var ctxs_text = ctxs.textContent; - var width = Number(ctxs_text[0]); - ctxs.textContent = ""; - for (var i = 1; i < ctxs_text.length; i += width) { - key = ctxs_text.substring(i, i + width).trim(); - ctxs.appendChild(document.createTextNode(contexts[key])); - ctxs.appendChild(document.createElement("br")); - } - ctxs.classList.add("expanded"); - } -}; - -document.addEventListener("DOMContentLoaded", () => { - if (document.body.classList.contains("indexfile")) { - coverage.index_ready(); - } - else { - coverage.pyfile_ready(); - } -}); diff --git a/pycov/coverage_html_cb_6fb7b396.js b/pycov/coverage_html_cb_6fb7b396.js deleted file mode 100644 index 1face13d..00000000 --- a/pycov/coverage_html_cb_6fb7b396.js +++ /dev/null @@ -1,733 +0,0 @@ -// Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 -// For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt - -// Coverage.py HTML report browser code. 
[The remaining deleted lines of pycov/coverage_html_cb_6fb7b396.js are identical to pycov/coverage_html_cb_497bf287.js above and are not repeated here.]

diff --git a/pycov/favicon_32_cb_58284776.png b/pycov/favicon_32_cb_58284776.png
deleted file mode 100644
index 8649f0475d8d20793b2ec431fe25a186a414cf10..0000000000000000000000000000000000000000
GIT binary patch
[1732-byte binary favicon payload omitted]
(remainder of binary favicon PNG data omitted)

Coverage report: 56%
coverage.py v7.6.8, created at 2024-12-05 16:38 +0000
File | function | statements | missing | excluded | coverage
build\python_kpex_protobuf\process_parasitics_pb2.py(no function)3321036%
build\python_kpex_protobuf\process_stack_pb2.py(no function)3725032%
build\python_kpex_protobuf\tech_pb2.py(no function)239061%
kpex\__init__.py(no function)000100%
kpex\common\__init__.py(no function)000100%
kpex\common\capacitance_matrix.pyCapacitanceMatrix.__getitem__100100%
kpex\common\capacitance_matrix.pyCapacitanceMatrix.__setitem__1100%
kpex\common\capacitance_matrix.pyCapacitanceMatrix.dimension100100%
kpex\common\capacitance_matrix.pyCapacitanceMatrix.parse_csv101090%
kpex\common\capacitance_matrix.pyCapacitanceMatrix.write_csv900100%
kpex\common\capacitance_matrix.pyCapacitanceMatrix.averaged_off_diagonals1100100%
kpex\common\capacitance_matrix.py(no function)1800100%
kpex\fastcap\fastcap_runner.pyrun_fastcap262600%
kpex\fastcap\fastcap_runner.pyfastcap_parse_capacitance_matrix272093%
kpex\fastcap\fastcap_runner.py(no function)900100%
kpex\fastercap\__init__.py(no function)000100%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.__init__5500%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.dbu1100%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.gds_pair7700%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.shapes_of_net7700%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.shapes_of_layer7700%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.top_cell_bbox1100%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.build12212200%
kpex\fastercap\fastercap_input_builder.pyFasterCapInputBuilder.build.format_terminal3300%
kpex\fastercap\fastercap_input_builder.py(no function)1800100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder.__init__1100100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder.add_material100100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder.add_dielectric31067%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder.add_conductor31067%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder._norm2z100100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder._z2norm100100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder._add_layer111091%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelBuilder.generate261096%
kpex\fastercap\fastercap_model_generator.pyHDielKey.__str__100100%
kpex\fastercap\fastercap_model_generator.pyHDielKey.topic100100%
kpex\fastercap\fastercap_model_generator.pyHDielKey.reversed100100%
kpex\fastercap\fastercap_model_generator.pyHCondKey.__str__100100%
kpex\fastercap\fastercap_model_generator.pyHCondKey.topic100100%
kpex\fastercap\fastercap_model_generator.pyPoint.__sub__1100%
kpex\fastercap\fastercap_model_generator.pyPoint.sq_length100100%
kpex\fastercap\fastercap_model_generator.pyPoint.to_fastcap1100%
kpex\fastercap\fastercap_model_generator.pyvector_product200100%
kpex\fastercap\fastercap_model_generator.pydot_product200100%
kpex\fastercap\fastercap_model_generator.pyTriangle.reversed100100%
kpex\fastercap\fastercap_model_generator.pyTriangle.outside_reference_point6600%
kpex\fastercap\fastercap_model_generator.pyTriangle.to_fastcap1100%
kpex\fastercap\fastercap_model_generator.pyTriangle.__len__1100%
kpex\fastercap\fastercap_model_generator.pyTriangle.__getitem__51080%
kpex\fastercap\fastercap_model_generator.pyEdge.vector_of_edge100100%
kpex\fastercap\fastercap_model_generator.pyEdge.reversed100100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.__init__1600100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.reset200100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.add_in400100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.add_out400100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.finish_z1037093%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.next_z4500100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.generate_hdiel1000100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.generate_v_surface1000100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.generate_vdiel61083%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.generate_hcond_in1000100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.generate_hcond_out1100100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.generate_vcond61083%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.triangulate600100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.triangulate.convert_point300100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.finalize241096%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.write_fastcap454402%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._write_fastercap_geo111100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.check131092%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._check_tris144071%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._normed_edges800100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._normed_edges.normed_dbu100100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._point2s1100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._edge2s1100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._is_antiparallel500100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._split_edges3300100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator.dump_stl8800%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._write_as_stl141400%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._merge_events2400100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._collect_diel_tris1000100%
kpex\fastercap\fastercap_model_generator.pyFasterCapModelGenerator._collect_cond_tris500100%
kpex\fastercap\fastercap_model_generator.py(no function)12000100%
kpex\fastercap\fastercap_runner.pyrun_fastercap272700%
kpex\fastercap\fastercap_runner.pyfastercap_parse_capacitance_matrix212090%
kpex\fastercap\fastercap_runner.py(no function)800100%
kpex\klayout\__init__.py(no function)100100%
kpex\klayout\lvs_runner.pyLVSRunner.run_klayout_lvs181800%
kpex\klayout\lvs_runner.py(no function)800100%
kpex\klayout\lvsdb_extractor.pyKLayoutExtractionContext.prepare_extraction1500100%
kpex\klayout\lvsdb_extractor.pyKLayoutExtractionContext.build_LVS_layer_map226073%
kpex\klayout\lvsdb_extractor.pyKLayoutExtractionContext.nonempty_extracted_layers256076%
kpex\klayout\lvsdb_extractor.pyKLayoutExtractionContext.top_cell_bbox5500%
kpex\klayout\lvsdb_extractor.pyKLayoutExtractionContext.shapes_of_net131300%
kpex\klayout\lvsdb_extractor.pyKLayoutExtractionContext.shapes_of_layer131300%
kpex\klayout\lvsdb_extractor.py(no function)3900100%
kpex\klayout\netlist_csv.pyNetlistCSVWriter.write_csv171700%
kpex\klayout\netlist_csv.py(no function)600100%
kpex\klayout\netlist_expander.pyNetlistExpander.expand531098%
kpex\klayout\netlist_expander.pyNetlistExpander.expand.add_parasitic_cap81088%
kpex\klayout\netlist_expander.py(no function)900100%
kpex\klayout\netlist_reducer.pyNetlistReducer.reduce164075%
kpex\klayout\netlist_reducer.py(no function)600100%
kpex\klayout\repair_rdb.pyparse_category_path292900%
kpex\klayout\repair_rdb.pyrepair_rdb_xml202000%
kpex\klayout\repair_rdb.pyrepair_rdb131300%
kpex\klayout\repair_rdb.py(no function)174076%
kpex\kpex_cli.pyKpexCLI.parse_args521098%
kpex\kpex_cli.pyKpexCLI.validate_args8748045%
kpex\kpex_cli.pyKpexCLI.validate_args.input_file_stem100100%
kpex\kpex_cli.pyKpexCLI.build_fastercap_input131300%
kpex\kpex_cli.pyKpexCLI.run_fastercap_extraction333300%
kpex\kpex_cli.pyKpexCLI.run_magic_extraction151500%
kpex\kpex_cli.pyKpexCLI.run_fastcap_extraction222200%
kpex\kpex_cli.pyKpexCLI.run_kpex_2_5d_engine212100%
kpex\kpex_cli.pyKpexCLI.setup_logging173082%
kpex\kpex_cli.pyKpexCLI.setup_logging.register_log_file_handler600100%
kpex\kpex_cli.pyKpexCLI.setup_logging.reregister_log_file_handler135062%
kpex\kpex_cli.pyKpexCLI.modification_date2200%
kpex\kpex_cli.pyKpexCLI.create_lvsdb171700%
kpex\kpex_cli.pyKpexCLI.main3934013%
kpex\kpex_cli.pyKpexCLI.main.dump_layers9900%
kpex\kpex_cli.pyKpexCLI.rcx25_extraction_results3300%
kpex\kpex_cli.pyKpexCLI.rcx25_extracted_csv_path3300%
kpex\kpex_cli.py(no function)592097%
kpex\log\__init__.py(no function)100100%
kpex\log\logger.pyLogLevel.level_by_name1100%
kpex\log\logger.pyLogLevelFormatter.format600100%
kpex\log\logger.pyLogLevelFilter.__init__300100%
kpex\log\logger.pyLogLevelFilter.filter300100%
kpex\log\logger.pyset_log_level100100%
kpex\log\logger.pyregister_additional_handler100100%
kpex\log\logger.pyderegister_additional_handler100100%
kpex\log\logger.pyconfigure_logger1100100%
kpex\log\logger.pydebug300100%
kpex\log\logger.pysubproc3300%
kpex\log\logger.pyrule009100%
kpex\log\logger.pyinfo300100%
kpex\log\logger.pywarning300100%
kpex\log\logger.pyerror300100%
kpex\log\logger.py(no function)3701100%
kpex\magic\__init__.py(no function)000100%
kpex\magic\magic_runner.pyprepare_magic_script121200%
kpex\magic\magic_runner.pyrun_magic171700%
kpex\magic\magic_runner.py(no function)1500100%
kpex\rcx25\__init__.py(no function)000100%
kpex\rcx25\extraction_results.pySideOverlapKey.__repr__1100%
kpex\rcx25\extraction_results.pySideOverlapCap.__str__1100%
kpex\rcx25\extraction_results.pyNetCoupleKey.__repr__1100%
kpex\rcx25\extraction_results.pyNetCoupleKey.normed3300%
kpex\rcx25\extraction_results.pyExtractionSummary.merged5500%
kpex\rcx25\extraction_results.pyCellExtractionResults.summarize4400%
kpex\rcx25\extraction_results.pyExtractionResults.summarize2200%
kpex\rcx25\extraction_results.py(no function)7400100%
kpex\rcx25\extractor.pyRCExtractor.__init__3300%
kpex\rcx25\extractor.pyRCExtractor.gds_pair7700%
kpex\rcx25\extractor.pyRCExtractor.shapes_of_net7700%
kpex\rcx25\extractor.pyRCExtractor.shapes_of_layer7700%
kpex\rcx25\extractor.pyRCExtractor.extract7700%
kpex\rcx25\extractor.pyRCExtractor.extract_cell22022000%
kpex\rcx25\extractor.pyRCExtractor.extract_cell.rdb_output2200%
kpex\rcx25\extractor.pyRCExtractor.extract_cell.FringeEdgeNeighborhoodVisitor.__init__141400%
kpex\rcx25\extractor.pyRCExtractor.extract_cell.FringeEdgeNeighborhoodVisitor.begin_polygon2200%
kpex\rcx25\extractor.pyRCExtractor.extract_cell.FringeEdgeNeighborhoodVisitor.end_polygon1100%
kpex\rcx25\extractor.pyRCExtractor.extract_cell.FringeEdgeNeighborhoodVisitor.on_edge10210200%
kpex\rcx25\extractor.pyRCExtractor.extract_cell.FringeEdgeNeighborhoodVisitor.on_edge.distance_near7700%
kpex\rcx25\extractor.py(no function)2000100%
kpex\tech_info.pyTechInfo.parse_tech_def400100%
kpex\tech_info.pyTechInfo.from_json200100%
kpex\tech_info.pyTechInfo.__init__200100%
kpex\tech_info.pyTechInfo.gds_pair_for_computed_layer_name100100%
kpex\tech_info.pyTechInfo.computed_layer_info_by_name100100%
kpex\tech_info.pyTechInfo.computed_layer_info_by_gds_pair1100%
kpex\tech_info.pyTechInfo.canonical_layer_name_by_gds_pair1100%
kpex\tech_info.pyTechInfo.layer_info_by_name1100%
kpex\tech_info.pyTechInfo.gds_pair_for_layer_name1100%
kpex\tech_info.pyTechInfo.layer_info_by_gds_pair1100%
kpex\tech_info.pyTechInfo.process_stack_layer_by_name1100%
kpex\tech_info.pyTechInfo.process_stack_layer_by_gds_pair1100%
kpex\tech_info.pyTechInfo.process_substrate_layer1100%
kpex\tech_info.pyTechInfo.process_diffusion_layers1100%
kpex\tech_info.pyTechInfo.gate_poly_layer1100%
kpex\tech_info.pyTechInfo.field_oxide_layer1100%
kpex\tech_info.pyTechInfo.process_metal_layers1100%
kpex\tech_info.pyTechInfo.filtered_dielectric_layers7700%
kpex\tech_info.pyTechInfo.dielectric_by_name101000%
kpex\tech_info.pyTechInfo.sidewall_dielectric_layer161600%
kpex\tech_info.pyTechInfo.simple_dielectric_above_metal131300%
kpex\tech_info.pyTechInfo.substrate_cap_by_layer_name1100%
kpex\tech_info.pyTechInfo.overlap_cap_by_layer_names9900%
kpex\tech_info.pyTechInfo.overlap_cap_by_layer_names.convert_substrate_to_overlap_cap5500%
kpex\tech_info.pyTechInfo.sidewall_cap_by_layer_name1100%
kpex\tech_info.pyTechInfo.internal_substrate_layer_name1100%
kpex\tech_info.pyTechInfo.side_overlap_cap_by_layer_names7700%
kpex\tech_info.pyTechInfo.side_overlap_cap_by_layer_names.convert_substrate_to_side_overlap_cap5500%
kpex\tech_info.py(no function)6000100%
kpex\util\__init__.py(no function)000100%
kpex\util\argparse_helpers.pyrender_enum_help61083%
kpex\util\argparse_helpers.pytrue_or_false95044%
kpex\util\argparse_helpers.py(no function)500100%
kpex\util\multiple_choice.pyMultipleChoicePattern.__init__133077%
kpex\util\multiple_choice.pyMultipleChoicePattern.filter3300%
kpex\util\multiple_choice.pyMultipleChoicePattern.is_included5500%
kpex\util\multiple_choice.py(no function)700100%
kpex\version.py(no function)100100%
tests\__init__.py(no function)000100%
tests\common\__init__.py(no function)000100%
tests\common\capacitance_matrix_test.pyTest.klayout_testdata_dir100100%
tests\common\capacitance_matrix_test.pyTest.test_parse_csv700100%
tests\common\capacitance_matrix_test.pyTest.test_write_csv600100%
tests\common\capacitance_matrix_test.pyTest.test_averaged_off_diagonals700100%
tests\common\capacitance_matrix_test.py(no function)1500100%
tests\fastcap\fastcap_runner_test.pyTest.fastcap_testdata_dir100100%
tests\fastcap\fastcap_runner_test.pyTest.test_fastcap_parse_capacitance_matrix1100100%
tests\fastcap\fastcap_runner_test.py(no function)1000100%
tests\fastercap\__init__.py(no function)000100%
tests\fastercap\fastercap_model_generator_test.pytest_fastercap_model_generator531098%
tests\fastercap\fastercap_model_generator_test.py(no function)700100%
tests\fastercap\fastercap_runner_test.pyTest.fastercap_testdata_dir100100%
tests\fastercap\fastercap_runner_test.pyTest.test_fastercap_parse_capacitance_matrix1000100%
tests\fastercap\fastercap_runner_test.py(no function)1000100%
tests\klayout\lvs_runner_test.pyTest.testdata_dir1100%
tests\klayout\lvs_runner_test.pyTest.test_run_klayout_lvs101000%
tests\klayout\lvs_runner_test.py(no function)1300100%
tests\klayout\netlist_expander_test.pyTest.klayout_testdata_dir100100%
tests\klayout\netlist_expander_test.pyTest.tech_info_json_path100100%
tests\klayout\netlist_expander_test.pyTest.test_netlist_expansion1600100%
tests\klayout\netlist_expander_test.py(no function)1900100%
tests\klayout\netlist_reducer_test.pyTest.setUpClass100100%
tests\klayout\netlist_reducer_test.pyTest.klayout_testdata_dir100100%
tests\klayout\netlist_reducer_test.pyTest._test_netlist_reduction1000100%
tests\klayout\netlist_reducer_test.pyTest.test_netlist_reduction_1200100%
tests\klayout\netlist_reducer_test.pyTest.test_netlist_reduction_2200100%
tests\klayout\netlist_reducer_test.py(no function)1700100%
tests\rcx25\rcx25_test.py_kpex_pdk_dir100100%
tests\rcx25\rcx25_test.py_sky130a_testdata_dir100100%
tests\rcx25\rcx25_test.py_gds100100%
tests\rcx25\rcx25_test.py_save_layout_preview1200100%
tests\rcx25\rcx25_test.py_run_rcx25d_single_cell125058%
tests\rcx25\rcx25_test.pyassert_expected_matches_obtained131208%
tests\rcx25\rcx25_test.pytest_single_plate_100um_x_100um_li1_over_substrate100100%
tests\rcx25\rcx25_test.pytest_overlap_plates_100um_x_100um_li1_m1100100%
tests\rcx25\rcx25_test.pytest_overlap_plates_100um_x_100um_li1_m1_m2_m3100100%
tests\rcx25\rcx25_test.pytest_sidewall_100um_x_100um_distance_200nm_li1100100%
tests\rcx25\rcx25_test.pytest_sidewall_net_uturn_l1_redux100100%
tests\rcx25\rcx25_test.pytest_sidewall_cap_vpp_04p4x04p6_l1_redux100100%
tests\rcx25\rcx25_test.pytest_near_body_shield_li1_m1100100%
tests\rcx25\rcx25_test.pytest_sideoverlap_simple_plates_li1_m1100100%
tests\rcx25\rcx25_test.py(no function)4700100%
Total | | 3004 | 1319 | 10 | 56%
diff --git a/pycov/index.html b/pycov/index.html
deleted file mode 100644
index 54ee8d75..00000000
--- a/pycov/index.html
+++ /dev/null
@@ -1,398 +0,0 @@

Coverage report: 56%
coverage.py v7.6.8, created at 2024-12-05 16:38 +0000
File | statements | missing | excluded | coverage
build\python_kpex_protobuf\process_parasitics_pb2.py | 33 | 21 | 0 | 36%
build\python_kpex_protobuf\process_stack_pb2.py | 37 | 25 | 0 | 32%
build\python_kpex_protobuf\tech_pb2.py | 23 | 9 | 0 | 61%
kpex\__init__.py | 0 | 0 | 0 | 100%
kpex\common\__init__.py | 0 | 0 | 0 | 100%
kpex\common\capacitance_matrix.py | 51 | 2 | 0 | 96%
kpex\fastcap\fastcap_runner.py | 62 | 28 | 0 | 55%
kpex\fastercap\__init__.py | 0 | 0 | 0 | 100%
kpex\fastercap\fastercap_input_builder.py | 171 | 153 | 0 | 11%
kpex\fastercap\fastercap_model_generator.py | 658 | 109 | 0 | 83%
kpex\fastercap\fastercap_runner.py | 56 | 29 | 0 | 48%
kpex\klayout\__init__.py | 1 | 0 | 0 | 100%
kpex\klayout\lvs_runner.py | 26 | 18 | 0 | 31%
kpex\klayout\lvsdb_extractor.py | 132 | 43 | 0 | 67%
kpex\klayout\netlist_csv.py | 23 | 17 | 0 | 26%
kpex\klayout\netlist_expander.py | 70 | 2 | 0 | 97%
kpex\klayout\netlist_reducer.py | 22 | 4 | 0 | 82%
kpex\klayout\repair_rdb.py | 79 | 66 | 0 | 16%
kpex\kpex_cli.py | 412 | 231 | 0 | 44%
kpex\log\__init__.py | 1 | 0 | 0 | 100%
kpex\log\logger.py | 79 | 4 | 10 | 95%
kpex\magic\__init__.py | 0 | 0 | 0 | 100%
kpex\magic\magic_runner.py | 44 | 29 | 0 | 34%
kpex\rcx25\__init__.py | 0 | 0 | 0 | 100%
kpex\rcx25\extraction_results.py | 91 | 17 | 0 | 81%
kpex\rcx25\extractor.py | 399 | 379 | 0 | 5%
kpex\tech_info.py | 157 | 87 | 0 | 45%
kpex\util\__init__.py | 0 | 0 | 0 | 100%
kpex\util\argparse_helpers.py | 20 | 6 | 0 | 70%
kpex\util\multiple_choice.py | 28 | 11 | 0 | 61%
kpex\version.py | 1 | 0 | 0 | 100%
tests\__init__.py | 0 | 0 | 0 | 100%
tests\common\__init__.py | 0 | 0 | 0 | 100%
tests\common\capacitance_matrix_test.py | 36 | 0 | 0 | 100%
tests\fastcap\fastcap_runner_test.py | 22 | 0 | 0 | 100%
tests\fastercap\__init__.py | 0 | 0 | 0 | 100%
tests\fastercap\fastercap_model_generator_test.py | 60 | 1 | 0 | 98%
tests\fastercap\fastercap_runner_test.py | 21 | 0 | 0 | 100%
tests\klayout\lvs_runner_test.py | 24 | 11 | 0 | 54%
tests\klayout\netlist_expander_test.py | 37 | 0 | 0 | 100%
tests\klayout\netlist_reducer_test.py | 33 | 0 | 0 | 100%
tests\rcx25\rcx25_test.py | 95 | 17 | 0 | 82%
Total | 3004 | 1319 | 10 | 56%
diff --git a/pycov/keybd_closed_cb_ce680311.png b/pycov/keybd_closed_cb_ce680311.png
deleted file mode 100644
index ba119c47df81ed2bbd27a06988abf700139c4f99..0000000000000000000000000000000000000000
GIT binary patch
(binary PNG data omitted)

diff --git a/pycov/status.json b/pycov/status.json
deleted file mode 100644
index 8d0282c1..00000000
--- a/pycov/status.json
+++ /dev/null
@@ -1 +0,0 @@
-{"note":"This file is an internal implementation detail to speed up HTML report generation. Its format can change at any time.
You might be looking for the JSON report: https://coverage.rtfd.io/cmd.html#cmd-json","format":5,"version":"7.6.8","globals":"b58095001dc8074f9da754c7838e7c7d","files":{"z_8d81377067d4aa92_process_parasitics_pb2_py":{"hash":"4243d6cd990a5f2ed4988d4295552d50","index":{"url":"z_8d81377067d4aa92_process_parasitics_pb2_py.html","file":"build\\python_kpex_protobuf\\process_parasitics_pb2.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":33,"n_excluded":0,"n_missing":21,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_8d81377067d4aa92_process_stack_pb2_py":{"hash":"b61f4c540e594d87560c8fecbe94d491","index":{"url":"z_8d81377067d4aa92_process_stack_pb2_py.html","file":"build\\python_kpex_protobuf\\process_stack_pb2.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":37,"n_excluded":0,"n_missing":25,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_8d81377067d4aa92_tech_pb2_py":{"hash":"eb1006697bf9f721a60930a0fff765eb","index":{"url":"z_8d81377067d4aa92_tech_pb2_py.html","file":"build\\python_kpex_protobuf\\tech_pb2.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":23,"n_excluded":0,"n_missing":9,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_31e83241eddb0cfa___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_31e83241eddb0cfa___init___py.html","file":"kpex\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_f40df6a530c8cf33___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_f40df6a530c8cf33___init___py.html","file":"kpex\\common\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_f40df6a530c8cf33_capacitance_matrix_py":{"hash":"dfa018a58cee9edabf9321730845c6c7","index":{"url":"z_f40df6a530c8cf33_capacitance_matrix_py.html","file":"kpex\\common\\capacitance_matrix.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":51,"n_excluded":0,"n_missing":2,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_f568a0cfbd87c836_fastcap_runner_py":{"hash":"8782d668fcbc254e1079fbf2aaafcd6b","index":{"url":"z_f568a0cfbd87c836_fastcap_runner_py.html","file":"kpex\\fastcap\\fastcap_runner.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":62,"n_excluded":0,"n_missing":28,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_c489968eb1a5e358___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_c489968eb1a5e358___init___py.html","file":"kpex\\fastercap\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_c489968eb1a5e358_fastercap_input_builder_py":{"hash":"50c01818469f93294dd4e8d2f6e3c237","index":{"url":"z_c489968eb1a5e358_fastercap_input_builder_py.html","file":"kpex\\fastercap\\fastercap_input_builder.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":171,"n_excluded":0,"n_missing":153,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_c489968eb1a5e358_fastercap_model_generator_py":{"hash":"81d83714afeaf5b39352bbf5852f73d4","index":{"url":"z_c489968eb1a5e358_fastercap_model_generator_py.html","file":"kpex\\fastercap\\fastercap_model_generator.py","description":"","nums":{
"precision":0,"n_files":1,"n_statements":658,"n_excluded":0,"n_missing":109,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_c489968eb1a5e358_fastercap_runner_py":{"hash":"acfa2d7129bb1e6e602ff3873f406805","index":{"url":"z_c489968eb1a5e358_fastercap_runner_py.html","file":"kpex\\fastercap\\fastercap_runner.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":56,"n_excluded":0,"n_missing":29,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2ea764e3f741ac46___init___py":{"hash":"4cd39d325bb785870b3e7003a0f0fe59","index":{"url":"z_2ea764e3f741ac46___init___py.html","file":"kpex\\klayout\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":1,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2ea764e3f741ac46_lvs_runner_py":{"hash":"644effd58659ddb8da86b11ad171a0ce","index":{"url":"z_2ea764e3f741ac46_lvs_runner_py.html","file":"kpex\\klayout\\lvs_runner.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":26,"n_excluded":0,"n_missing":18,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2ea764e3f741ac46_lvsdb_extractor_py":{"hash":"d14386a1cfdc19215c9b4e44140c1e00","index":{"url":"z_2ea764e3f741ac46_lvsdb_extractor_py.html","file":"kpex\\klayout\\lvsdb_extractor.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":132,"n_excluded":0,"n_missing":43,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2ea764e3f741ac46_netlist_csv_py":{"hash":"b068875b2e30fa8557ab4d0046234f68","index":{"url":"z_2ea764e3f741ac46_netlist_csv_py.html","file":"kpex\\klayout\\netlist_csv.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":23,"n_excluded":0,"n_missing":17,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2ea764e3f741ac46_netlist_expander_py":{"hash":"8b69ec9d7d9d01843e506d254f2e285d","index":{"url":"z_2ea764e3f741ac46_netlist_expander_py.html","file":"kpex\\klayout\\netlist_expander.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":70,"n_excluded":0,"n_missing":2,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2ea764e3f741ac46_netlist_reducer_py":{"hash":"2c6fe9eb64e8c2d6f6a9732b6ba6045f","index":{"url":"z_2ea764e3f741ac46_netlist_reducer_py.html","file":"kpex\\klayout\\netlist_reducer.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":22,"n_excluded":0,"n_missing":4,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2ea764e3f741ac46_repair_rdb_py":{"hash":"7b201d5a72c90062742b48f9eaf19b22","index":{"url":"z_2ea764e3f741ac46_repair_rdb_py.html","file":"kpex\\klayout\\repair_rdb.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":79,"n_excluded":0,"n_missing":66,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_31e83241eddb0cfa_kpex_cli_py":{"hash":"e0eefcde4d2f6a4f4efcc5681c1fdfbd","index":{"url":"z_31e83241eddb0cfa_kpex_cli_py.html","file":"kpex\\kpex_cli.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":412,"n_excluded":0,"n_missing":231,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_b89b04cf284a76bf___init___py":{"hash":"7641f0ed844595f498486cf1bdc3c591","index":{"url":"z_b89b04cf284a76bf___init___py.html","file":"kpex\\log\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":1,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_b89b04cf284a76bf_logger_py":{"hash"
:"2d0c42c002f5f45ca073c4685c911738","index":{"url":"z_b89b04cf284a76bf_logger_py.html","file":"kpex\\log\\logger.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":79,"n_excluded":10,"n_missing":4,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_bb4acdb2528096e4___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_bb4acdb2528096e4___init___py.html","file":"kpex\\magic\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_bb4acdb2528096e4_magic_runner_py":{"hash":"e1226befa4ff1649b6fc6949807dd43e","index":{"url":"z_bb4acdb2528096e4_magic_runner_py.html","file":"kpex\\magic\\magic_runner.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":44,"n_excluded":0,"n_missing":29,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_588bb9d7e9b47fd3___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_588bb9d7e9b47fd3___init___py.html","file":"kpex\\rcx25\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_588bb9d7e9b47fd3_extraction_results_py":{"hash":"d0813accdb016eab8eedca33034e2581","index":{"url":"z_588bb9d7e9b47fd3_extraction_results_py.html","file":"kpex\\rcx25\\extraction_results.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":91,"n_excluded":0,"n_missing":17,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_588bb9d7e9b47fd3_extractor_py":{"hash":"98f0f3c22dc5193d428c05241e929215","index":{"url":"z_588bb9d7e9b47fd3_extractor_py.html","file":"kpex\\rcx25\\extractor.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":399,"n_excluded":0,"n_missing":379,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_31e83241eddb0cfa_tech_info_py":{"hash":"8d5aaca2fe0f3f5d270ebb6423a8514e","index":{"url":"z_31e83241eddb0cfa_tech_info_py.html","file":"kpex\\tech_info.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":157,"n_excluded":0,"n_missing":87,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_52482777700ec44a___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_52482777700ec44a___init___py.html","file":"kpex\\util\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_52482777700ec44a_argparse_helpers_py":{"hash":"33a7778effc164bd7511ae8eea6e81e5","index":{"url":"z_52482777700ec44a_argparse_helpers_py.html","file":"kpex\\util\\argparse_helpers.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":20,"n_excluded":0,"n_missing":6,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_52482777700ec44a_multiple_choice_py":{"hash":"7050dcb3cb1323fc51c90a6b1f1d2517","index":{"url":"z_52482777700ec44a_multiple_choice_py.html","file":"kpex\\util\\multiple_choice.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":28,"n_excluded":0,"n_missing":11,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_31e83241eddb0cfa_version_py":{"hash":"d8abb72f98e3990e754d1c5c25bd7ae2","index":{"url":"z_31e83241eddb0cfa_version_py.html","file":"kpex\\version.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":1,"n_excluded":0,"n_missing":0,"n_branches"
:0,"n_partial_branches":0,"n_missing_branches":0}}},"z_a44f0ac069e85531___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_a44f0ac069e85531___init___py.html","file":"tests\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_4c73bcae445d81c6___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_4c73bcae445d81c6___init___py.html","file":"tests\\common\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_4c73bcae445d81c6_capacitance_matrix_test_py":{"hash":"bb2e11c8d290f0c8b88bb96274a3373e","index":{"url":"z_4c73bcae445d81c6_capacitance_matrix_test_py.html","file":"tests\\common\\capacitance_matrix_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":36,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_2a8ddab25760f5a7_fastcap_runner_test_py":{"hash":"ad3381e8eafd5aa9fcf8ce655e5ec47e","index":{"url":"z_2a8ddab25760f5a7_fastcap_runner_test_py.html","file":"tests\\fastcap\\fastcap_runner_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":22,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_48f13015c956926b___init___py":{"hash":"ccb7d60951e0a34fcf73e5d60494ded7","index":{"url":"z_48f13015c956926b___init___py.html","file":"tests\\fastercap\\__init__.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":0,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_48f13015c956926b_fastercap_model_generator_test_py":{"hash":"abb4734c27168c75bc551c3a43dd6e08","index":{"url":"z_48f13015c956926b_fastercap_model_generator_test_py.html","file":"tests\\fastercap\\fastercap_model_generator_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":60,"n_excluded":0,"n_missing":1,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_48f13015c956926b_fastercap_runner_test_py":{"hash":"0040386548a24f5d40496c399dc38d66","index":{"url":"z_48f13015c956926b_fastercap_runner_test_py.html","file":"tests\\fastercap\\fastercap_runner_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":21,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_9747eacc0c5fa802_lvs_runner_test_py":{"hash":"81d930e2b25dbf02ae8bce8ac5fac609","index":{"url":"z_9747eacc0c5fa802_lvs_runner_test_py.html","file":"tests\\klayout\\lvs_runner_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":24,"n_excluded":0,"n_missing":11,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_9747eacc0c5fa802_netlist_expander_test_py":{"hash":"b73ac72651a1edd9eb766405be2bbd61","index":{"url":"z_9747eacc0c5fa802_netlist_expander_test_py.html","file":"tests\\klayout\\netlist_expander_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":37,"n_excluded":0,"n_missing":0,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}},"z_9747eacc0c5fa802_netlist_reducer_test_py":{"hash":"1afe06f84dfcfcd14b33477a72059c0d","index":{"url":"z_9747eacc0c5fa802_netlist_reducer_test_py.html","file":"tests\\klayout\\netlist_reducer_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":33,"n_excluded":0,"n_missing":0,"n_branches":0,
"n_partial_branches":0,"n_missing_branches":0}}},"z_95258413a42419dc_rcx25_test_py":{"hash":"89b2e92edd43a7459916e4eec45c9210","index":{"url":"z_95258413a42419dc_rcx25_test_py.html","file":"tests\\rcx25\\rcx25_test.py","description":"","nums":{"precision":0,"n_files":1,"n_statements":95,"n_excluded":0,"n_missing":17,"n_branches":0,"n_partial_branches":0,"n_missing_branches":0}}}}} \ No newline at end of file diff --git a/pycov/style_cb_718ce007.css b/pycov/style_cb_718ce007.css deleted file mode 100644 index 03046835..00000000 --- a/pycov/style_cb_718ce007.css +++ /dev/null @@ -1,337 +0,0 @@ -@charset "UTF-8"; -/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */ -/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */ -/* Don't edit this .css file. Edit the .scss file instead! */ -html, body, h1, h2, h3, p, table, td, th { margin: 0; padding: 0; border: 0; font-weight: inherit; font-style: inherit; font-size: 100%; font-family: inherit; vertical-align: baseline; } - -body { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; font-size: 1em; background: #fff; color: #000; } - -@media (prefers-color-scheme: dark) { body { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { body { color: #eee; } } - -html > body { font-size: 16px; } - -a:active, a:focus { outline: 2px dashed #007acc; } - -p { font-size: .875em; line-height: 1.4em; } - -table { border-collapse: collapse; } - -td { vertical-align: top; } - -table tr.hidden { display: none !important; } - -p#no_rows { display: none; font-size: 1.15em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; } - -a.nav { text-decoration: none; color: inherit; } - -a.nav:hover { text-decoration: underline; color: inherit; } - -.hidden { display: none; } - -header { background: #f8f8f8; width: 100%; z-index: 2; border-bottom: 1px solid #ccc; } - -@media (prefers-color-scheme: dark) { header { background: black; } } - -@media (prefers-color-scheme: dark) { header { border-color: #333; } } - -header .content { padding: 1rem 3.5rem; } - -header h2 { margin-top: .5em; font-size: 1em; } - -header h2 a.button { font-family: inherit; font-size: inherit; border: 1px solid; border-radius: .2em; background: #eee; color: inherit; text-decoration: none; padding: .1em .5em; margin: 1px calc(.1em + 1px); cursor: pointer; border-color: #ccc; } - -@media (prefers-color-scheme: dark) { header h2 a.button { background: #333; } } - -@media (prefers-color-scheme: dark) { header h2 a.button { border-color: #444; } } - -header h2 a.button.current { border: 2px solid; background: #fff; border-color: #999; cursor: default; } - -@media (prefers-color-scheme: dark) { header h2 a.button.current { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { header h2 a.button.current { border-color: #777; } } - -header p.text { margin: .5em 0 -.5em; color: #666; font-style: italic; } - -@media (prefers-color-scheme: dark) { header p.text { color: #aaa; } } - -header.sticky { position: fixed; left: 0; right: 0; height: 2.5em; } - -header.sticky .text { display: none; } - -header.sticky h1, header.sticky h2 { font-size: 1em; margin-top: 0; display: inline-block; } - -header.sticky .content { padding: 0.5rem 3.5rem; } - -header.sticky .content p { font-size: 1em; } - -header.sticky ~ #source { padding-top: 6.5em; } - -main { position: relative; z-index: 1; } - -footer { margin: 1rem 
3.5rem; } - -footer .content { padding: 0; color: #666; font-style: italic; } - -@media (prefers-color-scheme: dark) { footer .content { color: #aaa; } } - -#index { margin: 1rem 0 0 3.5rem; } - -h1 { font-size: 1.25em; display: inline-block; } - -#filter_container { float: right; margin: 0 2em 0 0; line-height: 1.66em; } - -#filter_container #filter { width: 10em; padding: 0.2em 0.5em; border: 2px solid #ccc; background: #fff; color: #000; } - -@media (prefers-color-scheme: dark) { #filter_container #filter { border-color: #444; } } - -@media (prefers-color-scheme: dark) { #filter_container #filter { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { #filter_container #filter { color: #eee; } } - -#filter_container #filter:focus { border-color: #007acc; } - -#filter_container :disabled ~ label { color: #ccc; } - -@media (prefers-color-scheme: dark) { #filter_container :disabled ~ label { color: #444; } } - -#filter_container label { font-size: .875em; color: #666; } - -@media (prefers-color-scheme: dark) { #filter_container label { color: #aaa; } } - -header button { font-family: inherit; font-size: inherit; border: 1px solid; border-radius: .2em; background: #eee; color: inherit; text-decoration: none; padding: .1em .5em; margin: 1px calc(.1em + 1px); cursor: pointer; border-color: #ccc; } - -@media (prefers-color-scheme: dark) { header button { background: #333; } } - -@media (prefers-color-scheme: dark) { header button { border-color: #444; } } - -header button:active, header button:focus { outline: 2px dashed #007acc; } - -header button.run { background: #eeffee; } - -@media (prefers-color-scheme: dark) { header button.run { background: #373d29; } } - -header button.run.show_run { background: #dfd; border: 2px solid #00dd00; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.run.show_run { background: #373d29; } } - -header button.mis { background: #ffeeee; } - -@media (prefers-color-scheme: dark) { header button.mis { background: #4b1818; } } - -header button.mis.show_mis { background: #fdd; border: 2px solid #ff0000; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.mis.show_mis { background: #4b1818; } } - -header button.exc { background: #f7f7f7; } - -@media (prefers-color-scheme: dark) { header button.exc { background: #333; } } - -header button.exc.show_exc { background: #eee; border: 2px solid #808080; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.exc.show_exc { background: #333; } } - -header button.par { background: #ffffd5; } - -@media (prefers-color-scheme: dark) { header button.par { background: #650; } } - -header button.par.show_par { background: #ffa; border: 2px solid #bbbb00; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.par.show_par { background: #650; } } - -#help_panel, #source p .annotate.long { display: none; position: absolute; z-index: 999; background: #ffffcc; border: 1px solid #888; border-radius: .2em; color: #333; padding: .25em .5em; } - -#source p .annotate.long { white-space: normal; float: right; top: 1.75em; right: 1em; height: auto; } - -#help_panel_wrapper { float: right; position: relative; } - -#keyboard_icon { margin: 5px; } - -#help_panel_state { display: none; } - -#help_panel { top: 25px; right: 0; padding: .75em; border: 1px solid #883; color: #333; } - -#help_panel .keyhelp p { margin-top: .75em; } - -#help_panel .legend { font-style: italic; margin-bottom: 1em; } - -.indexfile #help_panel { width: 25em; } - -.pyfile #help_panel { 
width: 18em; } - -#help_panel_state:checked ~ #help_panel { display: block; } - -kbd { border: 1px solid black; border-color: #888 #333 #333 #888; padding: .1em .35em; font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-weight: bold; background: #eee; border-radius: 3px; } - -#source { padding: 1em 0 1em 3.5rem; font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; } - -#source p { position: relative; white-space: pre; } - -#source p * { box-sizing: border-box; } - -#source p .n { float: left; text-align: right; width: 3.5rem; box-sizing: border-box; margin-left: -3.5rem; padding-right: 1em; color: #999; user-select: none; } - -@media (prefers-color-scheme: dark) { #source p .n { color: #777; } } - -#source p .n.highlight { background: #ffdd00; } - -#source p .n a { scroll-margin-top: 6em; text-decoration: none; color: #999; } - -@media (prefers-color-scheme: dark) { #source p .n a { color: #777; } } - -#source p .n a:hover { text-decoration: underline; color: #999; } - -@media (prefers-color-scheme: dark) { #source p .n a:hover { color: #777; } } - -#source p .t { display: inline-block; width: 100%; box-sizing: border-box; margin-left: -.5em; padding-left: 0.3em; border-left: 0.2em solid #fff; } - -@media (prefers-color-scheme: dark) { #source p .t { border-color: #1e1e1e; } } - -#source p .t:hover { background: #f2f2f2; } - -@media (prefers-color-scheme: dark) { #source p .t:hover { background: #282828; } } - -#source p .t:hover ~ .r .annotate.long { display: block; } - -#source p .t .com { color: #008000; font-style: italic; line-height: 1px; } - -@media (prefers-color-scheme: dark) { #source p .t .com { color: #6a9955; } } - -#source p .t .key { font-weight: bold; line-height: 1px; } - -#source p .t .str { color: #0451a5; } - -@media (prefers-color-scheme: dark) { #source p .t .str { color: #9cdcfe; } } - -#source p.mis .t { border-left: 0.2em solid #ff0000; } - -#source p.mis.show_mis .t { background: #fdd; } - -@media (prefers-color-scheme: dark) { #source p.mis.show_mis .t { background: #4b1818; } } - -#source p.mis.show_mis .t:hover { background: #f2d2d2; } - -@media (prefers-color-scheme: dark) { #source p.mis.show_mis .t:hover { background: #532323; } } - -#source p.run .t { border-left: 0.2em solid #00dd00; } - -#source p.run.show_run .t { background: #dfd; } - -@media (prefers-color-scheme: dark) { #source p.run.show_run .t { background: #373d29; } } - -#source p.run.show_run .t:hover { background: #d2f2d2; } - -@media (prefers-color-scheme: dark) { #source p.run.show_run .t:hover { background: #404633; } } - -#source p.exc .t { border-left: 0.2em solid #808080; } - -#source p.exc.show_exc .t { background: #eee; } - -@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t { background: #333; } } - -#source p.exc.show_exc .t:hover { background: #e2e2e2; } - -@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t:hover { background: #3c3c3c; } } - -#source p.par .t { border-left: 0.2em solid #bbbb00; } - -#source p.par.show_par .t { background: #ffa; } - -@media (prefers-color-scheme: dark) { #source p.par.show_par .t { background: #650; } } - -#source p.par.show_par .t:hover { background: #f2f2a2; } - -@media (prefers-color-scheme: dark) { #source p.par.show_par .t:hover { background: #6d5d0c; } } - -#source p .r { position: absolute; top: 0; right: 2.5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; } - -#source p .annotate { font-family: -apple-system, 
BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; color: #666; padding-right: .5em; } - -@media (prefers-color-scheme: dark) { #source p .annotate { color: #ddd; } } - -#source p .annotate.short:hover ~ .long { display: block; } - -#source p .annotate.long { width: 30em; right: 2.5em; } - -#source p input { display: none; } - -#source p input ~ .r label.ctx { cursor: pointer; border-radius: .25em; } - -#source p input ~ .r label.ctx::before { content: "▶ "; } - -#source p input ~ .r label.ctx:hover { background: #e8f4ff; color: #666; } - -@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { background: #0f3a42; } } - -@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { color: #aaa; } } - -#source p input:checked ~ .r label.ctx { background: #d0e8ff; color: #666; border-radius: .75em .75em 0 0; padding: 0 .5em; margin: -.25em 0; } - -@media (prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { background: #056; } } - -@media (prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { color: #aaa; } } - -#source p input:checked ~ .r label.ctx::before { content: "▼ "; } - -#source p input:checked ~ .ctxs { padding: .25em .5em; overflow-y: scroll; max-height: 10.5em; } - -#source p label.ctx { color: #999; display: inline-block; padding: 0 .5em; font-size: .8333em; } - -@media (prefers-color-scheme: dark) { #source p label.ctx { color: #777; } } - -#source p .ctxs { display: block; max-height: 0; overflow-y: hidden; transition: all .2s; padding: 0 .5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; white-space: nowrap; background: #d0e8ff; border-radius: .25em; margin-right: 1.75em; text-align: right; } - -@media (prefers-color-scheme: dark) { #source p .ctxs { background: #056; } } - -#index { font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-size: 0.875em; } - -#index table.index { margin-left: -.5em; } - -#index td, #index th { text-align: right; padding: .25em .5em; border-bottom: 1px solid #eee; } - -@media (prefers-color-scheme: dark) { #index td, #index th { border-color: #333; } } - -#index td.name, #index th.name { text-align: left; width: auto; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; min-width: 15em; } - -#index th { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; font-style: italic; color: #333; cursor: pointer; } - -@media (prefers-color-scheme: dark) { #index th { color: #ddd; } } - -#index th:hover { background: #eee; } - -@media (prefers-color-scheme: dark) { #index th:hover { background: #333; } } - -#index th .arrows { color: #666; font-size: 85%; font-family: sans-serif; font-style: normal; pointer-events: none; } - -#index th[aria-sort="ascending"], #index th[aria-sort="descending"] { white-space: nowrap; background: #eee; padding-left: .5em; } - -@media (prefers-color-scheme: dark) { #index th[aria-sort="ascending"], #index th[aria-sort="descending"] { background: #333; } } - -#index th[aria-sort="ascending"] .arrows::after { content: " ▲"; } - -#index th[aria-sort="descending"] .arrows::after { content: " ▼"; } - -#index td.name { font-size: 1.15em; } - -#index td.name a { text-decoration: none; color: inherit; } - -#index td.name .no-noun { font-style: italic; } - -#index tr.total td, #index tr.total_dynamic td {
font-weight: bold; border-top: 1px solid #ccc; border-bottom: none; } - -#index tr.region:hover { background: #eee; } - -@media (prefers-color-scheme: dark) { #index tr.region:hover { background: #333; } } - -#index tr.region:hover td.name { text-decoration: underline; color: inherit; } - -#scroll_marker { position: fixed; z-index: 3; right: 0; top: 0; width: 16px; height: 100%; background: #fff; border-left: 1px solid #eee; will-change: transform; } - -@media (prefers-color-scheme: dark) { #scroll_marker { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { #scroll_marker { border-color: #333; } } - -#scroll_marker .marker { background: #ccc; position: absolute; min-height: 3px; width: 100%; } - -@media (prefers-color-scheme: dark) { #scroll_marker .marker { background: #444; } } diff --git a/pycov/style_cb_8e611ae1.css b/pycov/style_cb_8e611ae1.css deleted file mode 100644 index 3cdaf05a..00000000 --- a/pycov/style_cb_8e611ae1.css +++ /dev/null @@ -1,337 +0,0 @@ -@charset "UTF-8"; -/* Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 */ -/* For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt */ -/* Don't edit this .css file. Edit the .scss file instead! */ -html, body, h1, h2, h3, p, table, td, th { margin: 0; padding: 0; border: 0; font-weight: inherit; font-style: inherit; font-size: 100%; font-family: inherit; vertical-align: baseline; } - -body { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; font-size: 1em; background: #fff; color: #000; } - -@media (prefers-color-scheme: dark) { body { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { body { color: #eee; } } - -html > body { font-size: 16px; } - -a:active, a:focus { outline: 2px dashed #007acc; } - -p { font-size: .875em; line-height: 1.4em; } - -table { border-collapse: collapse; } - -td { vertical-align: top; } - -table tr.hidden { display: none !important; } - -p#no_rows { display: none; font-size: 1.15em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; } - -a.nav { text-decoration: none; color: inherit; } - -a.nav:hover { text-decoration: underline; color: inherit; } - -.hidden { display: none; } - -header { background: #f8f8f8; width: 100%; z-index: 2; border-bottom: 1px solid #ccc; } - -@media (prefers-color-scheme: dark) { header { background: black; } } - -@media (prefers-color-scheme: dark) { header { border-color: #333; } } - -header .content { padding: 1rem 3.5rem; } - -header h2 { margin-top: .5em; font-size: 1em; } - -header h2 a.button { font-family: inherit; font-size: inherit; border: 1px solid; border-radius: .2em; background: #eee; color: inherit; text-decoration: none; padding: .1em .5em; margin: 1px calc(.1em + 1px); cursor: pointer; border-color: #ccc; } - -@media (prefers-color-scheme: dark) { header h2 a.button { background: #333; } } - -@media (prefers-color-scheme: dark) { header h2 a.button { border-color: #444; } } - -header h2 a.button.current { border: 2px solid; background: #fff; border-color: #999; cursor: default; } - -@media (prefers-color-scheme: dark) { header h2 a.button.current { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { header h2 a.button.current { border-color: #777; } } - -header p.text { margin: .5em 0 -.5em; color: #666; font-style: italic; } - -@media (prefers-color-scheme: dark) { header p.text { color: #aaa; } } - -header.sticky { position: fixed; 
left: 0; right: 0; height: 2.5em; } - -header.sticky .text { display: none; } - -header.sticky h1, header.sticky h2 { font-size: 1em; margin-top: 0; display: inline-block; } - -header.sticky .content { padding: 0.5rem 3.5rem; } - -header.sticky .content p { font-size: 1em; } - -header.sticky ~ #source { padding-top: 6.5em; } - -main { position: relative; z-index: 1; } - -footer { margin: 1rem 3.5rem; } - -footer .content { padding: 0; color: #666; font-style: italic; } - -@media (prefers-color-scheme: dark) { footer .content { color: #aaa; } } - -#index { margin: 1rem 0 0 3.5rem; } - -h1 { font-size: 1.25em; display: inline-block; } - -#filter_container { float: right; margin: 0 2em 0 0; line-height: 1.66em; } - -#filter_container #filter { width: 10em; padding: 0.2em 0.5em; border: 2px solid #ccc; background: #fff; color: #000; } - -@media (prefers-color-scheme: dark) { #filter_container #filter { border-color: #444; } } - -@media (prefers-color-scheme: dark) { #filter_container #filter { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { #filter_container #filter { color: #eee; } } - -#filter_container #filter:focus { border-color: #007acc; } - -#filter_container :disabled ~ label { color: #ccc; } - -@media (prefers-color-scheme: dark) { #filter_container :disabled ~ label { color: #444; } } - -#filter_container label { font-size: .875em; color: #666; } - -@media (prefers-color-scheme: dark) { #filter_container label { color: #aaa; } } - -header button { font-family: inherit; font-size: inherit; border: 1px solid; border-radius: .2em; background: #eee; color: inherit; text-decoration: none; padding: .1em .5em; margin: 1px calc(.1em + 1px); cursor: pointer; border-color: #ccc; } - -@media (prefers-color-scheme: dark) { header button { background: #333; } } - -@media (prefers-color-scheme: dark) { header button { border-color: #444; } } - -header button:active, header button:focus { outline: 2px dashed #007acc; } - -header button.run { background: #eeffee; } - -@media (prefers-color-scheme: dark) { header button.run { background: #373d29; } } - -header button.run.show_run { background: #dfd; border: 2px solid #00dd00; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.run.show_run { background: #373d29; } } - -header button.mis { background: #ffeeee; } - -@media (prefers-color-scheme: dark) { header button.mis { background: #4b1818; } } - -header button.mis.show_mis { background: #fdd; border: 2px solid #ff0000; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.mis.show_mis { background: #4b1818; } } - -header button.exc { background: #f7f7f7; } - -@media (prefers-color-scheme: dark) { header button.exc { background: #333; } } - -header button.exc.show_exc { background: #eee; border: 2px solid #808080; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.exc.show_exc { background: #333; } } - -header button.par { background: #ffffd5; } - -@media (prefers-color-scheme: dark) { header button.par { background: #650; } } - -header button.par.show_par { background: #ffa; border: 2px solid #bbbb00; margin: 0 .1em; } - -@media (prefers-color-scheme: dark) { header button.par.show_par { background: #650; } } - -#help_panel, #source p .annotate.long { display: none; position: absolute; z-index: 999; background: #ffffcc; border: 1px solid #888; border-radius: .2em; color: #333; padding: .25em .5em; } - -#source p .annotate.long { white-space: normal; float: right; top: 1.75em; right: 1em; height: auto; } - 
-#help_panel_wrapper { float: right; position: relative; } - -#keyboard_icon { margin: 5px; } - -#help_panel_state { display: none; } - -#help_panel { top: 25px; right: 0; padding: .75em; border: 1px solid #883; color: #333; } - -#help_panel .keyhelp p { margin-top: .75em; } - -#help_panel .legend { font-style: italic; margin-bottom: 1em; } - -.indexfile #help_panel { width: 25em; } - -.pyfile #help_panel { width: 18em; } - -#help_panel_state:checked ~ #help_panel { display: block; } - -kbd { border: 1px solid black; border-color: #888 #333 #333 #888; padding: .1em .35em; font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-weight: bold; background: #eee; border-radius: 3px; } - -#source { padding: 1em 0 1em 3.5rem; font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; } - -#source p { position: relative; white-space: pre; } - -#source p * { box-sizing: border-box; } - -#source p .n { float: left; text-align: right; width: 3.5rem; box-sizing: border-box; margin-left: -3.5rem; padding-right: 1em; color: #999; user-select: none; } - -@media (prefers-color-scheme: dark) { #source p .n { color: #777; } } - -#source p .n.highlight { background: #ffdd00; } - -#source p .n a { scroll-margin-top: 6em; text-decoration: none; color: #999; } - -@media (prefers-color-scheme: dark) { #source p .n a { color: #777; } } - -#source p .n a:hover { text-decoration: underline; color: #999; } - -@media (prefers-color-scheme: dark) { #source p .n a:hover { color: #777; } } - -#source p .t { display: inline-block; width: 100%; box-sizing: border-box; margin-left: -.5em; padding-left: 0.3em; border-left: 0.2em solid #fff; } - -@media (prefers-color-scheme: dark) { #source p .t { border-color: #1e1e1e; } } - -#source p .t:hover { background: #f2f2f2; } - -@media (prefers-color-scheme: dark) { #source p .t:hover { background: #282828; } } - -#source p .t:hover ~ .r .annotate.long { display: block; } - -#source p .t .com { color: #008000; font-style: italic; line-height: 1px; } - -@media (prefers-color-scheme: dark) { #source p .t .com { color: #6a9955; } } - -#source p .t .key { font-weight: bold; line-height: 1px; } - -#source p .t .str { color: #0451a5; } - -@media (prefers-color-scheme: dark) { #source p .t .str { color: #9cdcfe; } } - -#source p.mis .t { border-left: 0.2em solid #ff0000; } - -#source p.mis.show_mis .t { background: #fdd; } - -@media (prefers-color-scheme: dark) { #source p.mis.show_mis .t { background: #4b1818; } } - -#source p.mis.show_mis .t:hover { background: #f2d2d2; } - -@media (prefers-color-scheme: dark) { #source p.mis.show_mis .t:hover { background: #532323; } } - -#source p.run .t { border-left: 0.2em solid #00dd00; } - -#source p.run.show_run .t { background: #dfd; } - -@media (prefers-color-scheme: dark) { #source p.run.show_run .t { background: #373d29; } } - -#source p.run.show_run .t:hover { background: #d2f2d2; } - -@media (prefers-color-scheme: dark) { #source p.run.show_run .t:hover { background: #404633; } } - -#source p.exc .t { border-left: 0.2em solid #808080; } - -#source p.exc.show_exc .t { background: #eee; } - -@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t { background: #333; } } - -#source p.exc.show_exc .t:hover { background: #e2e2e2; } - -@media (prefers-color-scheme: dark) { #source p.exc.show_exc .t:hover { background: #3c3c3c; } } - -#source p.par .t { border-left: 0.2em solid #bbbb00; } - -#source p.par.show_par .t { background: #ffa; } - -@media (prefers-color-scheme: dark) { #source p.par.show_par .t { background: 
#650; } } - -#source p.par.show_par .t:hover { background: #f2f2a2; } - -@media (prefers-color-scheme: dark) { #source p.par.show_par .t:hover { background: #6d5d0c; } } - -#source p .r { position: absolute; top: 0; right: 2.5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; } - -#source p .annotate { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; color: #666; padding-right: .5em; } - -@media (prefers-color-scheme: dark) { #source p .annotate { color: #ddd; } } - -#source p .annotate.short:hover ~ .long { display: block; } - -#source p .annotate.long { width: 30em; right: 2.5em; } - -#source p input { display: none; } - -#source p input ~ .r label.ctx { cursor: pointer; border-radius: .25em; } - -#source p input ~ .r label.ctx::before { content: "â–¶ "; } - -#source p input ~ .r label.ctx:hover { background: #e8f4ff; color: #666; } - -@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { background: #0f3a42; } } - -@media (prefers-color-scheme: dark) { #source p input ~ .r label.ctx:hover { color: #aaa; } } - -#source p input:checked ~ .r label.ctx { background: #d0e8ff; color: #666; border-radius: .75em .75em 0 0; padding: 0 .5em; margin: -.25em 0; } - -@media (prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { background: #056; } } - -@media (prefers-color-scheme: dark) { #source p input:checked ~ .r label.ctx { color: #aaa; } } - -#source p input:checked ~ .r label.ctx::before { content: "â–¼ "; } - -#source p input:checked ~ .ctxs { padding: .25em .5em; overflow-y: scroll; max-height: 10.5em; } - -#source p label.ctx { color: #999; display: inline-block; padding: 0 .5em; font-size: .8333em; } - -@media (prefers-color-scheme: dark) { #source p label.ctx { color: #777; } } - -#source p .ctxs { display: block; max-height: 0; overflow-y: hidden; transition: all .2s; padding: 0 .5em; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; white-space: nowrap; background: #d0e8ff; border-radius: .25em; margin-right: 1.75em; text-align: right; } - -@media (prefers-color-scheme: dark) { #source p .ctxs { background: #056; } } - -#index { font-family: SFMono-Regular, Menlo, Monaco, Consolas, monospace; font-size: 0.875em; } - -#index table.index { margin-left: -.5em; } - -#index td, #index th { text-align: right; padding: .25em .5em; border-bottom: 1px solid #eee; } - -@media (prefers-color-scheme: dark) { #index td, #index th { border-color: #333; } } - -#index td.name, #index th.name { text-align: left; width: auto; font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; min-width: 15em; } - -#index th { font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Ubuntu, Cantarell, "Helvetica Neue", sans-serif; font-style: italic; color: #333; cursor: pointer; } - -@media (prefers-color-scheme: dark) { #index th { color: #ddd; } } - -#index th:hover { background: #eee; } - -@media (prefers-color-scheme: dark) { #index th:hover { background: #333; } } - -#index th .arrows { color: #666; font-size: 85%; font-family: sans-serif; font-style: normal; pointer-events: none; } - -#index th[aria-sort="ascending"], #index th[aria-sort="descending"] { white-space: nowrap; background: #eee; padding-left: .5em; } - -@media (prefers-color-scheme: dark) { #index th[aria-sort="ascending"], #index 
th[aria-sort="descending"] { background: #333; } } - -#index th[aria-sort="ascending"] .arrows::after { content: " ▲"; } - -#index th[aria-sort="descending"] .arrows::after { content: " ▼"; } - -#index td.name { font-size: 1.15em; } - -#index td.name a { text-decoration: none; color: inherit; } - -#index td.name .no-noun { font-style: italic; } - -#index tr.total td, #index tr.total_dynamic td { font-weight: bold; border-top: 1px solid #ccc; border-bottom: none; } - -#index tr.region:hover { background: #eee; } - -@media (prefers-color-scheme: dark) { #index tr.region:hover { background: #333; } } - -#index tr.region:hover td.name { text-decoration: underline; color: inherit; } - -#scroll_marker { position: fixed; z-index: 3; right: 0; top: 0; width: 16px; height: 100%; background: #fff; border-left: 1px solid #eee; will-change: transform; } - -@media (prefers-color-scheme: dark) { #scroll_marker { background: #1e1e1e; } } - -@media (prefers-color-scheme: dark) { #scroll_marker { border-color: #333; } } - -#scroll_marker .marker { background: #ccc; position: absolute; min-height: 3px; width: 100%; } - -@media (prefers-color-scheme: dark) { #scroll_marker .marker { background: #444; } } diff --git a/pycov/z_143e04ff0a847ff6___init___py.html b/pycov/z_143e04ff0a847ff6___init___py.html deleted file mode 100644 index 0a42fb8c..00000000 --- a/pycov/z_143e04ff0a847ff6___init___py.html +++ /dev/null @@ -1,120 +0,0 @@ - - - - - Coverage for kpex/util/__init__.py: 100% - - - - - -
-
-

- Coverage for kpex/util/__init__.py: - 100% -

- -

- 0 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-
- - - diff --git a/pycov/z_143e04ff0a847ff6_argparse_helpers_py.html b/pycov/z_143e04ff0a847ff6_argparse_helpers_py.html deleted file mode 100644 index cbf758f2..00000000 --- a/pycov/z_143e04ff0a847ff6_argparse_helpers_py.html +++ /dev/null @@ -1,149 +0,0 @@ - - - - - Coverage for kpex/util/argparse_helpers.py: 70% - - - - - -
-
-

- Coverage for kpex/util/argparse_helpers.py: - 70% -

- -

- 20 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1#! /usr/bin/env python3 

-

2# 

-

3# -------------------------------------------------------------------------------- 

-

4# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

5# Johannes Kepler University, Institute for Integrated Circuits. 

-

6# 

-

7# This file is part of KPEX  

-

8# (see https://github.com/martinjankoehler/klayout-pex). 

-

9# 

-

10# This program is free software: you can redistribute it and/or modify 

-

11# it under the terms of the GNU General Public License as published by 

-

12# the Free Software Foundation, either version 3 of the License, or 

-

13# (at your option) any later version. 

-

14# 

-

15# This program is distributed in the hope that it will be useful, 

-

16# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

18# GNU General Public License for more details. 

-

19# 

-

20# You should have received a copy of the GNU General Public License 

-

21# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

22# SPDX-License-Identifier: GPL-3.0-or-later 

-

23# -------------------------------------------------------------------------------- 

-

24# 

-

25 

-

26import argparse 

-

27from enum import Enum 

-

28from typing import * 

-

29 

-

30 

-

31def render_enum_help(topic: str, 

-

32 enum_cls: Type[Enum], 

-

33 print_default: bool = True) -> str: 

-

34 if not hasattr(enum_cls, 'DEFAULT'): 

-

35 raise ValueError("Enum must declare case 'DEFAULT'") 

-

36 enum_help = f"{topic}{set([name.lower() for name, member in enum_cls.__members__.items()])}" 

-

37 if print_default: 

-

38 enum_help += f".\nDefaults to '{enum_cls.DEFAULT.name.lower()}'" 

-

39 return enum_help 

-

40 

-

41 

-

42def true_or_false(arg) -> bool: 

-

43 if isinstance(arg, bool): 

-

44 return arg 

-

45 

-

46 match str(arg).lower(): 

-

47 case 'yes' | 'true' | 't' | 'y' | 1: 

-

48 return True 

-

49 case 'no' | 'false' | 'f' | 'n' | 0: 

-

50 return False 

-

51 case _: 

-

52 raise argparse.ArgumentTypeError('Boolean value expected.') 

-
- - - diff --git a/pycov/z_143e04ff0a847ff6_multiple_choice_py.html b/pycov/z_143e04ff0a847ff6_multiple_choice_py.html deleted file mode 100644 index 988dece7..00000000 --- a/pycov/z_143e04ff0a847ff6_multiple_choice_py.html +++ /dev/null @@ -1,167 +0,0 @@ - - - - - Coverage for kpex/util/multiple_choice.py: 61% - - - - - -
-
-

- Coverage for kpex/util/multiple_choice.py: - 61% -

- -

- 28 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from __future__ import annotations 

-

25from functools import cached_property 

-

26from typing import * 

-

27 

-

28 

-

29class MultipleChoicePattern: 

-

30 def __init__(self, pattern: str): 

-

31 """ 

-

32 Multiple Choice pattern, allows blacklisting and whitelisting. 

-

33 For example, given a list of dielectric, let the user decide which of them to include or exclude. 

-

34 Allowed patterns: 

-

35 - all (default): complete list of choices included 

-

36 - none: no choices included at all 

-

37 - +dielname: include choice named 'dielname' 

-

38 - -dielname: exclude choice named 'dielname' 

-

39 Examples: 

-

40 - all,-nild5,-nild6 

-

41 - include all dielectrics except nild5 and nild6 

-

42 - none,+nild5,+capild 

-

43 - include only dielectrics named nild5 and capild 

-

44 """ 

-

45 self.pattern = pattern 

-

46 

-

47 components = pattern.split(sep=',') 

-

48 components = [c.lower().strip() for c in components] 

-

49 self.has_all = 'all' in components 

-

50 self.has_none = 'none' in components 

-

51 self.included = [c[1:] for c in components if c.startswith('+')] 

-

52 self.excluded = [c[1:] for c in components if c.startswith('-')] 

-

53 if self.has_none and self.has_all: 

-

54 raise ValueError("Multiple choice pattern can't have both subpatterns all and none") 

-

55 if self.has_none and len(self.excluded) >= 1: 

-

56 raise ValueError("Multiple choice pattern based on none can only have inclusive (+) subpatterns") 

-

57 if self.has_all and len(self.included) >= 1: 

-

58 raise ValueError("Multiple choice pattern based on all can only have exclusive (-) subpatterns") 

-

59 

-

60 def filter(self, choices: List[str]) -> List[str]: 

-

61 if self.has_all: 

-

62 return [c for c in choices if c not in self.excluded] 

-

63 return [c for c in choices if c in self.included] 

-

64 

-

65 def is_included(self, choice: str) -> bool: 

-

66 if self.has_none: 

-

67 return choice in self.included 

-

68 if self.has_all: 

-

69 return choice not in self.excluded 

-

70 return False 

-
- - - diff --git a/pycov/z_1e5163ca105fd3f6_process_parasitics_pb2_py.html b/pycov/z_1e5163ca105fd3f6_process_parasitics_pb2_py.html deleted file mode 100644 index 0c581806..00000000 --- a/pycov/z_1e5163ca105fd3f6_process_parasitics_pb2_py.html +++ /dev/null @@ -1,151 +0,0 @@ - - - - - Coverage for build/python_kpex_protobuf/process_parasitics_pb2.py: 36% - - - - - -
-
-

- Coverage for build/python_kpex_protobuf/process_parasitics_pb2.py: - 36% -

- -

- 33 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# -*- coding: utf-8 -*- 

-

2# Generated by the protocol buffer compiler. DO NOT EDIT! 

-

3# NO CHECKED-IN PROTOBUF GENCODE 

-

4# source: process_parasitics.proto 

-

5# Protobuf Python Version: 5.29.0 

-

6"""Generated protocol buffer code.""" 

-

7from google.protobuf import descriptor as _descriptor 

-

8from google.protobuf import descriptor_pool as _descriptor_pool 

-

9from google.protobuf import runtime_version as _runtime_version 

-

10from google.protobuf import symbol_database as _symbol_database 

-

11from google.protobuf.internal import builder as _builder 

-

12_runtime_version.ValidateProtobufRuntimeVersion( 

-

13 _runtime_version.Domain.PUBLIC, 

-

14 5, 

-

15 29, 

-

16 0, 

-

17 '', 

-

18 'process_parasitics.proto' 

-

19) 

-

20# @@protoc_insertion_point(imports) 

-

21 

-

22_sym_db = _symbol_database.Default() 

-

23 

-

24 

-

25 

-

26 

-

27DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x18process_parasitics.proto\x12\tkpex.tech\"\x8a\x01\n\x15ProcessParasiticsInfo\x12\x11\n\tside_halo\x18\n \x01(\x05\x12-\n\nresistance\x18n \x01(\x0b\x32\x19.kpex.tech.ResistanceInfo\x12/\n\x0b\x63\x61pacitance\x18o \x01(\x0b\x32\x1a.kpex.tech.CapacitanceInfo\"\x98\x02\n\x0eResistanceInfo\x12\x39\n\x06layers\x18\x01 \x03(\x0b\x32).kpex.tech.ResistanceInfo.LayerResistance\x12\x35\n\x04vias\x18\x02 \x03(\x0b\x32\'.kpex.tech.ResistanceInfo.ViaResistance\x1a]\n\x0fLayerResistance\x12\x12\n\nlayer_name\x18\x01 \x01(\t\x12\x12\n\nresistance\x18\x02 \x01(\x01\x12\"\n\x1a\x63orner_adjustment_fraction\x18\x03 \x01(\x01\x1a\x35\n\rViaResistance\x12\x10\n\x08via_name\x18\x01 \x01(\t\x12\x12\n\nresistance\x18\x02 \x01(\x01\"\x98\x05\n\x0f\x43\x61pacitanceInfo\x12\x44\n\nsubstrates\x18\xc8\x01 \x03(\x0b\x32/.kpex.tech.CapacitanceInfo.SubstrateCapacitance\x12@\n\x08overlaps\x18\xc9\x01 \x03(\x0b\x32-.kpex.tech.CapacitanceInfo.OverlapCapacitance\x12\x42\n\tsidewalls\x18\xca\x01 \x03(\x0b\x32..kpex.tech.CapacitanceInfo.SidewallCapacitance\x12H\n\x0csideoverlaps\x18\xcb\x01 \x03(\x0b\x32\x31.kpex.tech.CapacitanceInfo.SideOverlapCapacitance\x1a\x63\n\x14SubstrateCapacitance\x12\x12\n\nlayer_name\x18\x01 \x01(\t\x12\x18\n\x10\x61rea_capacitance\x18\x02 \x01(\x01\x12\x1d\n\x15perimeter_capacitance\x18\x03 \x01(\x01\x1a\\\n\x12OverlapCapacitance\x12\x16\n\x0etop_layer_name\x18\x01 \x01(\t\x12\x19\n\x11\x62ottom_layer_name\x18\x02 \x01(\t\x12\x13\n\x0b\x63\x61pacitance\x18\x03 \x01(\x01\x1aN\n\x13SidewallCapacitance\x12\x12\n\nlayer_name\x18\x01 \x01(\t\x12\x13\n\x0b\x63\x61pacitance\x18\x02 \x01(\x01\x12\x0e\n\x06offset\x18\x03 \x01(\x01\x1a\\\n\x16SideOverlapCapacitance\x12\x15\n\rin_layer_name\x18\x01 \x01(\t\x12\x16\n\x0eout_layer_name\x18\x02 \x01(\t\x12\x13\n\x0b\x63\x61pacitance\x18\x03 \x01(\x01\"\x0e\n\x0cStyleVariantb\x06proto3') 

-

28 

-

29_globals = globals() 

-

30_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 

-

31_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'process_parasitics_pb2', _globals) 

-

32if not _descriptor._USE_C_DESCRIPTORS: 

-

33 DESCRIPTOR._loaded_options = None 

-

34 _globals['_PROCESSPARASITICSINFO']._serialized_start=40 

-

35 _globals['_PROCESSPARASITICSINFO']._serialized_end=178 

-

36 _globals['_RESISTANCEINFO']._serialized_start=181 

-

37 _globals['_RESISTANCEINFO']._serialized_end=461 

-

38 _globals['_RESISTANCEINFO_LAYERRESISTANCE']._serialized_start=313 

-

39 _globals['_RESISTANCEINFO_LAYERRESISTANCE']._serialized_end=406 

-

40 _globals['_RESISTANCEINFO_VIARESISTANCE']._serialized_start=408 

-

41 _globals['_RESISTANCEINFO_VIARESISTANCE']._serialized_end=461 

-

42 _globals['_CAPACITANCEINFO']._serialized_start=464 

-

43 _globals['_CAPACITANCEINFO']._serialized_end=1128 

-

44 _globals['_CAPACITANCEINFO_SUBSTRATECAPACITANCE']._serialized_start=761 

-

45 _globals['_CAPACITANCEINFO_SUBSTRATECAPACITANCE']._serialized_end=860 

-

46 _globals['_CAPACITANCEINFO_OVERLAPCAPACITANCE']._serialized_start=862 

-

47 _globals['_CAPACITANCEINFO_OVERLAPCAPACITANCE']._serialized_end=954 

-

48 _globals['_CAPACITANCEINFO_SIDEWALLCAPACITANCE']._serialized_start=956 

-

49 _globals['_CAPACITANCEINFO_SIDEWALLCAPACITANCE']._serialized_end=1034 

-

50 _globals['_CAPACITANCEINFO_SIDEOVERLAPCAPACITANCE']._serialized_start=1036 

-

51 _globals['_CAPACITANCEINFO_SIDEOVERLAPCAPACITANCE']._serialized_end=1128 

-

52 _globals['_STYLEVARIANT']._serialized_start=1130 

-

53 _globals['_STYLEVARIANT']._serialized_end=1144 

-

54# @@protoc_insertion_point(module_scope) 

-
- - - diff --git a/pycov/z_1e5163ca105fd3f6_process_stack_pb2_py.html b/pycov/z_1e5163ca105fd3f6_process_stack_pb2_py.html deleted file mode 100644 index eafc4b50..00000000 --- a/pycov/z_1e5163ca105fd3f6_process_stack_pb2_py.html +++ /dev/null @@ -1,155 +0,0 @@ - - - - - Coverage for build/python_kpex_protobuf/process_stack_pb2.py: 32% - - - - - -
-
-

- Coverage for build/python_kpex_protobuf/process_stack_pb2.py: - 32% -

- -

- 37 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# -*- coding: utf-8 -*- 

-

2# Generated by the protocol buffer compiler. DO NOT EDIT! 

-

3# NO CHECKED-IN PROTOBUF GENCODE 

-

4# source: process_stack.proto 

-

5# Protobuf Python Version: 5.29.0 

-

6"""Generated protocol buffer code.""" 

-

7from google.protobuf import descriptor as _descriptor 

-

8from google.protobuf import descriptor_pool as _descriptor_pool 

-

9from google.protobuf import runtime_version as _runtime_version 

-

10from google.protobuf import symbol_database as _symbol_database 

-

11from google.protobuf.internal import builder as _builder 

-

12_runtime_version.ValidateProtobufRuntimeVersion( 

-

13 _runtime_version.Domain.PUBLIC, 

-

14 5, 

-

15 29, 

-

16 0, 

-

17 '', 

-

18 'process_stack.proto' 

-

19) 

-

20# @@protoc_insertion_point(imports) 

-

21 

-

22_sym_db = _symbol_database.Default() 

-

23 

-

24 

-

25 

-

26 

-

27DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x13process_stack.proto\x12\tkpex.tech\"\xb5\x0f\n\x10ProcessStackInfo\x12\x35\n\x06layers\x18\x64 \x03(\x0b\x32%.kpex.tech.ProcessStackInfo.LayerInfo\x1a?\n\x07\x43ontact\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0bmetal_above\x18\n \x01(\t\x12\x11\n\tthickness\x18\x14 \x01(\x01\x1a\x46\n\x0eSubstrateLayer\x12\x0e\n\x06height\x18\x01 \x01(\x01\x12\x11\n\tthickness\x18\x02 \x01(\x01\x12\x11\n\treference\x18\x1e \x01(\t\x1ak\n\nNWellLayer\x12\x0e\n\x06height\x18\x01 \x01(\x01\x12\x11\n\treference\x18\x1e \x01(\t\x12:\n\rcontact_above\x18( \x01(\x0b\x32#.kpex.tech.ProcessStackInfo.Contact\x1ao\n\x0e\x44iffusionLayer\x12\x0e\n\x06height\x18\x01 \x01(\x01\x12\x11\n\treference\x18\x1e \x01(\t\x12:\n\rcontact_above\x18( \x01(\x0b\x32#.kpex.tech.ProcessStackInfo.Contact\x1a\'\n\x0f\x46ieldOxideLayer\x12\x14\n\x0c\x64ielectric_k\x18\n \x01(\x01\x1a@\n\x15SimpleDielectricLayer\x12\x14\n\x0c\x64ielectric_k\x18\n \x01(\x01\x12\x11\n\treference\x18\x1e \x01(\t\x1a\x9f\x01\n\x18\x43onformalDielectricLayer\x12\x14\n\x0c\x64ielectric_k\x18\n \x01(\x01\x12\x1c\n\x14thickness_over_metal\x18\x14 \x01(\x01\x12 \n\x18thickness_where_no_metal\x18\x15 \x01(\x01\x12\x1a\n\x12thickness_sidewall\x18\x16 \x01(\x01\x12\x11\n\treference\x18\x1e \x01(\t\x1a~\n\x17SidewallDielectricLayer\x12\x14\n\x0c\x64ielectric_k\x18\n \x01(\x01\x12\x1a\n\x12height_above_metal\x18\x14 \x01(\x01\x12\x1e\n\x16width_outside_sidewall\x18\x15 \x01(\x01\x12\x11\n\treference\x18\x1e \x01(\t\x1a\x9d\x01\n\nMetalLayer\x12\x0e\n\x06height\x18\x01 \x01(\x01\x12\x11\n\tthickness\x18\x02 \x01(\x01\x12\x17\n\x0freference_below\x18\x1e \x01(\t\x12\x17\n\x0freference_above\x18\x1f \x01(\t\x12:\n\rcontact_above\x18( \x01(\x0b\x32#.kpex.tech.ProcessStackInfo.Contact\x1a\xc4\x05\n\tLayerInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x39\n\nlayer_type\x18\x02 \x01(\x0e\x32%.kpex.tech.ProcessStackInfo.LayerType\x12\x45\n\x0fsubstrate_layer\x18Z \x01(\x0b\x32*.kpex.tech.ProcessStackInfo.SubstrateLayerH\x00\x12=\n\x0bnwell_layer\x18\t \x01(\x0b\x32&.kpex.tech.ProcessStackInfo.NWellLayerH\x00\x12\x45\n\x0f\x64iffusion_layer\x18\n \x01(\x0b\x32*.kpex.tech.ProcessStackInfo.DiffusionLayerH\x00\x12H\n\x11\x66ield_oxide_layer\x18\x0b \x01(\x0b\x32+.kpex.tech.ProcessStackInfo.FieldOxideLayerH\x00\x12T\n\x17simple_dielectric_layer\x18\x0c \x01(\x0b\x32\x31.kpex.tech.ProcessStackInfo.SimpleDielectricLayerH\x00\x12Z\n\x1a\x63onformal_dielectric_layer\x18\r \x01(\x0b\x32\x34.kpex.tech.ProcessStackInfo.ConformalDielectricLayerH\x00\x12X\n\x19sidewall_dielectric_layer\x18\x0e \x01(\x0b\x32\x33.kpex.tech.ProcessStackInfo.SidewallDielectricLayerH\x00\x12=\n\x0bmetal_layer\x18\x0f \x01(\x0b\x32&.kpex.tech.ProcessStackInfo.MetalLayerH\x00\x42\x0c\n\nparameters\"\x8e\x02\n\tLayerType\x12\x1a\n\x16LAYER_TYPE_UNSPECIFIED\x10\x00\x12\x18\n\x14LAYER_TYPE_SUBSTRATE\x10\n\x12\x14\n\x10LAYER_TYPE_NWELL\x10\x14\x12\x18\n\x14LAYER_TYPE_DIFFUSION\x10\x1e\x12\x1a\n\x16LAYER_TYPE_FIELD_OXIDE\x10(\x12 \n\x1cLAYER_TYPE_SIMPLE_DIELECTRIC\x10\x32\x12#\n\x1fLAYER_TYPE_CONFORMAL_DIELECTRIC\x10<\x12\"\n\x1eLAYER_TYPE_SIDEWALL_DIELECTRIC\x10\x46\x12\x14\n\x10LAYER_TYPE_METAL\x10Pb\x06proto3') 

-

28 

-

29_globals = globals() 

-

30_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 

-

31_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'process_stack_pb2', _globals) 

-

32if not _descriptor._USE_C_DESCRIPTORS: 

-

33 DESCRIPTOR._loaded_options = None 

-

34 _globals['_PROCESSSTACKINFO']._serialized_start=35 

-

35 _globals['_PROCESSSTACKINFO']._serialized_end=2008 

-

36 _globals['_PROCESSSTACKINFO_CONTACT']._serialized_start=110 

-

37 _globals['_PROCESSSTACKINFO_CONTACT']._serialized_end=173 

-

38 _globals['_PROCESSSTACKINFO_SUBSTRATELAYER']._serialized_start=175 

-

39 _globals['_PROCESSSTACKINFO_SUBSTRATELAYER']._serialized_end=245 

-

40 _globals['_PROCESSSTACKINFO_NWELLLAYER']._serialized_start=247 

-

41 _globals['_PROCESSSTACKINFO_NWELLLAYER']._serialized_end=354 

-

42 _globals['_PROCESSSTACKINFO_DIFFUSIONLAYER']._serialized_start=356 

-

43 _globals['_PROCESSSTACKINFO_DIFFUSIONLAYER']._serialized_end=467 

-

44 _globals['_PROCESSSTACKINFO_FIELDOXIDELAYER']._serialized_start=469 

-

45 _globals['_PROCESSSTACKINFO_FIELDOXIDELAYER']._serialized_end=508 

-

46 _globals['_PROCESSSTACKINFO_SIMPLEDIELECTRICLAYER']._serialized_start=510 

-

47 _globals['_PROCESSSTACKINFO_SIMPLEDIELECTRICLAYER']._serialized_end=574 

-

48 _globals['_PROCESSSTACKINFO_CONFORMALDIELECTRICLAYER']._serialized_start=577 

-

49 _globals['_PROCESSSTACKINFO_CONFORMALDIELECTRICLAYER']._serialized_end=736 

-

50 _globals['_PROCESSSTACKINFO_SIDEWALLDIELECTRICLAYER']._serialized_start=738 

-

51 _globals['_PROCESSSTACKINFO_SIDEWALLDIELECTRICLAYER']._serialized_end=864 

-

52 _globals['_PROCESSSTACKINFO_METALLAYER']._serialized_start=867 

-

53 _globals['_PROCESSSTACKINFO_METALLAYER']._serialized_end=1024 

-

54 _globals['_PROCESSSTACKINFO_LAYERINFO']._serialized_start=1027 

-

55 _globals['_PROCESSSTACKINFO_LAYERINFO']._serialized_end=1735 

-

56 _globals['_PROCESSSTACKINFO_LAYERTYPE']._serialized_start=1738 

-

57 _globals['_PROCESSSTACKINFO_LAYERTYPE']._serialized_end=2008 

-

58# @@protoc_insertion_point(module_scope) 

-
- - - diff --git a/pycov/z_1e5163ca105fd3f6_tech_pb2_py.html b/pycov/z_1e5163ca105fd3f6_tech_pb2_py.html deleted file mode 100644 index 39f4204d..00000000 --- a/pycov/z_1e5163ca105fd3f6_tech_pb2_py.html +++ /dev/null @@ -1,141 +0,0 @@ - - - - - Coverage for build/python_kpex_protobuf/tech_pb2.py: 61% - - - - - -
-
-

- Coverage for build/python_kpex_protobuf/tech_pb2.py: - 61% -

- -

- 23 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# -*- coding: utf-8 -*- 

-

2# Generated by the protocol buffer compiler. DO NOT EDIT! 

-

3# NO CHECKED-IN PROTOBUF GENCODE 

-

4# source: tech.proto 

-

5# Protobuf Python Version: 5.29.0 

-

6"""Generated protocol buffer code.""" 

-

7from google.protobuf import descriptor as _descriptor 

-

8from google.protobuf import descriptor_pool as _descriptor_pool 

-

9from google.protobuf import runtime_version as _runtime_version 

-

10from google.protobuf import symbol_database as _symbol_database 

-

11from google.protobuf.internal import builder as _builder 

-

12_runtime_version.ValidateProtobufRuntimeVersion( 

-

13 _runtime_version.Domain.PUBLIC, 

-

14 5, 

-

15 29, 

-

16 0, 

-

17 '', 

-

18 'tech.proto' 

-

19) 

-

20# @@protoc_insertion_point(imports) 

-

21 

-

22_sym_db = _symbol_database.Default() 

-

23 

-

24 

-

25import process_stack_pb2 as process__stack__pb2 

-

26import process_parasitics_pb2 as process__parasitics__pb2 

-

27 

-

28 

-

29DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\ntech.proto\x12\tkpex.tech\x1a\x13process_stack.proto\x1a\x18process_parasitics.proto\"\xef\x01\n\nTechnology\x12\x0c\n\x04name\x18\x01 \x01(\t\x12$\n\x06layers\x18\x65 \x03(\x0b\x32\x14.kpex.tech.LayerInfo\x12\x39\n\x13lvs_computed_layers\x18x \x03(\x0b\x32\x1c.kpex.tech.ComputedLayerInfo\x12\x33\n\rprocess_stack\x18\x8c\x01 \x01(\x0b\x32\x1b.kpex.tech.ProcessStackInfo\x12=\n\x12process_parasitics\x18\xc8\x01 \x01(\x0b\x32 .kpex.tech.ProcessParasiticsInfo\"W\n\tLayerInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x0b \x01(\t\x12\x11\n\tgds_layer\x18\x15 \x01(\r\x12\x14\n\x0cgds_datatype\x18\x1f \x01(\r\"\xf0\x01\n\x11\x43omputedLayerInfo\x12/\n\x04kind\x18\n \x01(\x0e\x32!.kpex.tech.ComputedLayerInfo.Kind\x12(\n\nlayer_info\x18\x14 \x01(\x0b\x32\x14.kpex.tech.LayerInfo\x12\x1b\n\x13original_layer_name\x18\x1e \x01(\t\"c\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\x10\n\x0cKIND_REGULAR\x10\x01\x12\x19\n\x15KIND_DEVICE_CAPACITOR\x10\x02\x12\x18\n\x14KIND_DEVICE_RESISTOR\x10\x03\x62\x06proto3') 

-

30 

-

31_globals = globals() 

-

32_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 

-

33_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'tech_pb2', _globals) 

-

34if not _descriptor._USE_C_DESCRIPTORS: 

-

35 DESCRIPTOR._loaded_options = None 

-

36 _globals['_TECHNOLOGY']._serialized_start=73 

-

37 _globals['_TECHNOLOGY']._serialized_end=312 

-

38 _globals['_LAYERINFO']._serialized_start=314 

-

39 _globals['_LAYERINFO']._serialized_end=401 

-

40 _globals['_COMPUTEDLAYERINFO']._serialized_start=404 

-

41 _globals['_COMPUTEDLAYERINFO']._serialized_end=644 

-

42 _globals['_COMPUTEDLAYERINFO_KIND']._serialized_start=545 

-

43 _globals['_COMPUTEDLAYERINFO_KIND']._serialized_end=644 

-

44# @@protoc_insertion_point(module_scope) 

-
- - - diff --git a/pycov/z_2a1ea3ee988f9971_fastcap_runner_test_py.html b/pycov/z_2a1ea3ee988f9971_fastcap_runner_test_py.html deleted file mode 100644 index fc204770..00000000 --- a/pycov/z_2a1ea3ee988f9971_fastcap_runner_test_py.html +++ /dev/null @@ -1,150 +0,0 @@ - - - - - Coverage for tests/fastcap/fastcap_runner_test.py: 100% - - - - - -
-
-

- Coverage for tests/fastcap/fastcap_runner_test.py: - 100% -

- -

- 22 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24import allure 

-

25import os 

-

26import unittest 

-

27 

-

28from kpex.fastcap.fastcap_runner import fastcap_parse_capacitance_matrix 

-

29 

-

30 

-

31@allure.parent_suite("Unit Tests") 

-

32@allure.tag("Capacitance", "FastCap") 

-

33class Test(unittest.TestCase): 

-

34 @property 

-

35 def fastcap_testdata_dir(self) -> str: 

-

36 return os.path.realpath(os.path.join(__file__, '..', '..', '..', 'testdata', 'fastcap')) 

-

37 

-

38 def test_fastcap_parse_capacitance_matrix(self): 

-

39 testdata_path = os.path.join(self.fastcap_testdata_dir, 'cap_mim_m3_w18p9_l5p1__REDUX122_FastCap_Output.txt') 

-

40 obtained_matrix = fastcap_parse_capacitance_matrix(log_path=testdata_path) 

-

41 self.assertEqual(4, len(obtained_matrix.rows)) 

-

42 self.assertEqual(4, len(obtained_matrix.rows[0])) 

-

43 self.assertEqual(4, len(obtained_matrix.rows[1])) 

-

44 self.assertEqual(4, len(obtained_matrix.rows[2])) 

-

45 self.assertEqual(4, len(obtained_matrix.rows[3])) 

-

46 self.assertEqual( 

-

47 ['$1%GROUP2', '$1%GROUP2', '$2%GROUP3', '$2%GROUP3'], 

-

48 obtained_matrix.conductor_names 

-

49 ) 

-

50 

-

51 output_path = os.path.join(self.fastcap_testdata_dir, 'cap_mim_m3_w18p9_l5p1__REDUX122_FastCap_Result_Matrix.csv') 

-

52 obtained_matrix.write_csv(output_path=output_path, separator=';') 

-

53 allure.attach.file(output_path, attachment_type=allure.attachment_type.CSV) 

-
- - - diff --git a/pycov/z_2a6b66cd9c831353___init___py.html b/pycov/z_2a6b66cd9c831353___init___py.html deleted file mode 100644 index 547ebf58..00000000 --- a/pycov/z_2a6b66cd9c831353___init___py.html +++ /dev/null @@ -1,124 +0,0 @@ - - - - - Coverage for kpex/klayout/__init__.py: 100% - - - - - -
-
-

- Coverage for kpex/klayout/__init__.py: - 100% -

- -

- 1 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from .lvsdb_extractor import ( 

-

25 KLayoutExtractedLayerInfo, 

-

26 KLayoutExtractionContext 

-

27) 

-
- - - diff --git a/pycov/z_2a6b66cd9c831353_lvs_runner_py.html b/pycov/z_2a6b66cd9c831353_lvs_runner_py.html deleted file mode 100644 index 578e497f..00000000 --- a/pycov/z_2a6b66cd9c831353_lvs_runner_py.html +++ /dev/null @@ -1,194 +0,0 @@ - - - - - Coverage for kpex/klayout/lvs_runner.py: 31% - - - - - -
-
-

- Coverage for kpex/klayout/lvs_runner.py: - 31% -

- -

- 26 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from __future__ import annotations 

-

25 

-

26import os 

-

27import subprocess 

-

28import time 

-

29 

-

30from kpex.log import ( 

-

31 debug, 

-

32 info, 

-

33 warning, 

-

34 error, 

-

35 subproc, 

-

36 rule 

-

37) 

-

38 

-

39 

-

40class LVSRunner: 

-

41 @staticmethod 

-

42 def run_klayout_lvs(exe_path: str, 

-

43 lvs_script: str, 

-

44 gds_path: str, 

-

45 schematic_path: str, 

-

46 log_path: str, 

-

47 lvsdb_path: str): 

-

48 args = [ 

-

49 exe_path, 

-

50 '-b', 

-

51 '-r', lvs_script, 

-

52 '-rd', f"input={os.path.abspath(gds_path)}", 

-

53 '-rd', f"report={os.path.abspath(lvsdb_path)}", 

-

54 '-rd', f"schematic={os.path.abspath(schematic_path)}", 

-

55 '-rd', 'thr=22', 

-

56 '-rd', 'run_mode=deep', 

-

57 '-rd', 'spice_net_names=true', 

-

58 '-rd', 'spice_comments=false', 

-

59 '-rd', 'scale=false', 

-

60 '-rd', 'verbose=true', 

-

61 '-rd', 'schematic_simplify=false', 

-

62 '-rd', 'net_only=false', 

-

63 '-rd', 'top_lvl_pins=true', 

-

64 '-rd', 'combine=false', 

-

65 '-rd', 'combine_devices=false', # IHP 

-

66 '-rd', 'purge=false', 

-

67 '-rd', 'purge_nets=false', 

-

68 '-rd', 'no_simplify=true', # IHP 

-

69 ] 

-

70 info(f"Calling {' '.join(args)}, output file: {log_path}") 

-

71 rule() 

-

72 start = time.time() 

-

73 

-

74 proc = subprocess.Popen(args, 

-

75 stdin=subprocess.DEVNULL, 

-

76 stdout=subprocess.PIPE, 

-

77 stderr=subprocess.STDOUT, 

-

78 universal_newlines=True, 

-

79 text=True) 

-

80 with open(log_path, 'w') as f: 

-

81 while True: 

-

82 line = proc.stdout.readline() 

-

83 if not line: 

-

84 break 

-

85 subproc(line[:-1]) # remove newline 

-

86 f.writelines([line]) 

-

87 proc.wait() 

-

88 

-

89 duration = time.time() - start 

-

90 

-

91 rule() 

-

92 

-

93 if proc.returncode == 0: 

-

94 info(f"klayout LVS succeeded after {'%.4g' % duration}s") 

-

95 else: 

-

96 warning(f"klayout LVS failed with status code {proc.returncode} after {'%.4g' % duration}s, " 

-

97 f"see log file: {log_path}") 

-
- - - diff --git a/pycov/z_2a6b66cd9c831353_lvsdb_extractor_py.html b/pycov/z_2a6b66cd9c831353_lvsdb_extractor_py.html deleted file mode 100644 index 37ac1d49..00000000 --- a/pycov/z_2a6b66cd9c831353_lvsdb_extractor_py.html +++ /dev/null @@ -1,374 +0,0 @@ - - - - - Coverage for kpex/klayout/lvsdb_extractor.py: 67% - - - - - -
-
-

- Coverage for kpex/klayout/lvsdb_extractor.py: - 67% -

- -

- 132 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:36 +0000 -

- -
-
-
-

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from __future__ import annotations 

-

25 

-

26import tempfile 

-

27from typing import * 

-

from dataclasses import dataclass
from rich.pretty import pprint

import klayout.db as kdb

import tech_pb2
from ..log import (
    console,
    debug,
    info,
    warning,
    error,
    rule
)

from ..tech_info import TechInfo


GDSPair = Tuple[int, int]


@dataclass
class KLayoutExtractedLayerInfo:
    index: int
    lvs_layer_name: str   # NOTE: this can be computed, so gds_pair is preferred
    gds_pair: GDSPair
    region: kdb.Region


@dataclass
class KLayoutMergedExtractedLayerInfo:
    source_layers: List[KLayoutExtractedLayerInfo]
    gds_pair: GDSPair


@dataclass
class KLayoutExtractionContext:
    lvsdb: kdb.LayoutToNetlist
    dbu: float
    top_cell: kdb.Cell
    layer_map: Dict[int, kdb.LayerInfo]
    cell_mapping: kdb.CellMapping
    target_layout: kdb.Layout
    extracted_layers: Dict[GDSPair, KLayoutMergedExtractedLayerInfo]
    unnamed_layers: List[KLayoutExtractedLayerInfo]

    @classmethod
    def prepare_extraction(cls,
                           lvsdb: kdb.LayoutToNetlist,
                           top_cell: str,
                           tech: TechInfo,
                           blackbox_devices: bool) -> KLayoutExtractionContext:
        dbu = lvsdb.internal_layout().dbu
        target_layout = kdb.Layout()
        target_layout.dbu = dbu
        top_cell = target_layout.create_cell(top_cell)

        # CellMapping
        # mapping of internal layout to target layout for the circuit mapping
        # https://www.klayout.de/doc-qt5/code/class_CellMapping.html
        # ---
        # https://www.klayout.de/doc-qt5/code/class_LayoutToNetlist.html#method18
        # Creates a cell mapping for copying shapes from the internal layout to the given target layout
        cm = lvsdb.cell_mapping_into(target_layout,  # target layout
                                     top_cell,
                                     not blackbox_devices)  # with_device_cells

        lm = cls.build_LVS_layer_map(target_layout=target_layout,
                                     lvsdb=lvsdb,
                                     tech=tech,
                                     blackbox_devices=blackbox_devices)

        net_name_prop_num = 1

        # Build a full hierarchical representation of the nets
        # https://www.klayout.de/doc-qt5/code/class_LayoutToNetlist.html#method14
        # hier_mode = None
        hier_mode = kdb.LayoutToNetlist.BuildNetHierarchyMode.BNH_Flatten
        # hier_mode = kdb.LayoutToNetlist.BuildNetHierarchyMode.BNH_SubcircuitCells

        lvsdb.build_all_nets(
            cmap=cm,                # mapping of internal layout to target layout for the circuit mapping
            target=target_layout,   # target layout
            lmap=lm,                # maps: target layer index => net regions
            hier_mode=hier_mode,    # hier mode
            netname_prop=net_name_prop_num,  # property name to which to attach the net name
            circuit_cell_name_prefix="CIRCUIT_",
            device_cell_name_prefix=None  # "DEVICE_"
        )

        extracted_layers, unnamed_layers = cls.nonempty_extracted_layers(lvsdb=lvsdb,
                                                                         tech=tech,
                                                                         blackbox_devices=blackbox_devices)

        rule('Non-empty layers in LVS database:')
        for gds_pair, layer_info in extracted_layers.items():
            names = [l.lvs_layer_name for l in layer_info.source_layers]
            info(f"{gds_pair} -> ({' '.join(names)})")

        return KLayoutExtractionContext(
            lvsdb=lvsdb,
            dbu=dbu,
            top_cell=top_cell,
            layer_map=lm,
            cell_mapping=cm,
            target_layout=target_layout,
            extracted_layers=extracted_layers,
            unnamed_layers=unnamed_layers
        )

    @staticmethod
    def build_LVS_layer_map(target_layout: kdb.Layout,
                            lvsdb: kdb.LayoutToNetlist,
                            tech: TechInfo,
                            blackbox_devices: bool) -> Dict[int, kdb.LayerInfo]:
        # NOTE: currently, the layer numbers are auto-assigned
        # by the sequence they occur in the LVS script, hence not well defined!
        # build a layer map for the layers that correspond to original ones.

        # https://www.klayout.de/doc-qt5/code/class_LayerInfo.html
        lm: Dict[int, kdb.LayerInfo] = {}

        if not hasattr(lvsdb, "layer_indexes"):
            raise Exception("Needs at least KLayout version 0.29.2")

        for layer_index in lvsdb.layer_indexes():
            lname = lvsdb.layer_name(layer_index)

            computed_layer_info = tech.computed_layer_info_by_name.get(lname, None)
            if computed_layer_info and blackbox_devices:
                match computed_layer_info.kind:
                    case tech_pb2.ComputedLayerInfo.Kind.KIND_DEVICE_RESISTOR:
                        continue
                    case tech_pb2.ComputedLayerInfo.Kind.KIND_DEVICE_CAPACITOR:
                        continue

            gds_pair = tech.gds_pair_for_computed_layer_name.get(lname, None)
            if not gds_pair:
                li = lvsdb.internal_layout().get_info(layer_index)
                if li != kdb.LayerInfo():
                    gds_pair = (li.layer, li.datatype)

            if gds_pair is not None:
                target_layer_index = target_layout.layer(*gds_pair)  # Creates a new internal layer!
                region = lvsdb.layer_by_index(layer_index)
                lm[target_layer_index] = region

        return lm

    @staticmethod
    def nonempty_extracted_layers(lvsdb: kdb.LayoutToNetlist,
                                  tech: TechInfo,
                                  blackbox_devices: bool) -> Tuple[Dict[GDSPair, KLayoutMergedExtractedLayerInfo], List[KLayoutExtractedLayerInfo]]:
        # https://www.klayout.de/doc-qt5/code/class_LayoutToNetlist.html#method18
        nonempty_layers: Dict[GDSPair, KLayoutMergedExtractedLayerInfo] = {}

        unnamed_layers: List[KLayoutExtractedLayerInfo] = []

        for idx, ln in enumerate(lvsdb.layer_names()):
            layer = lvsdb.layer_by_name(ln)
            if layer.count() >= 1:
                computed_layer_info = tech.computed_layer_info_by_name.get(ln, None)
                if not computed_layer_info:
                    warning(f"Unable to find info about extracted LVS layer '{ln}'")
                    gds_pair = (1000 + idx, 20)
                    linfo = KLayoutExtractedLayerInfo(
                        index=idx,
                        lvs_layer_name=ln,
                        gds_pair=gds_pair,
                        region=layer
                    )
                    unnamed_layers.append(linfo)
                    continue

                if blackbox_devices:
                    match computed_layer_info.kind:
                        case tech_pb2.ComputedLayerInfo.Kind.KIND_DEVICE_RESISTOR:
                            continue
                        case tech_pb2.ComputedLayerInfo.Kind.KIND_DEVICE_CAPACITOR:
                            continue

                gds_pair = (computed_layer_info.layer_info.gds_layer, computed_layer_info.layer_info.gds_datatype)

                linfo = KLayoutExtractedLayerInfo(
                    index=idx,
                    lvs_layer_name=ln,
                    gds_pair=gds_pair,
                    region=layer
                )

                entry = nonempty_layers.get(gds_pair, None)
                if entry:
                    entry.source_layers.append(linfo)
                else:
                    nonempty_layers[gds_pair] = KLayoutMergedExtractedLayerInfo(
                        source_layers=[linfo],
                        gds_pair=gds_pair,
                    )

        return nonempty_layers, unnamed_layers

    def top_cell_bbox(self) -> kdb.Box:
        b1: kdb.Box = self.target_layout.top_cell().bbox()
        b2: kdb.Box = self.lvsdb.internal_layout().top_cell().bbox()
        if b1.area() > b2.area():
            return b1
        else:
            return b2

    def shapes_of_net(self, gds_pair: GDSPair, net: kdb.Net) -> Optional[kdb.Region]:
        lyr = self.extracted_layers.get(gds_pair, None)
        if not lyr:
            return None

        shapes: kdb.Region

        match len(lyr.source_layers):
            case 0:
                raise AssertionError('Internal error: Empty list of source_layers')
            case 1:
                shapes = self.lvsdb.shapes_of_net(net, lyr.source_layers[0].region, True)
            case _:
                shapes = kdb.Region()
                for sl in lyr.source_layers:
                    shapes += self.lvsdb.shapes_of_net(net, sl.region, True)
                # shapes.merge()

        return shapes

    def shapes_of_layer(self, gds_pair: GDSPair) -> Optional[kdb.Region]:
        lyr = self.extracted_layers.get(gds_pair, None)
        if not lyr:
            return None

        shapes: kdb.Region

        match len(lyr.source_layers):
            case 0:
                raise AssertionError('Internal error: Empty list of source_layers')
            case 1:
                shapes = lyr.source_layers[0].region
            case _:
                shapes = kdb.Region()
                for sl in lyr.source_layers:
                    shapes += sl.region
                # shapes.merge()

        return shapes
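A minimal, self-contained illustration of the data model above, with made-up layer names and geometry: two extracted LVS layers that share a GDS pair stay separate source layers and are accumulated on demand, the same way shapes_of_layer() does. This is a sketch for orientation only, not part of the module.

import klayout.db as kdb

r1 = kdb.Region(kdb.Box(0, 0, 1000, 1000))     # hypothetical shapes of one LVS-computed layer
r2 = kdb.Region(kdb.Box(2000, 0, 3000, 1000))  # hypothetical shapes of another layer on the same GDS pair

a = KLayoutExtractedLayerInfo(index=0, lvs_layer_name='li_drawing', gds_pair=(67, 20), region=r1)
b = KLayoutExtractedLayerInfo(index=1, lvs_layer_name='li_extra', gds_pair=(67, 20), region=r2)
merged = KLayoutMergedExtractedLayerInfo(source_layers=[a, b], gds_pair=(67, 20))

combined = kdb.Region()
for sl in merged.source_layers:
    combined += sl.region   # same accumulation as in shapes_of_layer()
print(combined.count())     # two separate polygons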
diff --git a/pycov/z_2a6b66cd9c831353_netlist_csv_py.html b/pycov/z_2a6b66cd9c831353_netlist_csv_py.html
deleted file mode 100644
index a6bf9096..00000000
--- a/pycov/z_2a6b66cd9c831353_netlist_csv_py.html
+++ /dev/null
@@ -1,156 +0,0 @@
Coverage for kpex/klayout/netlist_csv.py: 26% (23 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from __future__ import annotations

import klayout.db as kdb

from kpex.log import (
    info,
)


class NetlistCSVWriter:
    @staticmethod
    def write_csv(netlist: kdb.Netlist,
                  top_cell_name: str,
                  output_path: str):
        with open(output_path, 'w') as f:
            f.write('Device;Net1;Net2;Capacitance [F];Capacitance [fF]\n')

            top_circuit: kdb.Circuit = netlist.circuit_by_name(top_cell_name)

            # NOTE: only caps for now
            for d in top_circuit.each_device():
                # https://www.klayout.de/doc-qt5/code/class_Device.html
                dc = d.device_class()
                if isinstance(dc, kdb.DeviceClassCapacitor):
                    dn = d.expanded_name() or d.name
                    if dc.name != 'PEX_CAP':
                        info(f"Ignoring device {dn}")
                        continue
                    param_defs = dc.parameter_definitions()
                    params = {p.name: d.parameter(p.id()) for p in param_defs}
                    d: kdb.Device
                    net1 = d.net_for_terminal('A')
                    net2 = d.net_for_terminal('B')
                    cap = params['C']
                    cap_femto = round(cap * 1e15, 2)
                    f.write(f"{dn};{net1.name};{net2.name};{'%.12g' % cap};{cap_femto}f\n")
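For reference, the writer above produces semicolon-separated rows, one per extracted PEX_CAP device. A hypothetical row for a 294.592 fF coupling capacitor Cext_0_1 between nets LOWER and UPPER would look as follows (the values are illustrative only, not from a real run):

Device;Net1;Net2;Capacitance [F];Capacitance [fF]
Cext_0_1;LOWER;UPPER;2.94592e-13;294.59f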
diff --git a/pycov/z_2a6b66cd9c831353_netlist_expander_py.html b/pycov/z_2a6b66cd9c831353_netlist_expander_py.html
deleted file mode 100644
index 4b4ce49e..00000000
--- a/pycov/z_2a6b66cd9c831353_netlist_expander_py.html
+++ /dev/null
@@ -1,245 +0,0 @@
Coverage for kpex/klayout/netlist_expander.py: 97% (70 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from __future__ import annotations

import re
from typing import *

import klayout.db as kdb

from kpex.log import (
    info,
    warning,
)
from kpex.common.capacitance_matrix import CapacitanceMatrix


class NetlistExpander:
    @staticmethod
    def expand(extracted_netlist: kdb.Netlist,
               top_cell_name: str,
               cap_matrix: CapacitanceMatrix,
               blackbox_devices: bool) -> kdb.Netlist:
        expanded_netlist: kdb.Netlist = extracted_netlist.dup()
        top_circuit: kdb.Circuit = expanded_netlist.circuit_by_name(top_cell_name)

        if not blackbox_devices:
            for d in top_circuit.each_device():
                name = d.name or d.expanded_name()
                info(f"Removing whiteboxed device {name}")
                top_circuit.remove_device(d)

        # create capacitor class
        cap = kdb.DeviceClassCapacitor()
        cap.name = 'PEX_CAP'
        cap.description = "Extracted by kpex/FasterCap PEX"
        expanded_netlist.add(cap)

        fc_gnd_net = top_circuit.create_net('FC_GND')  # create GROUND net
        vsubs_net = top_circuit.create_net("VSUBS")
        nets: List[kdb.Net] = []

        # build table: name -> net
        name2net: Dict[str, kdb.Net] = {n.expanded_name(): n for n in top_circuit.each_net()}

        # find nets for the matrix axes
        pattern = re.compile(r'^g\d+_(.*)$')
        for idx, nn in enumerate(cap_matrix.conductor_names):
            m = pattern.match(nn)
            nn = m.group(1)
            if nn not in name2net:
                raise Exception(f"No net found with name {nn}, net names are: {list(name2net.keys())}")
            n = name2net[nn]
            nets.append(n)

        cap_threshold = 0.0

        def add_parasitic_cap(i: int,
                              j: int,
                              net1: kdb.Net,
                              net2: kdb.Net,
                              cap_value: float):
            if cap_value > cap_threshold:
                c: kdb.Device = top_circuit.create_device(cap, f"Cext_{i}_{j}")
                c.connect_terminal('A', net1)
                c.connect_terminal('B', net2)
                c.set_parameter('C', cap_value)
                if net1 == net2:
                    raise Exception(f"Invalid attempt to create cap {c.name} between "
                                    f"same net {net1} with value {'%.12g' % cap_value}")
            else:
                warning(f"Ignoring capacitance matrix cell [{i},{j}], "
                        f"{'%.12g' % cap_value} is below threshold {'%.12g' % cap_threshold}")

        # -------------------------------------------------------------
        # Example capacitance matrix:
        #   [C11+C12+C13        -C12            -C13]
        #   [-C21          C21+C22+C23          -C23]
        #   [-C31               -C32     C31+C32+C33]
        # -------------------------------------------------------------
        #
        # - Diagonal elements m[i][i] contain the capacitance over GND (Cii),
        #   but in a sum including all the other values of the row
        #
        # https://www.fastfieldsolvers.com/Papers/The_Maxwell_Capacitance_Matrix_WP110301_R03.pdf
        #
        for i in range(0, cap_matrix.dimension):
            row = cap_matrix[i]
            cap_ii = row[i]
            for j in range(0, cap_matrix.dimension):
                if i == j:
                    continue
                cap_value = -row[j]  # off-diagonals are always stored as negative values
                cap_ii -= cap_value  # subtract summands to filter out Cii
                if j > i:
                    add_parasitic_cap(i=i, j=j,
                                      net1=nets[i], net2=nets[j],
                                      cap_value=cap_value)
            if i > 0:
                add_parasitic_cap(i=i, j=i,
                                  net1=nets[i], net2=nets[0],
                                  cap_value=cap_ii)

        # Short VSUBS and FC_GND together
        #   VSUBS ... substrate block
        #   FC_GND ... FasterCap's GND, i.e. the diagonal Cii elements
        # create capacitor class

        res = kdb.DeviceClassResistor()
        res.name = 'PEX_RES'
        res.description = "Extracted by kpex/FasterCap PEX"
        expanded_netlist.add(res)

        gnd_net = name2net.get('GND', None)
        if not gnd_net:
            gnd_net = top_circuit.create_net('GND')  # create GROUND net

        c: kdb.Device = top_circuit.create_device(res, f"Rext_FC_GND_GND")
        c.connect_terminal('A', fc_gnd_net)
        c.connect_terminal('B', gnd_net)
        c.set_parameter('R', 0)

        c: kdb.Device = top_circuit.create_device(res, f"Rext_VSUBS_GND")
        c.connect_terminal('A', vsubs_net)
        c.connect_terminal('B', gnd_net)
        c.set_parameter('R', 0)

        return expanded_netlist
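The loop above converts a Maxwell capacitance matrix into a capacitor network: the negated off-diagonal entry -C[i][j] becomes the coupling capacitor between nets i and j, and whatever remains of the diagonal after stripping those coupling terms (the row sum) is attached between net i and net 0. A minimal sketch of that arithmetic on a hypothetical 3x3 matrix, plain Python with no kpex or KLayout dependencies and purely illustrative numbers:

# Hypothetical Maxwell capacitance matrix in farads (rows/cols: net0, net1, net2).
# Off-diagonals are negative; each diagonal holds Cii plus the coupling terms of its row.
maxwell = [
    [3.0e-15, -1.0e-15, -0.5e-15],
    [-1.0e-15, 4.0e-15, -2.0e-15],
    [-0.5e-15, -2.0e-15, 3.5e-15],
]
dim = len(maxwell)

coupling = {}   # (i, j) -> capacitance between net i and net j
to_ground = {}  # i -> capacitance between net i and net 0 (the reference conductor)

for i in range(dim):
    row = maxwell[i]
    cap_ii = row[i]
    for j in range(dim):
        if i == j:
            continue
        cap_value = -row[j]   # coupling cap between net i and net j
        cap_ii -= cap_value   # strip the coupling summands out of the diagonal
        if j > i:
            coupling[(i, j)] = cap_value
    if i > 0:
        to_ground[i] = cap_ii

# e.g. coupling[(1, 2)] == 2e-15 and to_ground[1] == 1e-15 (up to float rounding),
# i.e. to_ground[i] equals the sum of row i, as the Maxwell convention implies.
print(coupling)
print(to_ground)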
diff --git a/pycov/z_2a6b66cd9c831353_netlist_reducer_py.html b/pycov/z_2a6b66cd9c831353_netlist_reducer_py.html
deleted file mode 100644
index 108a7972..00000000
--- a/pycov/z_2a6b66cd9c831353_netlist_reducer_py.html
+++ /dev/null
@@ -1,160 +0,0 @@
Coverage for kpex/klayout/netlist_reducer.py: 95% (22 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from typing import *

import klayout.db as kdb

from ..log import (
    info,
)


class NetlistReducer:
    @staticmethod
    def reduce(netlist: kdb.Netlist,
               top_cell_name: str,
               cap_threshold: float = 0.05e-15) -> kdb.Netlist:
        reduced_netlist: kdb.Netlist = netlist.dup()
        reduced_netlist.combine_devices()  # merge C/R

        top_circuit: kdb.Circuit = reduced_netlist.circuit_by_name(top_cell_name)

        devices_to_remove: List[kdb.Device] = []

        for d in top_circuit.each_device():
            d: kdb.Device
            dc = d.device_class()
            if isinstance(dc, kdb.DeviceClassCapacitor):
                # net_a = d.net_for_terminal('A')
                # net_b = d.net_for_terminal('B')
                c_value = d.parameter('C')
                if c_value < cap_threshold:
                    devices_to_remove.append(d)

            elif isinstance(dc, kdb.DeviceClassResistor):
                # TODO
                pass

        for d in devices_to_remove:
            info(f"Removed device {d.name} {d.parameter('C')}")
            top_circuit.remove_device(d)

        return reduced_netlist
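A minimal usage sketch for the reducer, assuming a SPICE netlist produced by the expander is available at a hypothetical path and that KLayout's SPICE netlist reader/writer classes are used for I/O; the path and top cell name are placeholders:

import klayout.db as kdb

netlist = kdb.Netlist()
netlist.read('expanded_pex.cir', kdb.NetlistSpiceReader())  # hypothetical input path

reduced = NetlistReducer().reduce(netlist=netlist,
                                  top_cell_name='TOP',     # placeholder top cell name
                                  cap_threshold=0.05e-15)  # drop capacitors below 0.05 fF

reduced.write('reduced_pex.cir', kdb.NetlistSpiceWriter())  # hypothetical output path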
diff --git a/pycov/z_2a6b66cd9c831353_repair_rdb_py.html b/pycov/z_2a6b66cd9c831353_repair_rdb_py.html
deleted file mode 100644
index 086dfd08..00000000
--- a/pycov/z_2a6b66cd9c831353_repair_rdb_py.html
+++ /dev/null
@@ -1,234 +0,0 @@
Coverage for kpex/klayout/repair_rdb.py: 16% (79 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
import gzip
import io
import os.path
import shutil
import sys
from typing import *
import xml.etree.ElementTree as ET

import klayout.rdb as rdb
from kpex.log import (
    LogLevel,
    set_log_level,
    register_additional_handler,
    deregister_additional_handler,
    # console,
    # debug,
    info,
    warning,
    subproc,
    error,
    rule
)


def parse_category_path(category_path: str) -> List[str]:
    within_escaped = False
    within_backslash = False
    current_word = ''
    path_list = []
    for c in category_path:
        match c:
            case '.':
                if within_backslash:
                    current_word += c
                    within_backslash = False
                elif within_escaped:
                    current_word += c
                else:
                    path_list.append(current_word)
                    current_word = ''
            case '\\':
                if within_backslash:
                    current_word += c
                    within_backslash = False
                else:
                    within_backslash = True
            case '\'':
                if within_backslash:
                    current_word += c
                    within_backslash = False
                else:
                    within_escaped = not within_escaped
            case _:
                current_word += c
    if len(current_word) >= 1:
        path_list.append(current_word)
    return path_list

def repair_rdb_xml(xml_file: io.IOBase, new_xml_path: str):
    et = ET.parse(xml_file)
    root = et.getroot()

    categories: Set[str] = set(
        [e.text for e in root.findall('./items/item/category')]
    )
    category_paths = [parse_category_path(c) for c in categories]
    category_paths.sort()
    # print(category_paths)
    for p in category_paths:
        elem = root
        for c in p:
            elemcats = elem.find("./categories")
            subelem = elemcats.find("./category/name[.='{0}']/..".format(c))
            if subelem is None:
                warning(f"In category path {p}, can't find element for component {c}")
                new_category = ET.SubElement(elemcats, "category")
                new_cname = ET.SubElement(new_category, "name")
                new_cname.text = c
                ET.SubElement(new_category, 'description')
                ET.SubElement(new_category, 'categories')
                elem = new_category
            else:
                elem = subelem

    et.write(new_xml_path)


def repair_rdb(rdb_path: str):
    rdb_file: io.IOBase
    suffix = os.path.splitext(rdb_path)[-1]
    new_xml_path = rdb_path + '.repair.xml'

    if suffix == '.gz':
        with gzip.open(rdb_path, 'r') as f:
            repair_rdb_xml(f, new_xml_path)
    else:
        with open(rdb_path, 'r') as f:
            repair_rdb_xml(f, new_xml_path)

    report = rdb.ReportDatabase('')
    try:
        report.load(new_xml_path)
        info(f"Succeeded in repairing broken marker database {rdb_path} under {new_xml_path}")

    except Exception as e:
        error(f"Failed to repair broken marker database {rdb_path} due to exception: {e}")


if __name__ == "__main__":
    if len(sys.argv) < 2:
        print(f"Usage: {sys.argv[0]} file.rdb.gz")
        sys.exit(1)

    repair_rdb(sys.argv[1])
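parse_category_path splits a report-database category path on '.' while honoring backslash escapes and single-quoted segments. A small illustration of the intended behavior; the input string is made up for the example:

# A backslash escapes a single '.', and single quotes protect a whole segment:
path = parse_category_path(r"kpex.errors\.warnings.'net.A'")
assert path == ['kpex', 'errors.warnings', 'net.A']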
diff --git a/pycov/z_2a8ddab25760f5a7_fastcap_runner_test_py.html b/pycov/z_2a8ddab25760f5a7_fastcap_runner_test_py.html
deleted file mode 100644
index bc2cac0a..00000000
--- a/pycov/z_2a8ddab25760f5a7_fastcap_runner_test_py.html
+++ /dev/null
@@ -1,150 +0,0 @@
Coverage for tests\fastcap\fastcap_runner_test.py: 100% (22 statements; coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
import allure
import os
import unittest

from kpex.fastcap.fastcap_runner import fastcap_parse_capacitance_matrix


@allure.parent_suite("Unit Tests")
@allure.tag("Capacitance", "FastCap")
class Test(unittest.TestCase):
    @property
    def fastcap_testdata_dir(self) -> str:
        return os.path.realpath(os.path.join(__file__, '..', '..', '..', 'testdata', 'fastcap'))

    def test_fastcap_parse_capacitance_matrix(self):
        testdata_path = os.path.join(self.fastcap_testdata_dir, 'cap_mim_m3_w18p9_l5p1__REDUX122_FastCap_Output.txt')
        obtained_matrix = fastcap_parse_capacitance_matrix(log_path=testdata_path)
        self.assertEqual(4, len(obtained_matrix.rows))
        self.assertEqual(4, len(obtained_matrix.rows[0]))
        self.assertEqual(4, len(obtained_matrix.rows[1]))
        self.assertEqual(4, len(obtained_matrix.rows[2]))
        self.assertEqual(4, len(obtained_matrix.rows[3]))
        self.assertEqual(
            ['$1%GROUP2', '$1%GROUP2', '$2%GROUP3', '$2%GROUP3'],
            obtained_matrix.conductor_names
        )

        output_path = os.path.join(self.fastcap_testdata_dir, 'cap_mim_m3_w18p9_l5p1__REDUX122_FastCap_Result_Matrix.csv')
        obtained_matrix.write_csv(output_path=output_path, separator=';')
        allure.attach.file(output_path, attachment_type=allure.attachment_type.CSV)
diff --git a/pycov/z_2dc81a3a091b1002_rcx25_test_py.html b/pycov/z_2dc81a3a091b1002_rcx25_test_py.html
deleted file mode 100644
index 6b627997..00000000
--- a/pycov/z_2dc81a3a091b1002_rcx25_test_py.html
+++ /dev/null
@@ -1,409 +0,0 @@
Coverage for tests/rcx25/rcx25_test.py: 82% (95 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from __future__ import annotations

import io
import json
import tempfile

import allure
import csv_diff
import os
from typing import *
import unittest

import klayout.db as kdb
import klayout.lay as klay

from kpex.kpex_cli import KpexCLI
from kpex.rcx25.extraction_results import CellExtractionResults


CSVPath = str
PNGPath = str
parent_suite = "kpex/2.5D Extraction Tests"
tags = ("PEX", "2.5D", "MAGIC")


def _kpex_pdk_dir() -> str:
    return os.path.realpath(os.path.join(__file__, '..', '..', '..',
                                         'pdk', 'sky130A', 'libs.tech', 'kpex'))


def _sky130a_testdata_dir() -> str:
    return os.path.realpath(os.path.join(__file__, '..', '..', '..',
                                         'testdata', 'designs', 'sky130A'))


def _gds(*path_components) -> str:
    return os.path.join(_sky130a_testdata_dir(), *path_components)


def _save_layout_preview(gds_path: str,
                         output_png_path: str):
    kdb.Technology.clear_technologies()
    default_lyt_path = os.path.abspath(f"{_kpex_pdk_dir()}/sky130A.lyt")
    tech = kdb.Technology.create_technology('sky130A')
    tech.load(default_lyt_path)

    lv = klay.LayoutView()
    lv.load_layout(gds_path)
    lv.max_hier()
    lv.set_config('background-color', '#000000')
    lv.set_config('bitmap-oversampling', '1')
    lv.set_config('default-font-size', '4')
    lv.set_config('default-text-size', '0.1')
    lv.save_image_with_options(
        output_png_path,
        width=4096, height=2160
        # ,
        # linewidth=2,
        # resolution=0.25  # 4x as large fonts
    )

def _run_rcx25d_single_cell(*path_components) -> Tuple[CellExtractionResults, CSVPath, PNGPath]:
    gds_path = _gds(*path_components)

    preview_png_path = tempfile.mktemp(prefix=f"layout_preview_", suffix=".png")
    _save_layout_preview(gds_path, preview_png_path)
    tech_json_path = os.path.realpath(os.path.join(__file__, '..', '..', '..',
                                                   'build', 'sky130A_tech.pb.json'))
    output_dir_path = os.path.realpath(os.path.join(__file__, '..', '..', '..', 'output_sky130A'))
    cli = KpexCLI()
    cli.main(['main',
              '--tech', tech_json_path,
              '--gds', gds_path,
              '--out_dir', output_dir_path,
              '--2.5D', 'y'])
    assert cli.rcx25_extraction_results is not None
    assert len(cli.rcx25_extraction_results.cell_extraction_results) == 1  # assume single cell test
    results = list(cli.rcx25_extraction_results.cell_extraction_results.values())[0]
    assert results.cell_name == path_components[-1][:-len('.gds.gz')]
    return results, cli.rcx25_extracted_csv_path, preview_png_path


def assert_expected_matches_obtained(*path_components,
                                     expected_csv_content: str) -> CellExtractionResults:
    result, csv, preview_png = _run_rcx25d_single_cell(*path_components)
    allure.attach.file(csv, name='pex_obtained.csv', attachment_type=allure.attachment_type.CSV)
    allure.attach.file(preview_png, name='📸 layout_preview.png', attachment_type=allure.attachment_type.PNG)
    expected_csv = csv_diff.load_csv(io.StringIO(expected_csv_content), key='Device')
    with open(csv, 'r') as f:
        obtained_csv = csv_diff.load_csv(f, key='Device')
    diff = csv_diff.compare(expected_csv, obtained_csv, show_unchanged=False)
    human_diff = csv_diff.human_text(
        diff, current=obtained_csv, extras=(('Net1', '{Net1}'), ('Net2', '{Net2}'))
    )
    allure.attach(expected_csv_content, name='pex_expected.csv', attachment_type=allure.attachment_type.CSV)
    allure.attach(json.dumps(diff, sort_keys=True, indent=' ').encode("utf8"),
                  name='pex_diff.json', attachment_type=allure.attachment_type.JSON)
    allure.attach(human_diff.encode("utf8"), name='‼️ pex_diff.txt', attachment_type=allure.attachment_type.TEXT)
    # assert diff['added'] == []
    # assert diff['removed'] == []
    # assert diff['changed'] == []
    # assert diff['columns_added'] == []
    # assert diff['columns_removed'] == []
    assert human_diff == '', 'Diff detected'
    return result

@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_single_plate_100um_x_100um_li1_over_substrate():
    # MAGIC GIVES (8.3 revision 485):
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C0 PLATE VSUBS 0.38618p
    assert_expected_matches_obtained(
        'test_patterns', 'single_plate_100um_x_100um_li1_over_substrate.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;PLATE;VSUBS;386.18"""
    )


@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_overlap_plates_100um_x_100um_li1_m1():
    # MAGIC GIVES (8.3 revision 485):
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C2 LOWER VSUBS 0.38618p
    # C0 UPPER LOWER 0.294756p
    # C1 UPPER VSUBS 0.205833p
    # _______________________________ NOTE: with halo=50µm __________________________________
    # C2 LOWER VSUBS 0.38618p
    # C0 LOWER UPPER 0.294867p
    # C1 UPPER VSUBS 0.205621p
    # NOTE: magic with --magic_halo=50 (µm) gives UPPER-VSUBS of 0.205621p
    #       which is due to the handling of https://github.com/martinjankoehler/magic/issues/1
    assert_expected_matches_obtained(
        'test_patterns', 'overlap_plates_100um_x_100um_li1_m1.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;LOWER;VSUBS;386.18
C2;LOWER;UPPER;294.592
C3;UPPER;VSUBS;205.52"""
    )

@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_overlap_plates_100um_x_100um_li1_m1_m2_m3():
    # MAGIC GIVES (8.3 revision 485): (sorting changed to match order)
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C7 li1  VSUBS 0.38618p
    # C3 li1  met1  0.294756p   # DIFFERS a bit !!! TODO
    # C6 met1 VSUBS 0.205833p   # DIFFERS a bit !!! TODO
    # C0 met1 met2  0.680652p   # DIFFERS a bit !!! TODO
    # C2 li1  met2  99.3128f    # DIFFERS a bit !!! TODO
    # C5 met2 VSUBS 52.151802f
    # C4 met3 VSUBS 0.136643p
    # C1 li1  met3  5.59194f
    # _______________________________ NOTE: with halo=50µm __________________________________
    # C9 li1  VSUBS 0.38618p
    # C5 li1  met1  0.294867p   # DIFFERS a bit !!! TODO
    # C8 met1 VSUBS 0.205621p   # DIFFERS, but that's a MAGIC issue (see test_overlap_plates_100um_x_100um_li1_m1)
    # C2 met1 met2  0.680769p
    # C4 li1  met2  99.518005f  # DIFFERS a bit !!! TODO
    # C7 met2 VSUBS 51.5767f    # DIFFERS a bit !!! TODO
    # C3 li1  met3  6.01281f    # DIFFERS !!! TODO
    # C0 met2 met3  0.0422f     # we don't have that?! !!! TODO
    # C6 met3 VSUBS 0.136103p   # DIFFERS a bit !!! TODO
    # C1 met1 met3  0.012287f   # NOTE: we don't have that, due to halo=8µm

    assert_expected_matches_obtained(
        'test_patterns', 'overlap_plates_100um_x_100um_li1_m1_m2_m3.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;VSUBS;li1;386.18
C2;li1;met1;294.592
C3;VSUBS;met1;205.52
C4;met1;met2;680.482
C5;li1;met2;99.015
C6;VSUBS;met2;51.302
C7;VSUBS;met3;135.996
C8;li1;met3;5.031"""
    )


@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_sidewall_100um_x_100um_distance_200nm_li1():
    # MAGIC GIVES (8.3 revision 485): (sorting changed to match order)
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C2 C VSUBS 8.231f
    # C4 A VSUBS 8.231f
    # C3 B VSUBS 4.54159f
    # C0 B C     7.5f
    # C1 A B     7.5f
    # _______________________________ NOTE: with halo=50µm __________________________________
    # (same!)

    assert_expected_matches_obtained(
        'test_patterns', 'sidewall_100um_x_100um_distance_200nm_li1.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;C;VSUBS;11.92 # TODO: magic=8.231f
C2;A;VSUBS;11.92 # TODO: magic=8.231f
C3;B;VSUBS;11.92 # TODO: magic=4.452f
C4;B;C;7.5
C5;A;B;7.5"""
    )


@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_sidewall_net_uturn_l1_redux():
    # MAGIC GIVES (8.3 revision 485): (sorting changed to match order)
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C2 C0 VSUBS 38.1255f
    # C1 C1 VSUBS 12.5876f
    # C0 C0 C1    1.87386f
    # _______________________________ NOTE: with halo=50µm __________________________________
    # (same!)

    assert_expected_matches_obtained(
        'test_patterns', 'sidewall_net_uturn_l1_redux.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;C0;VSUBS;40.642
C2;C1;VSUBS;15.079
C3;C0;C1;0.019 TODO, MAGIC=1.87386 fF"""
    )


@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_sidewall_cap_vpp_04p4x04p6_l1_redux():
    # MAGIC GIVES (8.3 revision 485): (sorting changed to match order)
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C2 C0 VSUBS 0.300359f
    # C1 C1 VSUBS 0.086832f
    # C0 C0 C1    0.286226f
    # _______________________________ NOTE: with halo=50µm __________________________________
    # (same!)

    assert_expected_matches_obtained(
        'test_patterns', 'sidewall_cap_vpp_04p4x04p6_l1_redux.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;C0;VSUBS;0.447 TODO
C2;C1;VSUBS;0.223 TODO
C3;C0;C1;0.145 TODO"""
    )


@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_near_body_shield_li1_m1():
    # MAGIC GIVES (8.3 revision 485): (sorting changed to match order)
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C5 BOTTOM VSUBS 0.405082p
    # C1 BOTTOM TOPB  0.215823p   # DIFFERS marginally <0,1fF
    # C2 BOTTOM TOPA  0.215823p   # DIFFERS marginally <0,1fF
    # C0 TOPA   TOPB  0.502857f
    # C3 TOPB   VSUBS 0.737292f   # DIFFERS, but that's a MAGIC issue (see test_overlap_plates_100um_x_100um_li1_m1)
    # C4 TOPA   VSUBS 0.737292f   # DIFFERS, but that's a MAGIC issue (see test_overlap_plates_100um_x_100um_li1_m1)
    # _______________________________ NOTE: with halo=50µm __________________________________
    # NOTE: with halo=50µm, C3/C4 becomes 0.29976f
    #       see https://github.com/martinjankoehler/magic/issues/2

    assert_expected_matches_obtained(
        'test_patterns', 'near_body_shield_li1_m1.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;BOTTOM;VSUBS;405.082
C2;BOTTOM;TOPA;215.898
C3;BOTTOM;TOPB;215.898
C4;TOPA;TOPB;0.503"""
    )


@allure.parent_suite(parent_suite)
@allure.tag(*tags)
def test_sideoverlap_simple_plates_li1_m1():
    # MAGIC GIVES (8.3 revision 485): (sorting changed to match order)
    # _______________________________ NOTE: with halo=8µm __________________________________
    # C2 li1  VSUBS 7.931799f
    # C1 met1 VSUBS 0.248901p
    # C0 li1  met1  0.143335f
    # _______________________________ NOTE: with halo=50µm __________________________________
    # C2 li1  VSUBS 7.931799f
    # C1 met1 VSUBS 0.248901p
    # C0 li1  met1  0.156859f

    assert_expected_matches_obtained(
        'test_patterns', 'sideoverlap_simple_plates_li1_m1.gds.gz',
        expected_csv_content="""Device;Net1;Net2;Capacitance [fF]
C1;VSUBS;li1;7.932
C2;VSUBS;met1;249.059
C3;li1;met1;0.125 TODO"""
    )
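The comparison in assert_expected_matches_obtained relies on the csv_diff package, which sniffs the semicolon delimiter and keys rows by the Device column. A stripped-down sketch of that mechanism on two in-memory CSVs, with invented values purely for illustration:

import io
import csv_diff

expected = csv_diff.load_csv(io.StringIO(
    "Device;Net1;Net2;Capacitance [fF]\nC1;PLATE;VSUBS;386.18\n"), key='Device')
obtained = csv_diff.load_csv(io.StringIO(
    "Device;Net1;Net2;Capacitance [fF]\nC1;PLATE;VSUBS;386.20\n"), key='Device')

diff = csv_diff.compare(expected, obtained, show_unchanged=False)
print(csv_diff.human_text(diff, current=obtained))  # describes the changed capacitance of row C1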
diff --git a/pycov/z_2ea764e3f741ac46___init___py.html b/pycov/z_2ea764e3f741ac46___init___py.html
deleted file mode 100644
index c25bfa7d..00000000
--- a/pycov/z_2ea764e3f741ac46___init___py.html
+++ /dev/null
@@ -1,124 +0,0 @@
Coverage for kpex\klayout\__init__.py: 100% (1 statement; coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from .lvsdb_extractor import (
    KLayoutExtractedLayerInfo,
    KLayoutExtractionContext
)
diff --git a/pycov/z_2ea764e3f741ac46_lvs_runner_py.html b/pycov/z_2ea764e3f741ac46_lvs_runner_py.html
deleted file mode 100644
index b62dce6b..00000000
--- a/pycov/z_2ea764e3f741ac46_lvs_runner_py.html
+++ /dev/null
@@ -1,194 +0,0 @@
Coverage for kpex\klayout\lvs_runner.py: 31% (26 statements; coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from __future__ import annotations

import os
import subprocess
import time

from kpex.log import (
    debug,
    info,
    warning,
    error,
    subproc,
    rule
)


class LVSRunner:
    @staticmethod
    def run_klayout_lvs(exe_path: str,
                        lvs_script: str,
                        gds_path: str,
                        schematic_path: str,
                        log_path: str,
                        lvsdb_path: str):
        args = [
            exe_path,
            '-b',
            '-r', lvs_script,
            '-rd', f"input={os.path.abspath(gds_path)}",
            '-rd', f"report={os.path.abspath(lvsdb_path)}",
            '-rd', f"schematic={os.path.abspath(schematic_path)}",
            '-rd', 'thr=22',
            '-rd', 'run_mode=deep',
            '-rd', 'spice_net_names=true',
            '-rd', 'spice_comments=false',
            '-rd', 'scale=false',
            '-rd', 'verbose=true',
            '-rd', 'schematic_simplify=false',
            '-rd', 'net_only=false',
            '-rd', 'top_lvl_pins=true',
            '-rd', 'combine=false',
            '-rd', 'combine_devices=false',  # IHP
            '-rd', 'purge=false',
            '-rd', 'purge_nets=false',
            '-rd', 'no_simplify=true',  # IHP
        ]
        info(f"Calling {' '.join(args)}, output file: {log_path}")
        rule()
        start = time.time()

        proc = subprocess.Popen(args,
                                stdin=subprocess.DEVNULL,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                universal_newlines=True,
                                text=True)
        with open(log_path, 'w') as f:
            while True:
                line = proc.stdout.readline()
                if not line:
                    break
                subproc(line[:-1])  # remove newline
                f.writelines([line])
        proc.wait()

        duration = time.time() - start

        rule()

        if proc.returncode == 0:
            info(f"klayout LVS succeeded after {'%.4g' % duration}s")
        else:
            warning(f"klayout LVS failed with status code {proc.returncode} after {'%.4g' % duration}s, "
                    f"see log file: {log_path}")
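A minimal sketch of how LVSRunner.run_klayout_lvs could be invoked; every path below is a placeholder, and in an actual run the LVS script would come from the PDK:

runner = LVSRunner()
runner.run_klayout_lvs(exe_path='klayout',                  # assumes klayout is on PATH
                       lvs_script='sky130.lvs',             # hypothetical PDK LVS script
                       gds_path='designs/top.gds.gz',       # hypothetical layout
                       schematic_path='designs/top.spice',  # hypothetical schematic
                       log_path='output/top_lvs.log',
                       lvsdb_path='output/top.lvsdb.gz')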
diff --git a/pycov/z_2ea764e3f741ac46_lvsdb_extractor_py.html b/pycov/z_2ea764e3f741ac46_lvsdb_extractor_py.html
deleted file mode 100644
index f6401851..00000000
--- a/pycov/z_2ea764e3f741ac46_lvsdb_extractor_py.html
+++ /dev/null
@@ -1,374 +0,0 @@
Coverage for kpex\klayout\lvsdb_extractor.py: 67% (132 statements; coverage.py v7.6.8, created at 2024-12-05 16:38 +0000). Source listing identical to the kpex/klayout/lvsdb_extractor.py page above.

diff --git a/pycov/z_2ea764e3f741ac46_netlist_csv_py.html b/pycov/z_2ea764e3f741ac46_netlist_csv_py.html
deleted file mode 100644
index 1f2b1533..00000000
--- a/pycov/z_2ea764e3f741ac46_netlist_csv_py.html
+++ /dev/null
@@ -1,156 +0,0 @@
Coverage for kpex\klayout\netlist_csv.py: 26% (23 statements; coverage.py v7.6.8, created at 2024-12-05 16:38 +0000). Source listing identical to the kpex/klayout/netlist_csv.py page above.

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from __future__ import annotations 

-

25 

-

26import klayout.db as kdb 

-

27 

-

28from kpex.log import ( 

-

29 info, 

-

30) 

-

31 

-

32 

-

33class NetlistCSVWriter: 

-

34 @staticmethod 

-

35 def write_csv(netlist: kdb.Netlist, 

-

36 top_cell_name: str, 

-

37 output_path: str): 

-

38 with open(output_path, 'w') as f: 

-

39 f.write('Device;Net1;Net2;Capacitance [F];Capacitance [fF]\n') 

-

40 

-

41 top_circuit: kdb.Circuit = netlist.circuit_by_name(top_cell_name) 

-

42 

-

43 # NOTE: only caps for now 

-

44 for d in top_circuit.each_device(): 

-

45 # https://www.klayout.de/doc-qt5/code/class_Device.html 

-

46 dc = d.device_class() 

-

47 if isinstance(dc, kdb.DeviceClassCapacitor): 

-

48 dn = d.expanded_name() or d.name 

-

49 if dc.name != 'PEX_CAP': 

-

50 info(f"Ignoring device {dn}") 

-

51 continue 

-

52 param_defs = dc.parameter_definitions() 

-

53 params = {p.name: d.parameter(p.id()) for p in param_defs} 

-

54 d: kdb.Device 

-

55 net1 = d.net_for_terminal('A') 

-

56 net2 = d.net_for_terminal('B') 

-

57 cap = params['C'] 

-

58 cap_femto = round(cap * 1e15, 2) 

-

59 f.write(f"{dn};{net1.name};{net2.name};{'%.12g' % cap};{cap_femto}f\n") 

-
diff --git a/pycov/z_2ea764e3f741ac46_netlist_expander_py.html b/pycov/z_2ea764e3f741ac46_netlist_expander_py.html
deleted file mode 100644
index eba72765..00000000
--- a/pycov/z_2ea764e3f741ac46_netlist_expander_py.html
+++ /dev/null
@@ -1,245 +0,0 @@
[245 deleted lines: coverage.py v7.6.8 HTML report for kpex\klayout\netlist_expander.py (97% coverage, 70 statements). The annotated listing shows NetlistExpander.expand(), which duplicates the extracted netlist, adds a PEX_CAP device class, maps the conductor names of the FasterCap/FastCap Maxwell capacitance matrix onto nets, creates Cext_i_j capacitors from the off-diagonal values and the derived Cii-to-ground values, and shorts the FC_GND and VSUBS nets to GND through 0-ohm PEX_RES resistors.]
diff --git a/pycov/z_2ea764e3f741ac46_netlist_reducer_py.html b/pycov/z_2ea764e3f741ac46_netlist_reducer_py.html
deleted file mode 100644
index 516335db..00000000
--- a/pycov/z_2ea764e3f741ac46_netlist_reducer_py.html
+++ /dev/null
@@ -1,160 +0,0 @@
[160 deleted lines: coverage.py v7.6.8 HTML report for kpex\klayout\netlist_reducer.py (82% coverage, 22 statements). The annotated listing shows NetlistReducer.reduce(), which runs combine_devices() on a duplicated netlist and removes parasitic capacitors below a threshold (default 0.05 fF).]
diff --git a/pycov/z_2ea764e3f741ac46_repair_rdb_py.html b/pycov/z_2ea764e3f741ac46_repair_rdb_py.html
deleted file mode 100644
index 1ad31483..00000000
--- a/pycov/z_2ea764e3f741ac46_repair_rdb_py.html
+++ /dev/null
@@ -1,234 +0,0 @@
[234 deleted lines: coverage.py v7.6.8 HTML report for kpex\klayout\repair_rdb.py (16% coverage, 79 statements). The annotated listing shows parse_category_path(), repair_rdb_xml() and repair_rdb(), which re-insert missing category entries into a broken KLayout marker database (.rdb / .rdb.gz) so that rdb.ReportDatabase can load it again, plus a small __main__ entry point.]
diff --git a/pycov/z_31e83241eddb0cfa___init___py.html b/pycov/z_31e83241eddb0cfa___init___py.html
deleted file mode 100644
index a3b0ac51..00000000
--- a/pycov/z_31e83241eddb0cfa___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[120 deleted lines: coverage.py v7.6.8 HTML report for kpex\__init__.py (100% coverage, 0 statements). The file contains only the GPL-3.0-or-later license header.]
diff --git a/pycov/z_31e83241eddb0cfa_kpex_cli_py.html b/pycov/z_31e83241eddb0cfa_kpex_cli_py.html
deleted file mode 100644
index 7771564e..00000000
--- a/pycov/z_31e83241eddb0cfa_kpex_cli_py.html
+++ /dev/null
@@ -1,853 +0,0 @@
[853 deleted lines: coverage.py v7.6.8 HTML report for kpex\kpex_cli.py (44% coverage, 412 statements). The annotated listing shows the KpexCLI class: rich-argparse argument parsing and validation (GDS vs. LVSDB input modes, FasterCap / FastCap2 / MAGIC / 2.5D engine options), LVSDB creation via a KLayout LVS run or a cached LVSDB, FasterCap input building, the run_fastercap_extraction / run_fastcap_extraction / run_magic_extraction / run_kpex_2_5d_engine methods with netlist expansion, CSV export and reduction, logging setup, and the __main__ entry point.]
diff --git a/pycov/z_31e83241eddb0cfa_tech_info_py.html b/pycov/z_31e83241eddb0cfa_tech_info_py.html
deleted file mode 100644
index 700656ed..00000000
--- a/pycov/z_31e83241eddb0cfa_tech_info_py.html
+++ /dev/null
@@ -1,382 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex\tech_info.py: 45% coverage, 157 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_31e83241eddb0cfa_version_py.html b/pycov/z_31e83241eddb0cfa_version_py.html
deleted file mode 100644
index b6dea939..00000000
--- a/pycov/z_31e83241eddb0cfa_version_py.html
+++ /dev/null
@@ -1,121 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex\version.py: 100% coverage, 1 statement, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_45b499fe6cab3296___init___py.html b/pycov/z_45b499fe6cab3296___init___py.html
deleted file mode 100644
index e0c1c3d5..00000000
--- a/pycov/z_45b499fe6cab3296___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[coverage.py v7.6.8 HTML report for tests/fastercap/__init__.py: 100% coverage, 0 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_45b499fe6cab3296_fastercap_model_generator_test_py.html b/pycov/z_45b499fe6cab3296_fastercap_model_generator_test_py.html
deleted file mode 100644
index 768e0ffb..00000000
--- a/pycov/z_45b499fe6cab3296_fastercap_model_generator_test_py.html
+++ /dev/null
@@ -1,201 +0,0 @@
[coverage.py v7.6.8 HTML report for tests/fastercap/fastercap_model_generator_test.py: 100% coverage, 60 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_45b499fe6cab3296_fastercap_runner_test_py.html b/pycov/z_45b499fe6cab3296_fastercap_runner_test_py.html
deleted file mode 100644
index bc430f00..00000000
--- a/pycov/z_45b499fe6cab3296_fastercap_runner_test_py.html
+++ /dev/null
@@ -1,149 +0,0 @@
[coverage.py v7.6.8 HTML report for tests/fastercap/fastercap_runner_test.py: 100% coverage, 21 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_4832265eea321c21___init___py.html b/pycov/z_4832265eea321c21___init___py.html
deleted file mode 100644
index 1964a74f..00000000
--- a/pycov/z_4832265eea321c21___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex/magic/__init__.py: 100% coverage, 0 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_4832265eea321c21_magic_runner_py.html b/pycov/z_4832265eea321c21_magic_runner_py.html
deleted file mode 100644
index 253bbf73..00000000
--- a/pycov/z_4832265eea321c21_magic_runner_py.html
+++ /dev/null
@@ -1,250 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex/magic/magic_runner.py: 34% coverage, 44 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_48f13015c956926b___init___py.html b/pycov/z_48f13015c956926b___init___py.html
deleted file mode 100644
index 91ceef69..00000000
--- a/pycov/z_48f13015c956926b___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[coverage.py v7.6.8 HTML report for tests\fastercap\__init__.py: 100% coverage, 0 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_48f13015c956926b_fastercap_model_generator_test_py.html b/pycov/z_48f13015c956926b_fastercap_model_generator_test_py.html
deleted file mode 100644
index 11507236..00000000
--- a/pycov/z_48f13015c956926b_fastercap_model_generator_test_py.html
+++ /dev/null
@@ -1,201 +0,0 @@
[coverage.py v7.6.8 HTML report for tests\fastercap\fastercap_model_generator_test.py: 98% coverage, 60 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_48f13015c956926b_fastercap_runner_test_py.html b/pycov/z_48f13015c956926b_fastercap_runner_test_py.html
deleted file mode 100644
index 47413f38..00000000
--- a/pycov/z_48f13015c956926b_fastercap_runner_test_py.html
+++ /dev/null
@@ -1,149 +0,0 @@
[coverage.py v7.6.8 HTML report for tests\fastercap\fastercap_runner_test.py: 100% coverage, 21 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_4c73bcae445d81c6___init___py.html b/pycov/z_4c73bcae445d81c6___init___py.html
deleted file mode 100644
index f02b503b..00000000
--- a/pycov/z_4c73bcae445d81c6___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[coverage.py v7.6.8 HTML report for tests\common\__init__.py: 100% coverage, 0 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_4c73bcae445d81c6_capacitance_matrix_test_py.html b/pycov/z_4c73bcae445d81c6_capacitance_matrix_test_py.html
deleted file mode 100644
index c9ce8b98..00000000
--- a/pycov/z_4c73bcae445d81c6_capacitance_matrix_test_py.html
+++ /dev/null
@@ -1,167 +0,0 @@
[coverage.py v7.6.8 HTML report for tests\common\capacitance_matrix_test.py: 100% coverage, 36 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_52482777700ec44a___init___py.html b/pycov/z_52482777700ec44a___init___py.html
deleted file mode 100644
index 891b7863..00000000
--- a/pycov/z_52482777700ec44a___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex\util\__init__.py: 100% coverage, 0 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_52482777700ec44a_argparse_helpers_py.html b/pycov/z_52482777700ec44a_argparse_helpers_py.html
deleted file mode 100644
index c318d236..00000000
--- a/pycov/z_52482777700ec44a_argparse_helpers_py.html
+++ /dev/null
@@ -1,149 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex\util\argparse_helpers.py: 70% coverage, 20 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_52482777700ec44a_multiple_choice_py.html b/pycov/z_52482777700ec44a_multiple_choice_py.html
deleted file mode 100644
index a02bad79..00000000
--- a/pycov/z_52482777700ec44a_multiple_choice_py.html
+++ /dev/null
@@ -1,167 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex\util\multiple_choice.py: 61% coverage, 28 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_588bb9d7e9b47fd3___init___py.html b/pycov/z_588bb9d7e9b47fd3___init___py.html
deleted file mode 100644
index 31994e67..00000000
--- a/pycov/z_588bb9d7e9b47fd3___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex\rcx25\__init__.py: 100% coverage, 0 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_588bb9d7e9b47fd3_extraction_results_py.html b/pycov/z_588bb9d7e9b47fd3_extraction_results_py.html
deleted file mode 100644
index d4cd21ab..00000000
--- a/pycov/z_588bb9d7e9b47fd3_extraction_results_py.html
+++ /dev/null
@@ -1,261 +0,0 @@
[coverage.py v7.6.8 HTML report for kpex\rcx25\extraction_results.py: 81% coverage, 91 statements, created at 2024-12-05 16:38 +0000; listed source:]

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from __future__ import annotations
from collections import defaultdict
from dataclasses import dataclass, field
from typing import *

import process_parasitics_pb2


NetName = str
LayerName = str
CellName = str


@dataclass
class NodeRegion:
    layer_name: LayerName
    net_name: NetName
    cap_to_gnd: float
    perimeter: float
    area: float


@dataclass(frozen=True)
class SidewallKey:
    layer: LayerName
    net1: NetName
    net2: NetName


@dataclass
class SidewallCap:  # see Magic EdgeCap, extractInt.c L444
    key: SidewallKey
    cap_value: float  # femto farad
    distance: float   # distance in µm
    length: float     # length in µm
    tech_spec: process_parasitics_pb2.CapacitanceInfo.SidewallCapacitance


@dataclass(frozen=True)
class OverlapKey:
    layer_top: LayerName
    net_top: NetName
    layer_bot: LayerName
    net_bot: NetName


@dataclass
class OverlapCap:
    key: OverlapKey
    cap_value: float       # femto farad
    shielded_area: float   # in µm^2
    unshielded_area: float # in µm^2
    tech_spec: process_parasitics_pb2.CapacitanceInfo.OverlapCapacitance


@dataclass(frozen=True)
class SideOverlapKey:
    layer_inside: LayerName
    net_inside: NetName
    layer_outside: LayerName
    net_outside: NetName

    def __repr__(self) -> str:
        return f"{self.layer_inside}({self.net_inside})-"\
               f"{self.layer_outside}({self.net_outside})"


@dataclass
class SideOverlapCap:
    key: SideOverlapKey
    cap_value: float  # femto farad

    def __str__(self) -> str:
        return f"(Side Overlap): {self.key} = {round(self.cap_value, 6)}fF"


@dataclass(frozen=True)
class NetCoupleKey:
    net1: NetName
    net2: NetName

    def __repr__(self) -> str:
        return f"{self.net1}-{self.net2}"

    # NOTE: we norm net names alphabetically
    def normed(self) -> NetCoupleKey:
        if self.net1 < self.net2:
            return self
        else:
            return NetCoupleKey(self.net2, self.net1)


@dataclass
class ExtractionSummary:
    capacitances: Dict[NetCoupleKey, float]

    @classmethod
    def merged(cls, summaries: List[ExtractionSummary]) -> ExtractionSummary:
        merged_capacitances = defaultdict(float)
        for s in summaries:
            for couple_key, cap in s.capacitances.items():
                merged_capacitances[couple_key.normed()] += cap
        return ExtractionSummary(merged_capacitances)


@dataclass
class CellExtractionResults:
    cell_name: CellName

    overlap_coupling: Dict[OverlapKey, OverlapCap] = field(default_factory=dict)
    sidewall_table: Dict[SidewallKey, SidewallCap] = field(default_factory=dict)
    sideoverlap_table: Dict[SideOverlapKey, SideOverlapCap] = field(default_factory=dict)

    def summarize(self) -> ExtractionSummary:
        overlap_summary = ExtractionSummary({
            NetCoupleKey(key.net_top, key.net_bot): cap.cap_value
            for key, cap in self.overlap_coupling.items()
        })

        sidewall_summary = ExtractionSummary({
            NetCoupleKey(key.net1, key.net2): cap.cap_value
            for key, cap in self.sidewall_table.items()
        })

        sideoverlap_summary = ExtractionSummary({
            NetCoupleKey(key.net_inside, key.net_outside): cap.cap_value
            for key, cap in self.sideoverlap_table.items()
        })

        return ExtractionSummary.merged([
            overlap_summary, sidewall_summary, sideoverlap_summary
        ])


@dataclass
class ExtractionResults:

160 cell_extraction_results: Dict[CellName, CellExtractionResults] = field(default_factory=dict) 

-

161 

-

162 def summarize(self) -> ExtractionSummary: 

-

163 subsummaries = [s.summarize() for s in self.cell_extraction_results.values()] 

-

164 return ExtractionSummary.merged(subsummaries) 

-
diff --git a/pycov/z_588bb9d7e9b47fd3_extractor_py.html b/pycov/z_588bb9d7e9b47fd3_extractor_py.html
deleted file mode 100644
index 64f3a440..00000000
--- a/pycov/z_588bb9d7e9b47fd3_extractor_py.html
+++ /dev/null
@@ -1,840 +0,0 @@
-Coverage for kpex\rcx25\extractor.py: 5% (399 statements)
-coverage.py v7.6.8, created at 2024-12-05 16:38 +0000
-[deleted listing, lines 1-743: GPL-3.0-or-later license header and the RCExtractor class, which extracts overlap, sidewall and fringe/side-overlap capacitances from a KLayout LVS database using space_check markers and an EdgeNeighborhoodVisitor, reporting results into an rdb.ReportDatabase]
diff --git a/pycov/z_5aed77b868240c56___init___py.html b/pycov/z_5aed77b868240c56___init___py.html
deleted file mode 100644
index b4cf29cd..00000000
--- a/pycov/z_5aed77b868240c56___init___py.html
+++ /dev/null
@@ -1,141 +0,0 @@
-Coverage for kpex/log/__init__.py: 100% (1 statement)
-coverage.py v7.6.8, created at 2024-12-05 16:36 +0000
-[deleted listing, lines 1-44: GPL-3.0-or-later license header, module docstring, and re-exports from .logger (LogLevel, set_log_level, register_additional_handler, deregister_additional_handler, console, debug, rule, subproc, info, warning, error)]
diff --git a/pycov/z_5aed77b868240c56_logger_py.html b/pycov/z_5aed77b868240c56_logger_py.html
deleted file mode 100644
index 835c76a6..00000000
--- a/pycov/z_5aed77b868240c56_logger_py.html
+++ /dev/null
@@ -1,280 +0,0 @@
-Coverage for kpex/log/logger.py: 95% (79 statements)
-coverage.py v7.6.8, created at 2024-12-05 16:36 +0000
-[deleted listing, lines 1-183: GPL-3.0-or-later license header and the kpex logging implementation (LogLevel enum, LogLevelFormatter, LogLevelFilter, rich-based configure_logger, and the debug/subproc/rule/info/warning/error helpers)]
diff --git a/pycov/z_5f30060c77e65d78_lvs_runner_test_py.html b/pycov/z_5f30060c77e65d78_lvs_runner_test_py.html
deleted file mode 100644
index 72cff73d..00000000
--- a/pycov/z_5f30060c77e65d78_lvs_runner_test_py.html
+++ /dev/null
@@ -1,160 +0,0 @@
-Coverage for tests/klayout/lvs_runner_test.py: 54% (24 statements)
-coverage.py v7.6.8, created at 2024-12-05 16:36 +0000
-[deleted listing, lines 1-63: GPL-3.0-or-later license header and the LVSRunner test case (skipped by default because the KLayout LVS run is long-running)]
diff --git a/pycov/z_5f30060c77e65d78_netlist_expander_test_py.html b/pycov/z_5f30060c77e65d78_netlist_expander_test_py.html
deleted file mode 100644
index 9d156d7d..00000000
--- a/pycov/z_5f30060c77e65d78_netlist_expander_test_py.html
+++ /dev/null
@@ -1,182 +0,0 @@
-Coverage for tests/klayout/netlist_expander_test.py: 100% (37 statements)
-coverage.py v7.6.8, created at 2024-12-05 16:36 +0000
-[deleted listing, lines 1-85: GPL-3.0-or-later license header and the NetlistExpander test, which expands the nmos_diode2 extracted netlist with a FasterCap capacitance matrix]
diff --git a/pycov/z_5f30060c77e65d78_netlist_reducer_test_py.html b/pycov/z_5f30060c77e65d78_netlist_reducer_test_py.html
deleted file mode 100644
index 690dcb1e..00000000
--- a/pycov/z_5f30060c77e65d78_netlist_reducer_test_py.html
+++ /dev/null
@@ -1,167 +0,0 @@
-Coverage for tests/klayout/netlist_reducer_test.py: 100% (33 statements)
-coverage.py v7.6.8, created at 2024-12-05 16:36 +0000
-[deleted listing, lines 1-70: GPL-3.0-or-later license header and the NetlistReducer tests (nmos_diode2 and cap_vpp expanded netlists)]
diff --git a/pycov/z_741a08911aeaedad___init___py.html b/pycov/z_741a08911aeaedad___init___py.html
deleted file mode 100644
index d6cf747b..00000000
--- a/pycov/z_741a08911aeaedad___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: tests/common/__init__.py, 100% of 0 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_741a08911aeaedad_capacitance_matrix_test_py.html b/pycov/z_741a08911aeaedad_capacitance_matrix_test_py.html
deleted file mode 100644
index 904b0da6..00000000
--- a/pycov/z_741a08911aeaedad_capacitance_matrix_test_py.html
+++ /dev/null
@@ -1,167 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: tests/common/capacitance_matrix_test.py, 100% of 36 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_8d81377067d4aa92_process_parasitics_pb2_py.html b/pycov/z_8d81377067d4aa92_process_parasitics_pb2_py.html
deleted file mode 100644
index 8264c8a2..00000000
--- a/pycov/z_8d81377067d4aa92_process_parasitics_pb2_py.html
+++ /dev/null
@@ -1,151 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: build\python_kpex_protobuf\process_parasitics_pb2.py, 36% of 33 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_8d81377067d4aa92_process_stack_pb2_py.html b/pycov/z_8d81377067d4aa92_process_stack_pb2_py.html
deleted file mode 100644
index da9bf0ec..00000000
--- a/pycov/z_8d81377067d4aa92_process_stack_pb2_py.html
+++ /dev/null
@@ -1,155 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: build\python_kpex_protobuf\process_stack_pb2.py, 32% of 37 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_8d81377067d4aa92_tech_pb2_py.html b/pycov/z_8d81377067d4aa92_tech_pb2_py.html
deleted file mode 100644
index f7e508f5..00000000
--- a/pycov/z_8d81377067d4aa92_tech_pb2_py.html
+++ /dev/null
@@ -1,141 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: build\python_kpex_protobuf\tech_pb2.py, 61% of 23 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_95258413a42419dc_rcx25_test_py.html b/pycov/z_95258413a42419dc_rcx25_test_py.html
deleted file mode 100644
index ad370fef..00000000
--- a/pycov/z_95258413a42419dc_rcx25_test_py.html
+++ /dev/null
@@ -1,409 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: tests\rcx25\rcx25_test.py, 82% of 95 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_9747eacc0c5fa802_lvs_runner_test_py.html b/pycov/z_9747eacc0c5fa802_lvs_runner_test_py.html
deleted file mode 100644
index cdf660ad..00000000
--- a/pycov/z_9747eacc0c5fa802_lvs_runner_test_py.html
+++ /dev/null
@@ -1,160 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: tests\klayout\lvs_runner_test.py, 54% of 24 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_9747eacc0c5fa802_netlist_expander_test_py.html b/pycov/z_9747eacc0c5fa802_netlist_expander_test_py.html
deleted file mode 100644
index 7146c919..00000000
--- a/pycov/z_9747eacc0c5fa802_netlist_expander_test_py.html
+++ /dev/null
@@ -1,182 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: tests\klayout\netlist_expander_test.py, 100% of 37 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_9747eacc0c5fa802_netlist_reducer_test_py.html b/pycov/z_9747eacc0c5fa802_netlist_reducer_test_py.html
deleted file mode 100644
index 994f955d..00000000
--- a/pycov/z_9747eacc0c5fa802_netlist_reducer_test_py.html
+++ /dev/null
@@ -1,167 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: tests\klayout\netlist_reducer_test.py, 100% of 33 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_a44f0ac069e85531___init___py.html b/pycov/z_a44f0ac069e85531___init___py.html
deleted file mode 100644
index bf2538e7..00000000
--- a/pycov/z_a44f0ac069e85531___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: tests\__init__.py, 100% of 0 statements, created at 2024-12-05 16:38 +0000]
diff --git a/pycov/z_a5841ccd503d0903___init___py.html b/pycov/z_a5841ccd503d0903___init___py.html
deleted file mode 100644
index f1d6e118..00000000
--- a/pycov/z_a5841ccd503d0903___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
-[deleted coverage.py v7.6.8 HTML report: kpex/fastercap/__init__.py, 100% of 0 statements, created at 2024-12-05 16:36 +0000]
diff --git a/pycov/z_a5841ccd503d0903_fastercap_input_builder_py.html b/pycov/z_a5841ccd503d0903_fastercap_input_builder_py.html
deleted file mode 100644
index 8c13a2fa..00000000
--- a/pycov/z_a5841ccd503d0903_fastercap_input_builder_py.html
+++ /dev/null
@@ -1,437 +0,0 @@
-[coverage.py v7.6.8 HTML report header: kpex/fastercap/fastercap_input_builder.py, 11% of 171 statements, created at 2024-12-05 16:36 +0000; the deleted source listing follows]

1#! /usr/bin/env python3 

-

2# 

-

3# -------------------------------------------------------------------------------- 

-

4# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

5# Johannes Kepler University, Institute for Integrated Circuits. 

-

6# 

-

7# This file is part of KPEX  

-

8# (see https://github.com/martinjankoehler/klayout-pex). 

-

9# 

-

10# This program is free software: you can redistribute it and/or modify 

-

11# it under the terms of the GNU General Public License as published by 

-

12# the Free Software Foundation, either version 3 of the License, or 

-

13# (at your option) any later version. 

-

14# 

-

15# This program is distributed in the hope that it will be useful, 

-

16# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

18# GNU General Public License for more details. 

-

19# 

-

20# You should have received a copy of the GNU General Public License 

-

21# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

22# SPDX-License-Identifier: GPL-3.0-or-later 

-

23# -------------------------------------------------------------------------------- 

-

24# 

-

25 

-

26 

-

27# 

-

28# Protocol Buffer Schema for FasterCap Input Files 

-

29# https://www.fastfieldsolvers.com/software.htm#fastercap 

-

30# 

-

31 

-

32from typing import * 

-

33from functools import cached_property 

-

34import math 

-

35 

-

36import klayout.db as kdb 

-

37 

-

38from ..klayout.lvsdb_extractor import KLayoutExtractionContext, KLayoutExtractedLayerInfo, GDSPair 

-

39from .fastercap_model_generator import FasterCapModelBuilder, FasterCapModelGenerator 

-

40from ..log import ( 

-

41 console, 

-

42 debug, 

-

43 info, 

-

44 warning, 

-

45 error 

-

46) 

-

47from ..tech_info import TechInfo 

-

48 

-

49import process_stack_pb2 

-

50 

-

51 

-

52class FasterCapInputBuilder: 

-

53 def __init__(self, 

-

54 pex_context: KLayoutExtractionContext, 

-

55 tech_info: TechInfo, 

-

56 k_void: float = 3.5, 

-

57 delaunay_amax: float = 0.0, 

-

58 delaunay_b: float = 1.0): 

-

59 self.pex_context = pex_context 

-

60 self.tech_info = tech_info 

-

61 self.k_void = k_void 

-

62 self.delaunay_amax = delaunay_amax 

-

63 self.delaunay_b = delaunay_b 

-

64 

-

65 @cached_property 

-

66 def dbu(self) -> float: 

-

67 return self.pex_context.dbu 

-

68 

-

69 def gds_pair(self, layer_name) -> Optional[GDSPair]: 

-

70 gds_pair = self.tech_info.gds_pair_for_computed_layer_name.get(layer_name, None) 

-

71 if not gds_pair: 

-

72 gds_pair = self.tech_info.gds_pair_for_layer_name.get(layer_name, None) 

-

73 if not gds_pair: 

-

74 warning(f"Can't find GDS pair for layer {layer_name}") 

-

75 return None 

-

76 return gds_pair 

-

77 

-

78 def shapes_of_net(self, layer_name: str, net: kdb.Net) -> Optional[kdb.Region]: 

-

79 gds_pair = self.gds_pair(layer_name=layer_name) 

-

80 if not gds_pair: 

-

81 return None 

-

82 

-

83 shapes = self.pex_context.shapes_of_net(gds_pair=gds_pair, net=net) 

-

84 if not shapes: 

-

85 debug(f"Nothing extracted for layer {layer_name}") 

-

86 return shapes 

-

87 

-

88 def shapes_of_layer(self, layer_name: str) -> Optional[kdb.Region]: 

-

89 gds_pair = self.gds_pair(layer_name=layer_name) 

-

90 if not gds_pair: 

-

91 return None 

-

92 

-

93 shapes = self.pex_context.shapes_of_layer(gds_pair=gds_pair) 

-

94 if not shapes: 

-

95 debug(f"Nothing extracted for layer {layer_name}") 

-

96 return shapes 

-

97 

-

98 def top_cell_bbox(self) -> kdb.Box: 

-

99 return self.pex_context.top_cell_bbox() 

-

100 

-

101 def build(self) -> FasterCapModelGenerator: 

-

102 lvsdb = self.pex_context.lvsdb 

-

103 netlist: kdb.Netlist = lvsdb.netlist() 

-

104 

-

105 def format_terminal(t: kdb.NetTerminalRef) -> str: 

-

106 td = t.terminal_def() 

-

107 d = t.device() 

-

108 return f"{d.expanded_name()}/{td.name}/{td.description}" 

-

109 

-

110 model_builder = FasterCapModelBuilder( 

-

111 dbu=self.dbu, 

-

112 k_void=self.k_void, 

-

113 delaunay_amax=self.delaunay_amax, # test/compare with smaller, e.g. 0.05 => more triangles 

-

114 delaunay_b=self.delaunay_b # test/compare with 1.0 => more triangles at edges 

-

115 ) 

-

116 

-

117 fox_layer = self.tech_info.field_oxide_layer 

-

118 

-

119 model_builder.add_material(name=fox_layer.name, k=fox_layer.field_oxide_layer.dielectric_k) 

-

120 for diel_name, diel_k in self.tech_info.dielectric_by_name.items(): 

-

121 model_builder.add_material(name=diel_name, k=diel_k) 

-

122 

-

123 circuit = netlist.circuit_by_name(self.pex_context.top_cell.name) 

-

124 # https://www.klayout.de/doc-qt5/code/class_Circuit.html 

-

125 if not circuit: 

-

126 circuits = [c.name for c in netlist.each_circuit()] 

-

127 raise Exception(f"Expected circuit called {self.pex_context.top_cell.name} in extracted netlist, " 

-

128 f"only available circuits are: {circuits}") 

-

129 

-

130 diffusion_regions: List[kdb.Region] = [] 

-

131 

-

132 for net in circuit.each_net(): 

-

133 # https://www.klayout.de/doc-qt5/code/class_Net.html 

-

134 debug(f"Net name={net.name}, expanded_name={net.expanded_name()}, pin_count={net.pin_count()}, " 

-

135 f"is_floating={net.is_floating()}, is_passive={net.is_passive()}, " 

-

136 f"terminals={list(map(lambda t: format_terminal(t), net.each_terminal()))}") 

-

137 

-

138 net_name = net.expanded_name() 

-

139 

-

140 for metal_layer in self.tech_info.process_metal_layers: 

-

141 metal_layer_name = metal_layer.name 

-

142 metal_layer = metal_layer.metal_layer 

-

143 

-

144 metal_z_bottom = metal_layer.height 

-

145 metal_z_top = metal_z_bottom + metal_layer.thickness 

-

146 

-

147 shapes = self.shapes_of_net(layer_name=metal_layer_name, net=net) 

-

148 if shapes: 

-

149 if shapes.count() >= 1: 

-

150 info(f"Conductor {net_name}, metal {metal_layer_name}, " 

-

151 f"z={metal_layer.height}, height={metal_layer.thickness}") 

-

152 model_builder.add_conductor(net_name=net_name, 

-

153 layer=shapes, 

-

154 z=metal_layer.height, 

-

155 height=metal_layer.thickness) 

-

156 

-

157 if metal_layer.HasField('contact_above'): 

-

158 contact = metal_layer.contact_above 

-

159 shapes = self.shapes_of_net(layer_name=contact.name, net=net) 

-

160 if shapes and not shapes.is_empty(): 

-

161 info(f"Conductor {net_name}, via {contact.name}, " 

-

162 f"z={metal_z_top}, height={contact.thickness}") 

-

163 model_builder.add_conductor(net_name=net_name, 

-

164 layer=shapes, 

-

165 z=metal_z_top, 

-

166 height=contact.thickness) 

-

167 

-

168 # diel_above = self.tech_info.process_stack_layer_by_name.get(metal_layer.reference_above, None) 

-

169 # if diel_above: 

-

170 # #model_builder.add_dielectric(material_name=metal_layer.reference_above, 

-

171 # # layer=kdb.Region().) 

-

172 # pass 

-

173 # TODO: add stuff 

-

174 

-

175 # DIFF / TAP 

-

176 for diffusion_layer in self.tech_info.process_diffusion_layers: 

-

177 diffusion_layer_name = diffusion_layer.name 

-

178 diffusion_layer = diffusion_layer.diffusion_layer 

-

179 shapes = self.shapes_of_net(layer_name=diffusion_layer_name, net=net) 

-

180 if shapes and not shapes.is_empty(): 

-

181 diffusion_regions.append(shapes) 

-

182 info(f"Diffusion {net_name}, layer {diffusion_layer_name}, " 

-

183 f"z={0}, height={0.1}") 

-

184 model_builder.add_conductor(net_name=net_name, 

-

185 layer=shapes, 

-

186 z=0, # TODO 

-

187 height=0.1) # TODO: diffusion_layer.height 

-

188 

-

189 contact = diffusion_layer.contact_above 

-

190 shapes = self.shapes_of_net(layer_name=contact.name, net=net) 

-

191 if shapes and not shapes.is_empty(): 

-

192 info(f"Diffusion {net_name}, contact {contact.name}, " 

-

193 f"z={0}, height={contact.thickness}") 

-

194 model_builder.add_conductor(net_name=net_name, 

-

195 layer=shapes, 

-

196 z=0.0, 

-

197 height=contact.thickness) 

-

198 

-

199 enlarged_top_cell_bbox = self.top_cell_bbox().enlarged(math.floor(8 / self.dbu)) # 8µm fringe halo 

-

200 

-

201 # 

-

202 # global substrate block below everything. independent of nets! 

-

203 # 

-

204 

-

205 substrate_layer = self.tech_info.process_substrate_layer.substrate_layer 

-

206 substrate_region = kdb.Region() 

-

207 

-

208 substrate_block = enlarged_top_cell_bbox.dup() 

-

209 substrate_region.insert(substrate_block) 

-

210 

-

211 diffusion_margin = math.floor(1 / self.dbu) # 1 µm 

-

212 for d in diffusion_regions: 

-

213 substrate_region -= d.sized(diffusion_margin) 

-

214 info(f"Substrate VSUBS, " 

-

215 f"z={0 - substrate_layer.height - substrate_layer.thickness}, height={substrate_layer.thickness}") 

-

216 model_builder.add_conductor(net_name="VSUBS", 

-

217 layer=substrate_region, 

-

218 z=0 - substrate_layer.height - substrate_layer.thickness, 

-

219 height=substrate_layer.thickness) 

-

220 

-

221 # 

-

222 # add dielectrics 

-

223 # 

-

224 

-

225 fox_region = kdb.Region() 

-

226 fox_block = enlarged_top_cell_bbox.dup() 

-

227 fox_region.insert(fox_block) 

-

228 

-

229 # field oxide goes from substrate/diff/well up to below the gate-poly 

-

230 gate_poly_height = self.tech_info.gate_poly_layer.metal_layer.height 

-

231 fox_z = 0 

-

232 fox_height = gate_poly_height - fox_z 

-

233 info(f"Simple dielectric (field oxide) {fox_layer.name}: " 

-

234 f"z={fox_z}, height={fox_height}") 

-

235 model_builder.add_dielectric(material_name=fox_layer.name, 

-

236 layer=fox_region, 

-

237 z=fox_z, 

-

238 height=fox_height) 

-

239 

-

240 for metal_layer in self.tech_info.process_metal_layers: 

-

241 metal_layer_name = metal_layer.name 

-

242 metal_layer = metal_layer.metal_layer 

-

243 

-

244 metal_z_bottom = metal_layer.height 

-

245 

-

246 extracted_shapes = self.shapes_of_layer(layer_name=metal_layer_name) 

-

247 

-

248 sidewall_region: Optional[kdb.Region] = None 

-

249 sidewall_height = 0 

-

250 

-

251 no_metal_region: Optional[kdb.Region] = None 

-

252 no_metal_height = 0 

-

253 

-

254 # 

-

255 # add sidewall dielectrics 

-

256 # 

-

257 if extracted_shapes: 

-

258 sidewall_height = 0 

-

259 sidewall_region = extracted_shapes 

-

260 sidewallee = metal_layer_name 

-

261 

-

262 while True: 

-

263 sidewall = self.tech_info.sidewall_dielectric_layer(sidewallee) 

-

264 if not sidewall: 

-

265 break 

-

266 match sidewall.layer_type: 

-

267 case process_stack_pb2.ProcessStackInfo.LAYER_TYPE_SIDEWALL_DIELECTRIC: 

-

268 d = math.floor(sidewall.sidewall_dielectric_layer.width_outside_sidewall / self.dbu) 

-

269 sidewall_region = sidewall_region.sized(d) 

-

270 h_delta = sidewall.sidewall_dielectric_layer.height_above_metal or metal_layer.thickness 

-

271 # if h_delta == 0: 

-

272 # h_delta = metal_layer.thickness 

-

273 sidewall_height += h_delta 

-

274 info(f"Sidewall dielectric {sidewall.name}: z={metal_layer.height}, height={sidewall_height}") 

-

275 model_builder.add_dielectric(material_name=sidewall.name, 

-

276 layer=sidewall_region, 

-

277 z=metal_layer.height, 

-

278 height=sidewall_height) 

-

279 

-

280 case process_stack_pb2.ProcessStackInfo.LAYER_TYPE_CONFORMAL_DIELECTRIC: 

-

281 conf_diel = sidewall.conformal_dielectric_layer 

-

282 d = math.floor(conf_diel.thickness_sidewall / self.dbu) 

-

283 sidewall_region = sidewall_region.sized(d) 

-

284 h_delta = metal_layer.thickness + conf_diel.thickness_over_metal 

-

285 sidewall_height += h_delta 

-

286 info(f"Conformal dielectric (sidewall) {sidewall.name}: " 

-

287 f"z={metal_layer.height}, height={sidewall_height}") 

-

288 model_builder.add_dielectric(material_name=sidewall.name, 

-

289 layer=sidewall_region, 

-

290 z=metal_layer.height, 

-

291 height=sidewall_height) 

-

292 if conf_diel.thickness_where_no_metal > 0.0: 

-

293 no_metal_block = enlarged_top_cell_bbox.dup() 

-

294 no_metal_region = kdb.Region() 

-

295 no_metal_region.insert(no_metal_block) 

-

296 no_metal_region -= sidewall_region 

-

297 no_metal_height = conf_diel.thickness_where_no_metal 

-

298 info(f"Conformal dielectric (where no metal) {sidewall.name}: " 

-

299 f"z={metal_layer.height}, height={no_metal_height}") 

-

300 model_builder.add_dielectric(material_name=sidewall.name, 

-

301 layer=no_metal_region, 

-

302 z=metal_layer.height, 

-

303 height=no_metal_height) 

-

304 

-

305 sidewallee = sidewall.name 

-

306 

-

307 # 

-

308 # add simple dielectric 

-

309 # 

-

310 simple_dielectric, diel_height = self.tech_info.simple_dielectric_above_metal(metal_layer_name) 

-

311 if simple_dielectric: 

-

312 diel_block = enlarged_top_cell_bbox.dup() 

-

313 diel_region = kdb.Region() 

-

314 diel_region.insert(diel_block) 

-

315 if sidewall_region: 

-

316 assert sidewall_height >= 0.0 

-

317 diel_region -= sidewall_region 

-

318 info(f"Simple dielectric (sidewall) {simple_dielectric.name}: " 

-

319 f"z={metal_z_bottom + sidewall_height}, height={diel_height - sidewall_height}") 

-

320 model_builder.add_dielectric(material_name=simple_dielectric.name, 

-

321 layer=sidewall_region, 

-

322 z=metal_z_bottom + sidewall_height, 

-

323 height=diel_height - sidewall_height) 

-

324 if no_metal_region: 

-

325 info(f"Simple dielectric (no metal) {simple_dielectric.name}: " 

-

326 f"z={metal_z_bottom + no_metal_height}, height={diel_height - no_metal_height}") 

-

327 model_builder.add_dielectric(material_name=simple_dielectric.name, 

-

328 layer=diel_region, 

-

329 z=metal_z_bottom + no_metal_height, 

-

330 height=diel_height - no_metal_height) 

-

331 else: 

-

332 info(f"Simple dielectric {simple_dielectric.name}: " 

-

333 f"z={metal_z_bottom}, height={diel_height}") 

-

334 model_builder.add_dielectric(material_name=simple_dielectric.name, 

-

335 layer=diel_region, 

-

336 z=metal_z_bottom, 

-

337 height=diel_height) 

-

338 

-

339 gen = model_builder.generate() 

-

340 return gen 

-
diff --git a/pycov/z_a5841ccd503d0903_fastercap_model_generator_py.html b/pycov/z_a5841ccd503d0903_fastercap_model_generator_py.html
deleted file mode 100644
index d6ba9346..00000000
--- a/pycov/z_a5841ccd503d0903_fastercap_model_generator_py.html
+++ /dev/null
@@ -1,1136 +0,0 @@
[Deleted coverage report page: kpex/fastercap/fastercap_model_generator.py: 95% of 658 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000]

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24 

-

25# A class providing a service for building FastCap2 or FasterCap models 

-

26# 

-

27# This class is used the following way: 

-

28# 

-

29# 1) Create a FasterCapModelBuilder object 

-

30# Specify the default k value which is the k assumed for "empty space". 

-

31# You can also specify a maximum area and the "b" parameter for the 

-

32# triangulation. The b parameter corresponds to the minimum angle 

-

33# and should be <=1 (b=sin(min_angle)*2). 

-

34# I.e. b=1 -> min_angle=30 deg, b=0.5 -> min_angle~14.5 deg. 

-

35# 

-

36# 2) Add material definitions for the dielectrics 

-

37# Each material definition consists of a k value and 

-

38# a material name. 

-

39# 

-

40# 3) Add layers in the 2.5d view fashion 

-

41# Each layer is a sheet in 3d space that is extruded in vertical 

-

42# direction with the given start and stop z (or height) 

-

43# The layer must be a DRC::Layer or RBA::Region object. 

-

44# 

-

45# Layers can be added in two ways: 

-

46# 

-

47# * As conductors: specify the net name 

-

48# 

-

49# * As dielectric layer: specify the material name 

-

50# 

-

51# The layers can intersect. The package resolves intersections 

-

52# based on priority: conductors first, dielectrics according to 

-

53# their position in the "materials" definition (first entries have 

-

54# higher prio) 

-

55# 

-

56# 4) Generate a 3d model using "generate" 

-

57# This method returns an object you can use to generate STL files 

-

58# or FastCap files. 

-

59 
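# A minimal illustrative sketch of the four steps above (geometry, net/material
# names, k values and output paths are made up), assuming the
# FasterCapModelBuilder / FasterCapModelGenerator API defined further down in
# this file. Layer shapes are kdb.Region objects in database units, while z and
# height are passed in µm, matching how the extractor's build() method calls
# add_conductor() earlier in this patch.

import klayout.db as kdb

builder = FasterCapModelBuilder(dbu=0.001, k_void=3.9,
                                delaunay_amax=0.5, delaunay_b=0.5)    # step 1
builder.add_material(name='nild2', k=4.05)                            # step 2

metal1 = kdb.Region(kdb.Box(0, 0, 10000, 2000))                       # step 3
builder.add_conductor(net_name='VDD', layer=metal1, z=1.0, height=0.5)
builder.add_dielectric(material_name='nild2', layer=metal1.sized(2000),
                       z=1.0, height=1.0)

gen = builder.generate()                                              # step 4
if gen is not None:
    gen.check()
    lst_path = gen.write_fastcap(output_dir_path='/tmp/fastercap', prefix='example_')
    gen.dump_stl(output_dir_path='/tmp/fastercap', prefix='example_')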

-

60 

-

61from __future__ import annotations 

-

62 

-

63import base64 

-

64from collections import defaultdict 

-

65import hashlib 

-

66import os 

-

67from typing import * 

-

68from dataclasses import dataclass 

-

69from functools import reduce 

-

70import math 

-

71 

-

72import klayout.db as kdb 

-

73 

-

74from kpex.log import ( 

-

75 debug, 

-

76 info, 

-

77 warning, 

-

78 error 

-

79) 

-

80 

-

81 

-

82@dataclass 

-

83class FasterCapModelBuilder: 

-

84 dbu: float 

-

85 """Database unit""" 

-

86 

-

87 k_void: float 

-

88 """Default dielectric of 'empty space'""" 

-

89 

-

90 delaunay_amax: float 

-

91 """Maximum area parameter for the Delaunay triangulation""" 

-

92 

-

93 delaunay_b: float 

-

94 """ 

-

95 The delaunay_b parameter for the Delaunay triangulation  

-

96 corresponds to the minimum angle 

-

97 and should be <=1 (b=sin(min_angle)*2). 

-

98 I.e. b=1 -> min_angle=30 deg, b=0.5 -> min_angle~14.5 deg. 

-

99 """ 

-

100 

-

101 def __init__(self, 

-

102 dbu: float, 

-

103 k_void: float, 

-

104 delaunay_amax: float = 0.0, 

-

105 delaunay_b: float = 1.0, 

-

106 ): 

-

107 self.dbu = dbu 

-

108 self.k_void = k_void 

-

109 self.delaunay_amax = delaunay_amax 

-

110 self.delaunay_b = delaunay_b 

-

111 

-

112 self.materials: Dict[str, float] = {} 

-

113 self.net_names: List[str] = [] 

-

114 

-

115 # layer, zstart, zstop 

-

116 self.clayers: Dict[str, List[Tuple[kdb.Region, float, float]]] = {} 

-

117 self.dlayers: Dict[str, List[Tuple[kdb.Region, float, float]]] = {} 

-

118 

-

119 info(f"DBU: {'%.12g' % self.dbu}") 

-

120 info(f"Delaunay b: {'%.12g' % self.delaunay_b}") 

-

121 info(f"Delaunay area_max: {'%.12g' % self.delaunay_amax}") 

-

122 

-

123 def add_material(self, name: str, k: float): 

-

124 self.materials[name] = k 

-

125 

-

126 def add_dielectric(self, 

-

127 material_name: str, 

-

128 layer: kdb.Region, 

-

129 z: float, 

-

130 height: float): 

-

131 if hasattr(layer, 'data'): 

-

132 layer = layer.data 

-

133 self._add_layer(name=material_name, layer=layer, is_dielectric=True, z=z, height=height) 

-

134 

-

135 def add_conductor(self, 

-

136 net_name: str, 

-

137 layer: kdb.Region, 

-

138 z: float, 

-

139 height: float): 

-

140 if hasattr(layer, 'data'): 

-

141 layer = layer.data 

-

142 self._add_layer(name=net_name, layer=layer, is_dielectric=False, z=z, height=height) 

-

143 

-

144 def _norm2z(self, z: float) -> float: 

-

145 return z * self.dbu 

-

146 

-

147 def _z2norm(self, z: float) -> float: 

-

148 return math.floor(z / self.dbu + 1e-6) 

-

149 

-

150 def _add_layer(self, 

-

151 name: str, 

-

152 layer: kdb.Region, 

-

153 z: float, 

-

154 height: float, 

-

155 is_dielectric: bool): 

-

156 if is_dielectric and name not in self.materials: 

-

157 raise ValueError(f"Unknown material {name} - did you use 'add_material'?") 

-

158 

-

159 zstart: float = z 

-

160 zstop: float = zstart + height 

-

161 

-

162 if is_dielectric: 

-

163 if name not in self.dlayers: 

-

164 self.dlayers[name] = [] 

-

165 self.dlayers[name].append((layer, self._z2norm(zstart), self._z2norm(zstop))) 

-

166 else: 

-

167 if name not in self.clayers: 

-

168 self.clayers[name] = [] 

-

169 self.clayers[name].append((layer, self._z2norm(zstart), self._z2norm(zstop))) 

-

170 

-

171 def generate(self) -> Optional[FasterCapModelGenerator]: 

-

172 z: List[float] = [] 

-

173 for ll in (self.dlayers, self.clayers): 

-

174 for k, v in ll.items(): 

-

175 for l in v: 

-

176 z.extend((l[1], l[2])) 

-

177 z = sorted([*{*z}]) # sort & uniq 

-

178 if len(z) == 0: 

-

179 return None 

-

180 

-

181 gen = FasterCapModelGenerator(dbu=self.dbu, 

-

182 k_void=self.k_void, 

-

183 delaunay_amax=self.delaunay_amax, 

-

184 delaunay_b=self.delaunay_b, 

-

185 materials=self.materials, 

-

186 net_names=list(self.clayers.keys())) 

-

187 for zcurr in z: 

-

188 gen.next_z(self._norm2z(zcurr)) 

-

189 

-

190 for nn, v in self.clayers.items(): 

-

191 for l in v: 

-

192 if l[1] <= zcurr < l[2]: 

-

193 gen.add_in(name=f"+{nn}", layer=l[0]) 

-

194 if l[1] < zcurr <= l[2]: 

-

195 gen.add_out(name=f"+{nn}", layer=l[0]) 

-

196 for mn, v in self.dlayers.items(): 

-

197 for l in v: 

-

198 if l[1] <= zcurr < l[2]: 

-

199 gen.add_in(name=f"-{mn}", layer=l[0]) 

-

200 if l[1] < zcurr <= l[2]: 

-

201 gen.add_out(name=f"-{mn}", layer=l[0]) 

-

202 

-

203 gen.finish_z() 

-

204 

-

205 gen.finalize() 

-

206 return gen 

-

207 

-

208 

-

209@dataclass(frozen=True) 

-

210class HDielKey: 

-

211 outside: Optional[str] 

-

212 inside: Optional[str] 

-

213 

-

214 def __str__(self) -> str: 

-

215 return f"{self.outside or 'void'} <-> {self.inside or 'void'}" 

-

216 

-

217 @property 

-

218 def topic(self) -> str: 

-

219 return 'dielectric' 

-

220 

-

221 def reversed(self) -> HDielKey: 

-

222 return HDielKey(self.inside, self.outside) 

-

223 

-

224 

-

225@dataclass(frozen=True) 

-

226class HCondKey: 

-

227 net_name: str 

-

228 outside: Optional[str] 

-

229 

-

230 def __str__(self) -> str: 

-

231 return f"{self.outside or 'void'} <-> {self.net_name}" 

-

232 

-

233 @property 

-

234 def topic(self) -> str: 

-

235 return 'conductor' 

-

236 

-

237 

-

238@dataclass(frozen=True) 

-

239class VKey: 

-

240 kk: HDielKey | HCondKey 

-

241 p0: kdb.DPoint 

-

242 de: kdb.DVector 

-

243 

-

244 

-

245@dataclass(frozen=True) 

-

246class Point: 

-

247 x: float 

-

248 y: float 

-

249 z: float 

-

250 

-

251 def __sub__(self, other: Point) -> Point: 

-

252 return Point(self.x - other.x, self.y - other.y, self.z - other.z) 

-

253 

-

254 def sq_length(self) -> float: 

-

255 return self.x**2 + self.y**2 + self.z**2 

-

256 

-

257 def to_fastcap(self) -> str: 

-

258 return '%.12g %.12g %.12g' % (self.x, self.y, self.z) 

-

259 

-

260 

-

261def vector_product(a: Point, b: Point) -> Point: 

-

262 vp = Point( 

-

263 a.y * b.z - a.z * b.y, 

-

264 a.z * b.x - a.x * b.z, 

-

265 a.x * b.y - a.y * b.x 

-

266 ) 

-

267 return vp 

-

268 

-

269 

-

270def dot_product(a: Point, b: Point) -> float: 

-

271 dp = a.x * b.x + a.y * b.y + a.z * b.z 

-

272 return dp 

-

273 

-

274 

-

275@dataclass(frozen=True) 

-

276class Triangle: 

-

277 p0: Point 

-

278 p1: Point 

-

279 p2: Point 

-

280 

-

281 def reversed(self) -> Triangle: 

-

282 return Triangle(self.p2, self.p1, self.p0) 

-

283 

-

284 def outside_reference_point(self) -> Point: 

-

285 v1 = self.p1 - self.p0 

-

286 v2 = self.p2 - self.p0 

-

287 vp = Point(v1.y * v2.z - v1.z * v2.y, 

-

288 -v1.x * v2.z + v1.z * v2.x, 

-

289 v1.x * v2.y - v1.y * v2.x) 

-

290 vp_abs = math.sqrt(vp.x ** 2 + vp.y ** 2 + vp.z ** 2) 

-

291 rp = Point(self.p0.x + vp.x / vp_abs, 

-

292 self.p0.y + vp.y / vp_abs, 

-

293 self.p0.z + vp.z / vp_abs) 

-

294 return rp 

-

295 

-

296 def to_fastcap(self) -> str: 

-

297 return ' '.join([p.to_fastcap() for p in (self.p0, self.p1, self.p2)]) 

-

298 

-

299 def __len__(self): 

-

300 return 3 

-

301 

-

302 def __getitem__(self, i) -> Point: 

-

303 match i: 

-

304 case 0: return self.p0 

-

305 case 1: return self.p1 

-

306 case 2: return self.p2 

-

307 case _: raise IndexError("list index out of range") 

-

308 

-

309 

-

310@dataclass(frozen=True) 

-

311class Edge: 

-

312 p0: Point 

-

313 p1: Point 

-

314 

-

315 def vector_of_edge(self) -> Point: 

-

316 return Point( 

-

317 self.p1.x - self.p0.x, 

-

318 self.p1.y - self.p0.y, 

-

319 self.p1.z - self.p0.z 

-

320 ) 

-

321 

-

322 def reversed(self) -> Edge: 

-

323 return Edge(self.p1, self.p0) 

-

324 

-

325 

-

326@dataclass 

-

327class FasterCapModelGenerator: 

-

328 dbu: float 

-

329 """Database unit""" 

-

330 

-

331 k_void: float 

-

332 """Default dielectric of 'empty space'""" 

-

333 

-

334 delaunay_amax: float 

-

335 """Maximum area parameter for the Delaunay triangulation""" 

-

336 

-

337 delaunay_b: float 

-

338 """ 

-

339 The delaunay_b parameter for the Delaunay triangulation  

-

340 corresponds to the minimum angle 

-

341 and should be <=1 (b=sin(min_angle)*2). 

-

342 I.e. b=1 -> min_angle=30 deg, b=0.5 -> min_angle~14.5 deg. 

-

343 """ 

-

344 

-

345 materials: Dict[str, float] 

-

346 """Maps material name to dielectric k""" 

-

347 

-

348 net_names: List[str] 

-

349 

-

350 def __init__(self, 

-

351 dbu: float, 

-

352 k_void: float, 

-

353 delaunay_amax: float, 

-

354 delaunay_b: float, 

-

355 materials: Dict[str, float], 

-

356 net_names: List[str]): 

-

357 self.k_void = k_void 

-

358 self.delaunay_amax = delaunay_amax 

-

359 self.delaunay_b = delaunay_b 

-

360 self.dbu = dbu 

-

361 self.materials = materials 

-

362 self.net_names = net_names 

-

363 

-

364 self.z: Optional[float] = None 

-

365 self.zz: Optional[float] = None 

-

366 self.layers_in: Dict[str, kdb.Region] = {} 

-

367 self.layers_out: Dict[str, kdb.Region] = {} 

-

368 self.state: Dict[str, kdb.Region] = {} 

-

369 self.current: Dict[str, List[kdb.Region]] = {} 

-

370 self.diel_data: Dict[HDielKey, List[Triangle]] = {} 

-

371 self.diel_vdata: Dict[VKey, kdb.Region] = {} 

-

372 self.cond_data: Dict[HCondKey, List[Triangle]] = {} 

-

373 self.cond_vdata: Dict[VKey, kdb.Region] = {} 

-

374 

-

375 def reset(self): 

-

376 self.layers_in = {} 

-

377 self.layers_out = {} 

-

378 

-

379 def add_in(self, name: str, layer: kdb.Region): 

-

380 debug(f"add_in: {name} -> {layer}") 

-

381 if name not in self.layers_in: 

-

382 self.layers_in[name] = kdb.Region() 

-

383 self.layers_in[name] += layer 

-

384 

-

385 def add_out(self, name: str, layer: kdb.Region): 

-

386 debug(f"add_out: {name} -> {layer}") 

-

387 if name not in self.layers_out: 

-

388 self.layers_out[name] = kdb.Region() 

-

389 self.layers_out[name] += layer 

-

390 

-

391 def finish_z(self): 

-

392 debug(f"Finishing layer z={self.z}") 

-

393 

-

394 din: Dict[str, kdb.Region] = {} 

-

395 dout: Dict[str, kdb.Region] = {} 

-

396 all_in = kdb.Region() 

-

397 all_out = kdb.Region() 

-

398 all = kdb.Region() 

-

399 all_cin: Optional[kdb.Region] = None 

-

400 all_cout: Optional[kdb.Region] = None 

-

401 

-

402 for names, prefix in ((self.net_names, '+'), (self.materials.keys(), '-')): 

-

403 for nn in names: 

-

404 mk = prefix + nn 

-

405 

-

406 # compute merged events 

-

407 if mk not in self.current: 

-

408 self.current[mk] = [] 

-

409 current_before = self.current[mk][0].dup() if len(self.current[mk]) >= 1 else kdb.Region() 

-

410 lin, lout, current = self._merge_events(pyra=self.current[mk], 

-

411 lin=self.layers_in.get(mk, None), 

-

412 lout=self.layers_out.get(mk, None)) 

-

413 debug(f"Merged events & status for {mk}:") 

-

414 debug(f" in = {lin}") 

-

415 debug(f" out = {lout}") 

-

416 debug(f" state = {current}") 

-

417 

-

418 if mk not in self.state: 

-

419 self.state[mk] = kdb.Region() 

-

420 

-

421 # legalize in and out events 

-

422 lin_org = lin.dup() 

-

423 lout_org = lout.dup() 

-

424 lout &= self.state[mk] 

-

425 lin -= all 

-

426 lout += current & all_in 

-

427 lin += current_before & all_out 

-

428 lin -= lout_org 

-

429 lout -= lin_org 

-

430 

-

431 # tracks the legalized horizontal cuts 

-

432 self.state[mk] += lin 

-

433 self.state[mk] -= lout 

-

434 

-

435 din[mk] = lin 

-

436 dout[mk] = lout 

-

437 

-

438 debug(f"Legalized events & status for '{mk}':") 

-

439 debug(f" in = {din[mk]}") 

-

440 debug(f" out = {dout[mk]}") 

-

441 debug(f" state = {self.state[mk]}") 

-

442 

-

443 all_in += lin 

-

444 all_out += lout 

-

445 all += self.state[mk] 

-

446 

-

447 if prefix == '+': 

-

448 all_cin = all_in.dup() 

-

449 all_cout = all_out.dup() 

-

450 

-

451 debug(f"All conductor region in: {all_cin}") 

-

452 debug(f"All conductor region out: {all_cout}") 

-

453 

-

454 # check whether states are separated 

-

455 a = reduce(lambda x, y: x+y, self.state.values()) 

-

456 for k, s in self.state.items(): 

-

457 r: kdb.Region = s - a 

-

458 if not r.is_empty(): 

-

459 error(f"State region of {k} ({s}) is not contained entirely " 

-

460 f"in remaining all state region ({a}) - this means there is an overlap") 

-

461 a -= s 

-

462 

-

463 # Now we have legalized the in and out events 

-

464 for mni in self.materials.keys(): 

-

465 lin = din.get(f"-{mni}", None) 

-

466 if lin: 

-

467 lin = lin.dup() 

-

468 lin -= all_cout # handled with the conductor 

-

469 for mno in self.materials.keys(): 

-

470 lout = dout.get(f"-{mno}", None) 

-

471 if lout: 

-

472 d: kdb.Region = lout & lin 

-

473 if not d.is_empty(): 

-

474 self.generate_hdiel(below=mno, above=mni, layer=d) 

-

475 lin -= lout 

-

476 if not lin.is_empty(): 

-

477 self.generate_hdiel(below=None, above=mni, layer=lin) 

-

478 

-

479 for mno in self.materials.keys(): 

-

480 lout = dout.get(f"-{mno}", None) 

-

481 if lout: 

-

482 lout = lout.dup() 

-

483 lout -= all_cin # handled with the conductor 

-

484 for mni in self.materials.keys(): 

-

485 lin = din.get(f"-{mni}", None) 

-

486 if lin: 

-

487 lout -= lin 

-

488 if not lout.is_empty(): 

-

489 self.generate_hdiel(below=mno, above=None, layer=lout) 

-

490 

-

491 for nn in self.net_names: 

-

492 lin = din.get(f"+{nn}", None) 

-

493 if lin: 

-

494 lin = lin.dup() 

-

495 for mno in self.materials.keys(): 

-

496 lout = dout.get(f"-{mno}", None) 

-

497 if lout: 

-

498 d = lout & lin 

-

499 if not d.is_empty(): 

-

500 self.generate_hcond_in(net_name=nn, below=mno, layer=d) 

-

501 lin -= lout 

-

502 if not lin.is_empty(): 

-

503 self.generate_hcond_in(net_name=nn, below=None, layer=lin) 

-

504 

-

505 for nn in self.net_names: 

-

506 lout = dout.get(f"+{nn}", None) 

-

507 if lout: 

-

508 lout = lout.dup() 

-

509 lout -= all_cin # handled with the conductor 

-

510 for mni in self.materials.keys(): 

-

511 lin = din.get(f"-{mni}", None) 

-

512 if lin: 

-

513 d = lout & lin 

-

514 if not d.is_empty(): 

-

515 self.generate_hcond_out(net_name=nn, above=mni, layer=d) 

-

516 lout -= lin 

-

517 if not lout.is_empty(): 

-

518 self.generate_hcond_out(net_name=nn, above=None, layer=lout) 

-

519 

-

520 def next_z(self, z: float): 

-

521 debug(f"Next layer {z}") 

-

522 

-

523 self.reset() 

-

524 

-

525 if self.z is None: 

-

526 self.z = z 

-

527 return 

-

528 

-

529 self.zz = z 

-

530 

-

531 all_cond = kdb.Region() 

-

532 for nn in self.net_names: 

-

533 mk = f"+{nn}" 

-

534 if mk in self.state: 

-

535 all_cond += self.state[mk] 

-

536 all_cond = all_cond.edges() 

-

537 

-

538 for i, mni in enumerate(self.materials): 

-

539 linside = self.state.get(f"-{mni}", None) 

-

540 if linside: 

-

541 linside = linside.edges() 

-

542 linside -= all_cond # handled with the conductor 

-

543 for o, mno in enumerate(self.materials): 

-

544 if i != o: 

-

545 loutside = self.state.get(f"-{mno}", None) 

-

546 if loutside: 

-

547 loutside = loutside.edges() 

-

548 if o > i: 

-

549 d = loutside & linside 

-

550 for e in d: 

-

551 # NOTE: we need to swap points as we started from "outside" 

-

552 self.generate_vdiel(outside=mno, inside=mni, edge=e.swapped_points()) 

-

553 linside -= loutside 

-

554 

-

555 for e in linside: 

-

556 self.generate_vdiel(outside=None, inside=mni, edge=e) 

-

557 

-

558 for nn in self.net_names: 

-

559 mk = f"+{nn}" 

-

560 linside = self.state.get(mk, None) 

-

561 if linside: 

-

562 linside = linside.edges() 

-

563 for mno in self.materials: 

-

564 loutside = self.state.get(f"-{mno}", None) 

-

565 if loutside: 

-

566 loutside = loutside.edges() 

-

567 d = loutside & linside 

-

568 for e in d: 

-

569 # NOTE: we need to swap points as we started from "outside" 

-

570 self.generate_vcond(net_name=nn, outside=mno, edge=e.swapped_points()) 

-

571 linside -= loutside 

-

572 for e in linside: 

-

573 self.generate_vcond(net_name=nn, outside=None, edge=e) 

-

574 

-

575 self.z = z 

-

576 

-

577 def generate_hdiel(self, 

-

578 below: Optional[str], 

-

579 above: Optional[str], 

-

580 layer: kdb.Region): 

-

581 k = HDielKey(below, above) 

-

582 debug(f"Generating horizontal dielectric surface {k} as {layer}") 

-

583 if k not in self.diel_data: 

-

584 self.diel_data[k] = [] 

-

585 data = self.diel_data[k] 

-

586 

-

587 for t in layer.delaunay(self.delaunay_amax / self.dbu ** 2, self.delaunay_b): 

-

588 # NOTE: normal is facing downwards (to "below") 

-

589 pl = list(map(lambda pt: Point(pt.x * self.dbu, pt.y * self.dbu, self.z), 

-

590 t.each_point_hull())) 

-

591 tri = Triangle(*pl) 

-

592 data.append(tri) 

-

593 debug(f" {tri}") 

-

594 

-

595 def generate_v_surface(self, 

-

596 kk: HDielKey | HCondKey, 

-

597 edge: kdb.Edge) -> Tuple[VKey, kdb.Box]: 

-

598 debug(f"Generating vertical {kk.topic} surface {kk} with edge {edge}") 

-

599 

-

600 el = math.sqrt(edge.sq_length()) 

-

601 de = kdb.DVector(edge.d().x / el, edge.d().y / el) 

-

602 ne = kdb.DVector(edge.d().y / el, -edge.d().x / el) 

-

603 p0 = ne * ne.sprod(kdb.DPoint(edge.p1) - kdb.DPoint()) + kdb.DPoint() 

-

604 x1 = (edge.p1 - p0).sprod(de) 

-

605 x2 = (edge.p2 - p0).sprod(de) 

-

606 

-

607 key = VKey(kk, p0, de) 

-

608 surface = kdb.Box(x1, 

-

609 math.floor(self.z / self.dbu + 0.5), 

-

610 x2, 

-

611 math.floor(self.zz / self.dbu + 0.5)) 

-

612 return key, surface 

-

613 

-

614 def generate_vdiel(self, 

-

615 outside: Optional[str], 

-

616 inside: Optional[str], 

-

617 edge: kdb.Edge): 

-

618 if edge.is_degenerate(): 

-

619 return 

-

620 

-

621 key, surface = self.generate_v_surface(HDielKey(outside, inside), edge) 

-

622 if key not in self.diel_vdata: 

-

623 self.diel_vdata[key] = kdb.Region() 

-

624 

-

625 self.diel_vdata[key].insert(surface) 

-

626 

-

627 def generate_hcond_in(self, 

-

628 net_name: str, 

-

629 below: Optional[str], 

-

630 layer: kdb.Region): 

-

631 k = HCondKey(net_name, below) 

-

632 debug(f"Generating horizontal bottom conductor surface {k} as {layer}") 

-

633 

-

634 if k not in self.cond_data: 

-

635 self.cond_data[k] = [] 

-

636 data = self.cond_data[k] 

-

637 

-

638 for t in layer.delaunay(self.delaunay_amax / self.dbu ** 2, self.delaunay_b): 

-

639 # NOTE: normal is facing downwards (to "below") 

-

640 pl = list(map(lambda pt: Point(pt.x * self.dbu, pt.y * self.dbu, self.z), 

-

641 t.each_point_hull())) 

-

642 tri = Triangle(*pl) 

-

643 data.append(tri) 

-

644 debug(f" {tri}") 

-

645 

-

646 def generate_hcond_out(self, 

-

647 net_name: str, 

-

648 above: Optional[str], 

-

649 layer: kdb.Region): 

-

650 k = HCondKey(net_name, above) 

-

651 debug(f"Generating horizontal top conductor surface {k} as {layer}") 

-

652 

-

653 if k not in self.cond_data: 

-

654 self.cond_data[k] = [] 

-

655 data = self.cond_data[k] 

-

656 

-

657 for t in layer.delaunay(self.delaunay_amax / self.dbu ** 2, self.delaunay_b): 

-

658 # NOTE: normal is facing downwards (into conductor) 

-

659 pl = list(map(lambda pt: Point(pt.x * self.dbu, pt.y * self.dbu, self.z), 

-

660 t.each_point_hull())) 

-

661 tri = Triangle(*pl) 

-

662 # now it is facing outside (to "above") 

-

663 tri = tri.reversed() 

-

664 data.append(tri) 

-

665 debug(f" {tri}") 

-

666 

-

667 def generate_vcond(self, 

-

668 net_name: str, 

-

669 outside: Optional[str], 

-

670 edge: kdb.Edge): 

-

671 if edge.is_degenerate(): 

-

672 return 

-

673 

-

674 key, surface = self.generate_v_surface(HCondKey(net_name, outside), edge) 

-

675 if key not in self.cond_vdata: 

-

676 self.cond_vdata[key] = kdb.Region() 

-

677 

-

678 self.cond_vdata[key].insert(surface) 

-

679 

-

680 def triangulate(self, p0: kdb.DPoint, de: kdb.DVector, region: kdb.Region, data: List[Triangle]): 

-

681 def convert_point(pt: kdb.Point) -> Point: 

-

682 pxy = (p0 + de * pt.x) * self.dbu 

-

683 pz = pt.y * self.dbu 

-

684 return Point(pxy.x, pxy.y, pz) 

-

685 

-

686 for t in region.delaunay(self.delaunay_amax / self.dbu ** 2, self.delaunay_b): 

-

687 # NOTE: normal is facing outwards (to "left") 

-

688 pl = list(map(convert_point, t.each_point_hull())) 

-

689 tri = Triangle(*pl) 

-

690 # now it is facing outside (to "above") 

-

691 data.append(tri) 

-

692 debug(f" {tri}") 

-

693 

-

694 def finalize(self): 

-

695 for k, r in self.diel_vdata.items(): 

-

696 debug(f"Finishing vertical dielectric plane {k.kk} at {k.p0}/{k.de}") 

-

697 

-

698 if k.kk not in self.diel_data: 

-

699 self.diel_data[k.kk] = [] 

-

700 data = self.diel_data[k.kk] 

-

701 

-

702 self.triangulate(p0=k.p0, de=k.de, region=r, data=data) 

-

703 

-

704 for k, r in self.cond_vdata.items(): 

-

705 debug(f"Finishing vertical conductor plane {k.kk} at {k.p0} / {k.de}") 

-

706 

-

707 if k.kk not in self.cond_data: 

-

708 self.cond_data[k.kk] = [] 

-

709 data = self.cond_data[k.kk] 

-

710 

-

711 self.triangulate(p0=k.p0, de=k.de, region=r, data=data) 

-

712 

-

713 dk: Dict[HDielKey, List[Triangle]] = {} 

-

714 

-

715 for k in self.diel_data.keys(): 

-

716 kk = k.reversed() 

-

717 if kk not in dk: 

-

718 dk[k] = [] 

-

719 else: 

-

720 debug(f"Combining dielectric surfaces {kk} with reverse") 

-

721 

-

722 for k, v in self.diel_data.items(): 

-

723 kk = k.reversed() 

-

724 if kk in dk: 

-

725 dk[kk] += list(map(lambda t: t.reversed(), v)) 

-

726 else: 

-

727 dk[k] += v 

-

728 

-

729 self.diel_data = dk 

-

730 

-

731 def write_fastcap(self, output_dir_path: str, prefix: str) -> str: 

-

732 max_filename_length = os.pathconf(output_dir_path, 'PC_NAME_MAX') 

-

733 

-

734 lst_fn = os.path.join(output_dir_path, f"{prefix}.lst") 

-

735 file_num = 0 

-

736 lst_file: List[str] = [f"* k_void={'%.12g' % self.k_void}"] 

-

737 

-

738 for k, data in self.diel_data.items(): 

-

739 if len(data) == 0: 

-

740 continue 

-

741 

-

742 file_num += 1 

-

743 

-

744 k_outside = self.materials[k.outside] if k.outside else self.k_void 

-

745 k_inside = self.materials[k.inside] if k.inside else self.k_void 

-

746 

-

747 # lst_file.append(f"* Dielectric interface: outside={outside}, inside={inside}") 

-

748 

-

749 fn = f"{prefix}{file_num}_outside={k.outside or '(void)'}_inside={k.inside or '(void)'}.geo" 

-

750 output_path = os.path.join(output_dir_path, fn) 

-

751 self._write_fastercap_geo(output_path=output_path, 

-

752 data=data, 

-

753 cond_name=None, 

-

754 cond_number=file_num, 

-

755 rename_conductor=False) 

-

756 

-

757 # NOTE: for now, we compute the reference points for each triangle 

-

758 # This is a FasterCap feature, reference point in the *.geo file (end of each T line) 

-

759 rp_s = "0 0 0" 

-

760 lst_file.append(f"D {fn} {'%.12g' % k_outside} {'%.12g' % k_inside} 0 0 0 {rp_s}") 

-

761 

-

762 # 

-

763 # Feedback from FastFieldSolvers: 

-

764 # 

-

765 # - using the '+' trailing statements (conductor collation), 

-

766 # only the same conductor should be collated 

-

767 # 

-

768 # - renaming different conductor numbers ('N' rule line) is not allowed (currently a bug) 

-

769 # - Example: 1->VDD (1.geo) and 2->VDD (2.geo) is not possible 

-

770 # - Both conductor *.geo files should have the same number 

-

771 # - only the last conductor *.geo file should contain the 'N' rule 

-

772 # 

-

773 # - reference points 

-

774 # 

-

775 cond_data_grouped_by_net = defaultdict(list) 

-

776 for k, data in self.cond_data.items(): 

-

777 if len(data) == 0: 

-

778 continue 

-

779 cond_data_grouped_by_net[k.net_name].append((k.outside, data)) 

-

780 

-

781 cond_num = file_num 

-

782 

-

783 for nn, cond_list in cond_data_grouped_by_net.items(): 

-

784 cond_num += 1 

-

785 last_cond_index = len(cond_list) - 1 

-

786 for idx, (outside, data) in enumerate(cond_list): 

-

787 file_num += 1 

-

788 k_outside = self.materials[outside] if outside else self.k_void 

-

789 

-

790 outside = outside or '(void)' 

-

791 # lst_file.append(f"* Conductor interface: outside={outside}, net={nn}") 

-

792 fn = f"{prefix}{file_num}_outside={outside}_net={nn}.geo" 

-

793 if len(fn) > max_filename_length: 

-

794 warning(f"Unusual long net name detected: {nn}") 

-

795 d = hashlib.md5(nn.encode('utf-8')).digest() 

-

796 h = base64.urlsafe_b64encode(d).decode('utf-8').rstrip('=') 

-

797 remaining_len = len(f"{prefix}_{file_num}_outside={outside}_net=.geo") 

-

798 short_nn = nn[0: (max_filename_length - remaining_len - len(h) - 1)] + f"_{h}" 

-

799 fn = f"{prefix}{file_num}_outside={outside}_net={short_nn}.geo" 

-

800 output_path = os.path.join(output_dir_path, fn) 

-

801 self._write_fastercap_geo(output_path=output_path, 

-

802 data=data, 

-

803 cond_number=cond_num, 

-

804 cond_name=nn, 

-

805 rename_conductor=(idx == last_cond_index)) 

-

806 collation_operator = '' if idx == last_cond_index else ' +' 

-

807 lst_file.append(f"C {fn} {'%.12g' % k_outside} 0 0 0{collation_operator}") 

-

808 

-

809 info(f"Writing FasterCap list file: {lst_fn}") 

-

810 with open(lst_fn, "w") as f: 

-

811 f.write('\n'.join(lst_file)) 

-

812 f.write('\n') 

-

813 

-

814 return lst_fn 

-

815 

-

816 @staticmethod 

-

817 def _write_fastercap_geo(output_path: str, 

-

818 data: List[Triangle], 

-

819 cond_number: int, 

-

820 cond_name: Optional[str], 

-

821 rename_conductor: bool): 

-

822 info(f"Writing FasterCap geo file: {output_path}") 

-

823 with open(output_path, "w") as f: 

-

824 f.write(f"0 GEO File\n") 

-

825 for t in data: 

-

826 f.write(f"T {cond_number}") 

-

827 f.write(' ' + t.to_fastcap()) 

-

828 

-

829 # compute a reference point "outside" 

-

830 rp = t.outside_reference_point() 

-

831 rp_s = rp.to_fastcap() 

-

832 

-

833 f.write(f" {rp_s}\n") 

-

834 if cond_name and rename_conductor: 

-

835 f.write(f"N {cond_number} {cond_name}\n") 

-

836 
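# For illustration only (made-up numbers): a *.geo file emitted by the method
# above has one header line, one "T" line per triangle carrying the conductor
# number, the three corner points from Triangle.to_fastcap() and the appended
# outside reference point, and, when rename_conductor is set, a trailing "N"
# rename rule:
#
#   0 GEO File
#   T 2 0 0 1.5 2 0 1.5 0 2 1.5 0 0 2.5
#   T 2 2 0 1.5 2 2 1.5 0 2 1.5 2 0 2.5
#   N 2 VDD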

-

837 def check(self): 

-

838 info("Checking …") 

-

839 errors = 0 

-

840 

-

841 for mn in self.materials.keys(): 

-

842 tris = self._collect_diel_tris(mn) 

-

843 info(f"Material {mn} -> {len(tris)} triangles") 

-

844 errors += self._check_tris(f"Material '{mn}'", tris) 

-

845 

-

846 for nn in self.net_names: 

-

847 tris = self._collect_cond_tris(nn) 

-

848 info(f"Net '{nn}' -> {len(tris)} triangles") 

-

849 errors += self._check_tris(f"Net '{nn}'", tris) 

-

850 

-

851 if errors == 0: 

-

852 info(" No errors found") 

-

853 else: 

-

854 info(f" {errors} error{'s' if errors >= 2 else ''} found") 

-

855 

-

856 def _check_tris(self, msg: str, triangles: List[Triangle]) -> int: 

-

857 errors = 0 

-

858 

-

859 edge_set: Set[Edge] = set() 

-

860 edges = self._normed_edges(triangles) 

-

861 

-

862 for e in edges: 

-

863 if e in edge_set: 

-

864 error(f"{msg}: duplicate edge {self._edge2s(e)}") 

-

865 errors += 1 

-

866 else: 

-

867 edge_set.add(e) 

-

868 

-

869 self._split_edges(edge_set) 

-

870 

-

871 for e in edge_set: 

-

872 if e.reversed() not in edge_set: 

-

873 error(f"{msg}: edge {self._edge2s(e)} not connected with reverse edge (open surface)") 

-

874 errors += 1 

-

875 

-

876 return errors 

-

877 

-

878 def _normed_edges(self, triangles: List[Triangle]) -> List[Edge]: 

-

879 edges = [] 

-

880 

-

881 def normed_dbu(p: Point): 

-

882 return Point(*tuple(map(lambda c: math.floor(c / self.dbu + 0.5), 

-

883 (p.x, p.y, p.z)))) 

-

884 

-

885 for t in triangles: 

-

886 for i in range(0, 3): 

-

887 p1 = normed_dbu(t[i]) 

-

888 p2 = normed_dbu(t[(i + 1) % 3]) 

-

889 edges.append(Edge(p1, p2)) 

-

890 

-

891 return edges 

-

892 

-

893 def _point2s(self, p: Point) -> str: 

-

894 return f"(%.12g, %.12g, %.12g)" % (p.x * self.dbu, p.y * self.dbu, p.z * self.dbu) 

-

895 

-

896 def _edge2s(self, e: Edge) -> str: 

-

897 return f"{self._point2s(e.p0)}-{self._point2s(e.p1)}" 

-

898 

-

899 @staticmethod 

-

900 def _is_antiparallel(a: Point, 

-

901 b: Point) -> bool: 

-

902 vp = vector_product(a, b) 

-

903 if abs(vp.sq_length()) > 0.5: # we got normalized! 

-

904 return False 

-

905 

-

906 sp = dot_product(a, b) 

-

907 return sp < 0 

-

908 
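# Worked example (made-up coordinates, after normed_dbu() has snapped the points
# to integer dbu units): for a = Point(3, 0, 0) and b = Point(-2, 0, 0),
# vector_product(a, b) == Point(0, 0, 0), so its sq_length() is 0 (<= 0.5), and
# dot_product(a, b) == -6 < 0, so the vectors are antiparallel. Because b is also
# shorter than a, _split_edges() below inserts a split point into the longer edge.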

-

909 def _split_edges(self, edges: set[Edge]): 

-

910 edges_by_p2: DefaultDict[Point, List[Edge]] = defaultdict(list) 

-

911 edges_by_p1: DefaultDict[Point, List[Edge]] = defaultdict(list) 

-

912 for e in edges: 

-

913 edges_by_p2[e.p1].append(e) 

-

914 edges_by_p1[e.p0].append(e) 

-

915 

-

916 i = 0 

-

917 while True: 

-

918 i += 1 

-

919 subst: DefaultDict[Edge, List[Edge]] = defaultdict(list) 

-

920 

-

921 for e in edges: 

-

922 ee = edges_by_p2.get(e.p0, []) 

-

923 for eee in ee: 

-

924 ve = e.vector_of_edge() 

-

925 veee = eee.vector_of_edge() 

-

926 if self._is_antiparallel(ve, veee) and \ 

-

927 (veee.sq_length() < ve.sq_length() - 0.5): 

-

928 # There is a shorter edge antiparallel -> 

-

929 # this means we need to insert a split point into e 

-

930 subst[e] += [Edge(e.p0, eee.p0), Edge(eee.p0, e.p1)] 

-

931 

-

932 for e in edges: 

-

933 ee = edges_by_p1.get(e.p1, []) 

-

934 for eee in ee: 

-

935 ve = e.vector_of_edge() 

-

936 veee = eee.vector_of_edge() 

-

937 if self._is_antiparallel(ve, veee) and \ 

-

938 (veee.sq_length() < ve.sq_length() - 0.5): 

-

939 # There is a shorter edge antiparallel -> 

-

940 # this means we need to insert a split point into e 

-

941 subst[e] += [Edge(e.p0, eee.p1), Edge(eee.p1, e.p1)] 

-

942 

-

943 if len(subst) == 0: 

-

944 break 

-

945 

-

946 for e, replacement in subst.items(): 

-

947 edges_by_p1[e.p0].remove(e) 

-

948 edges_by_p2[e.p1].remove(e) 

-

949 edges.remove(e) 

-

950 for r in replacement: 

-

951 edges.add(r) 

-

952 edges_by_p1[r.p0].append(r) 

-

953 edges_by_p2[r.p1].append(r) 

-

954 

-

955 def dump_stl(self, output_dir_path: str, prefix: str): 

-

956 for mn in self.materials.keys(): 

-

957 tris = self._collect_diel_tris(mn) 

-

958 output_path = os.path.join(output_dir_path, f"{prefix}diel_{mn}.stl") 

-

959 self._write_as_stl(output_path, tris) 

-

960 

-

961 for nn in self.net_names: 

-

962 tris = self._collect_cond_tris(nn) 

-

963 output_path = os.path.join(output_dir_path, f"{prefix}cond_{nn}.stl") 

-

964 self._write_as_stl(output_path, tris) 

-

965 

-

966 @staticmethod 

-

967 def _write_as_stl(file_name: str, 

-

968 tris: List[Triangle]): 

-

969 if len(tris) == 0: 

-

970 return 

-

971 

-

972 info(f"Writing STL file {file_name}") 

-

973 with open(file_name, "w") as f: 

-

974 f.write("solid stl\n") 

-

975 for t in tris: 

-

976 f.write(" facet normal 0 0 0\n") 

-

977 f.write(" outer loop\n") 

-

978 t = t.reversed() 

-

979 for p in (t.p0, t.p1, t.p2): 

-

980 f.write(f" vertex {p.to_fastcap()}\n") 

-

981 f.write(" endloop\n") 

-

982 f.write(" endfacet\n") 

-

983 f.write("endsolid stl\n") 

-

984 

-

985 @staticmethod 

-

986 def _merge_events(pyra: List[Optional[kdb.Region]], 

-

987 lin: Optional[kdb.Region], 

-

988 lout: Optional[kdb.Region]) -> Tuple[kdb.Region, kdb.Region, kdb.Region]: 

-

989 lin = lin.dup() if lin else kdb.Region() 

-

990 lout = lout.dup() if lout else kdb.Region() 

-

991 past = pyra[0].dup() if len(pyra) >= 1 else kdb.Region() 

-

992 

-

993 for i in range(0, len(pyra)): 

-

994 ii = len(pyra) - i 

-

995 added: kdb.Region = lin & pyra[ii - 1] 

-

996 if not added.is_empty(): 

-

997 if ii >= len(pyra): 

-

998 pyra.append(kdb.Region()) 

-

999 assert len(pyra) == ii + 1 

-

1000 pyra[ii] += added 

-

1001 lin -= added 

-

1002 

-

1003 if len(pyra) == 0: 

-

1004 pyra.append(kdb.Region()) 

-

1005 pyra[0] += lin 

-

1006 

-

1007 for i in range(0, len(pyra)): 

-

1008 ii = len(pyra) - i 

-

1009 removed: kdb.Region = lout & pyra[ii - 1] 

-

1010 if not removed.is_empty(): 

-

1011 pyra[ii - 1] -= removed 

-

1012 lout -= removed 

-

1013 

-

1014 # compute merged events 

-

1015 lin = pyra[0] - past 

-

1016 lout = past - pyra[0] 

-

1017 return lin, lout, pyra[0] 

-

1018 

-

1019 def _collect_diel_tris(self, material_name: str) -> List[Triangle]: 

-

1020 tris = [] 

-

1021 

-

1022 for k, v in self.diel_data.items(): 

-

1023 if material_name == k.outside: 

-

1024 tris += v 

-

1025 elif material_name == k.inside: 

-

1026 tris += [t.reversed() for t in v] 

-

1027 

-

1028 for k, v in self.cond_data.items(): 

-

1029 if material_name == k.outside: 

-

1030 tris += v 

-

1031 

-

1032 return tris 

-

1033 

-

1034 def _collect_cond_tris(self, net_name: str) -> List[Triangle]: 

-

1035 tris = [] 

-

1036 for k, v in self.cond_data.items(): 

-

1037 if k.net_name == net_name: 

-

1038 tris += [t.reversed() for t in v] 

-

1039 return tris 

-
diff --git a/pycov/z_a5841ccd503d0903_fastercap_runner_py.html b/pycov/z_a5841ccd503d0903_fastercap_runner_py.html
deleted file mode 100644
index 02ddb0af..00000000
--- a/pycov/z_a5841ccd503d0903_fastercap_runner_py.html
+++ /dev/null
@@ -1,225 +0,0 @@
[Deleted coverage report page: kpex/fastercap/fastercap_runner.py: 48% of 56 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000]

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24import re 

-

25import subprocess 

-

26import time 

-

27from typing import * 

-

28 

-

29from kpex.log import ( 

-

30 info, 

-

31 # warning, 

-

32 rule, 

-

33 subproc, 

-

34) 

-

35from kpex.common.capacitance_matrix import CapacitanceMatrix 

-

36 

-

37 

-

38def run_fastercap(exe_path: str, 

-

39 lst_file_path: str, 

-

40 log_path: str, 

-

41 tolerance: float, 

-

42 d_coeff: float, 

-

43 mesh_refinement_value: float, 

-

44 ooc_condition: Optional[int], 

-

45 auto_preconditioner: bool, 

-

46 galerkin_scheme: bool, 

-

47 jacobi_preconditioner: bool): 

-

48 args = [ 

-

49 exe_path, 

-

50 '-b', # console mode, without GUI 

-

51 '-i', # Dump detailed time and memory information 

-

52 '-v', # Verbose output 

-

53 f"-a{tolerance}", # stop when relative error lower than threshold 

-

54 f"-d{d_coeff}", # Direct potential interaction coefficient to mesh refinement ratio 

-

55 f"-m{mesh_refinement_value}", # Mesh relative refinement value 

-

56 ] 

-

57 

-

58 if ooc_condition is not None: 

-

59 args += [f"-f{ooc_condition}"] 

-

60 

-

61 if auto_preconditioner: 

-

62 args += ['-ap'] 

-

63 

-

64 if galerkin_scheme: 

-

65 args += ['-g'] 

-

66 

-

67 if jacobi_preconditioner: 

-

68 args += ['-pj'] 

-

69 

-

70 args += [ 

-

71 lst_file_path 

-

72 ] 

-

73 info(f"Calling {' '.join(args)}, output file: {log_path}") 

-

74 

-

75 rule() 

-

76 start = time.time() 

-

77 

-

78 proc = subprocess.Popen(args, 

-

79 stdin=subprocess.DEVNULL, 

-

80 stdout=subprocess.PIPE, 

-

81 stderr=subprocess.STDOUT, 

-

82 universal_newlines=True, 

-

83 text=True) 

-

84 with open(log_path, 'w') as f: 

-

85 while True: 

-

86 line = proc.stdout.readline() 

-

87 if not line: 

-

88 break 

-

89 subproc(line[:-1]) # remove newline 

-

90 f.writelines([line]) 

-

91 proc.wait() 

-

92 

-

93 duration = time.time() - start 

-

94 

-

95 rule() 

-

96 

-

97 if proc.returncode == 0: 

-

98 info(f"FasterCap succeeded after {'%.4g' % duration}s") 

-

99 else: 

-

100 raise Exception(f"FasterCap failed with status code {proc.returncode} after {'%.4g' % duration}s", 

-

101 f"see log file: {log_path}") 

-

102 
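# Illustrative only: a hedged example invocation of run_fastercap() above. The
# executable name, paths and numeric settings are made up, and the .lst file
# would typically come from FasterCapModelGenerator.write_fastcap().
run_fastercap(exe_path='FasterCap',
              lst_file_path='/tmp/fastercap/example_.lst',
              log_path='/tmp/fastercap/FasterCap_Output.txt',
              tolerance=0.05,
              d_coeff=0.5,
              mesh_refinement_value=0.5,
              ooc_condition=None,
              auto_preconditioner=True,
              galerkin_scheme=False,
              jacobi_preconditioner=False)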

-

103 

-

104def fastercap_parse_capacitance_matrix(log_path: str) -> CapacitanceMatrix: 

-

105 with open(log_path, 'r') as f: 

-

106 rlines = f.readlines() 

-

107 rlines.reverse() 

-

108 

-

109 # multiple iterations possible, find the last matrix 

-

110 for idx, line in enumerate(rlines): 

-

111 if line.strip() == "Capacitance matrix is:": 

-

112 m = re.match(r'^Dimension (\d+) x (\d+)$', rlines[idx-1]) 

-

113 if not m: 

-

114 raise Exception(f"Could not parse capacitor matrix dimensions") 

-

115 dim = int(m.group(1)) 

-

116 conductor_names: List[str] = [] 

-

117 rows: List[List[float]] = [] 

-

118 for i in reversed(range(idx-1-dim, idx-1)): 

-

119 line = rlines[i].strip() 

-

120 cells = [cell.strip() for cell in line.split(' ')] 

-

121 cells = list(filter(lambda c: len(c) >= 1, cells)) 

-

122 conductor_names.append(cells[0]) 

-

123 row = [float(cell)/1e6 for cell in cells[1:]] 

-

124 rows.append(row) 

-

125 cm = CapacitanceMatrix(conductor_names=conductor_names, rows=rows) 

-

126 return cm 

-

127 

-

128 raise Exception(f"Could not extract capacitance matrix from FasterCap log file {log_path}") 
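# Illustrative only: reading the capacitance matrix back from the log written by
# run_fastercap() above and post-processing it with the CapacitanceMatrix helpers
# from kpex/common/capacitance_matrix.py (paths are made up).
cm = fastercap_parse_capacitance_matrix('/tmp/fastercap/FasterCap_Output.txt')
cm = cm.averaged_off_diagonals()
cm.write_csv('/tmp/fastercap/cap_matrix.csv')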

-
diff --git a/pycov/z_b5137d8b20ededf9___init___py.html b/pycov/z_b5137d8b20ededf9___init___py.html
deleted file mode 100644
index b15b018c..00000000
--- a/pycov/z_b5137d8b20ededf9___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[Deleted coverage report page: kpex/common/__init__.py: 100% of 0 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000]

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-
diff --git a/pycov/z_b5137d8b20ededf9_capacitance_matrix_py.html b/pycov/z_b5137d8b20ededf9_capacitance_matrix_py.html
deleted file mode 100644
index 8753fe44..00000000
--- a/pycov/z_b5137d8b20ededf9_capacitance_matrix_py.html
+++ /dev/null
@@ -1,184 +0,0 @@
[Deleted coverage report page: kpex/common/capacitance_matrix.py: 96% of 51 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000]

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from __future__ import annotations 

-

25 

-

26import copy 

-

27from dataclasses import dataclass 

-

28import os 

-

29import tempfile 

-

30from typing import * 

-

31 

-

32 

-

33@dataclass 

-

34class CapacitanceMatrix: 

-

35 conductor_names: List[str] # NOTE FasterCap generates [g_1, g_2, ...] 

-

36 rows: List[List[float]] # NOTE: in µm 

-

37 

-

38 def __getitem__(self, key): 

-

39 return self.rows.__getitem__(key) 

-

40 

-

41 def __setitem__(self, key, value): 

-

42 self.rows.__setitem__(key, value) 

-

43 

-

44 @property 

-

45 def dimension(self): 

-

46 return len(self.conductor_names) 

-

47 

-

48 @classmethod 

-

49 def parse_csv(cls, path: str, separator: str = ';'): 

-

50 with open(path, 'r') as f: 

-

51 lines = f.readlines() 

-

52 if len(lines) < 2: 

-

53 raise Exception(f"Capacitance Matrix CSV must at least have 2 lines: " 

-

54 f"{path}") 

-

55 conductor_names = [cell.strip() for cell in lines[0].split(sep=separator)] 

-

56 rows = [] 

-

57 for line in lines[1:]: 

-

58 row = [float(cell.strip()) for cell in line.split(sep=separator)] 

-

59 rows.append(row) 

-

60 return CapacitanceMatrix(conductor_names=conductor_names, 

-

61 rows=rows) 

-

62 

-

63 def write_csv(self, output_path: str, separator: str = ';'): 

-

64 with open(output_path, 'w') as f: 

-

65 header_line = separator.join(self.conductor_names) 

-

66 f.write(header_line) 

-

67 f.write('\n') 

-

68 

-

69 for row in self.rows: 

-

70 cells = ['%.12g' % cell for cell in row] 

-

71 row_line = separator.join(cells) 

-

72 f.write(row_line) 

-

73 f.write('\n') 

-

74 

-

75 def averaged_off_diagonals(self) -> CapacitanceMatrix: 

-

76 c = copy.deepcopy(self) 

-

77 for i in range(len(self.rows)): 

-

78 for j in range(len(self.conductor_names)): 

-

79 if j <= i: 

-

80 continue 

-

81 v1 = self[i][j] 

-

82 v2 = self[j][i] 

-

83 avg = (v1 + v2) / 2 

-

84 # print(f"i={i} j={j}, avg({v1}, {v2}) == {avg}") 

-

85 c[i][j] = avg 

-

86 c[j][i] = avg 

-

87 return c 
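# Illustrative only: a small round trip through the helpers above (conductor
# names, values and the output path are made up).
cm = CapacitanceMatrix(conductor_names=['g_1', 'g_2'],
                       rows=[[2.0, -0.5],
                             [-0.4, 1.9]])
cm_sym = cm.averaged_off_diagonals()   # off-diagonals become (-0.5 + -0.4) / 2 = -0.45
cm_sym.write_csv('/tmp/cap_matrix.csv', separator=';')
restored = CapacitanceMatrix.parse_csv('/tmp/cap_matrix.csv', separator=';')
assert restored.dimension == 2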

-
diff --git a/pycov/z_b7daf585f790d5fa___init___py.html b/pycov/z_b7daf585f790d5fa___init___py.html
deleted file mode 100644
index 08e7e76b..00000000
--- a/pycov/z_b7daf585f790d5fa___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[Deleted coverage report page: kpex/rcx25/__init__.py: 100% of 0 statements; coverage.py v7.6.8, created at 2024-12-05 16:36 +0000]

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-
diff --git a/pycov/z_b7daf585f790d5fa_extraction_results_py.html b/pycov/z_b7daf585f790d5fa_extraction_results_py.html
deleted file mode 100644
index 56c69c1a..00000000
--- a/pycov/z_b7daf585f790d5fa_extraction_results_py.html
+++ /dev/null
@@ -1,261 +0,0 @@
- Coverage for kpex/rcx25/extraction_results.py: 81%, 91 statements (coverage.py v7.6.8, created at 2024-12-05 16:36 +0000)

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from __future__ import annotations 

-

25from collections import defaultdict 

-

26from dataclasses import dataclass, field 

-

27from typing import * 

-

28 

-

29import process_parasitics_pb2 

-

30 

-

31 

-

32NetName = str 

-

33LayerName = str 

-

34CellName = str 

-

35 

-

36 

-

37@dataclass 

-

38class NodeRegion: 

-

39 layer_name: LayerName 

-

40 net_name: NetName 

-

41 cap_to_gnd: float 

-

42 perimeter: float 

-

43 area: float 

-

44 

-

45 

-

46@dataclass(frozen=True) 

-

47class SidewallKey: 

-

48 layer: LayerName 

-

49 net1: NetName 

-

50 net2: NetName 

-

51 

-

52 

-

53@dataclass 

-

54class SidewallCap: # see Magic EdgeCap, extractInt.c L444 

-

55 key: SidewallKey 

-

56 cap_value: float # femto farad 

-

57 distance: float # distance in µm 

-

58 length: float # length in µm 

-

59 tech_spec: process_parasitics_pb2.CapacitanceInfo.SidewallCapacitance 

-

60 

-

61 

-

62@dataclass(frozen=True) 

-

63class OverlapKey: 

-

64 layer_top: LayerName 

-

65 net_top: NetName 

-

66 layer_bot: LayerName 

-

67 net_bot: NetName 

-

68 

-

69 

-

70@dataclass 

-

71class OverlapCap: 

-

72 key: OverlapKey 

-

73 cap_value: float # femto farad 

-

74 shielded_area: float # in µm^2 

-

75 unshielded_area: float # in µm^2 

-

76 tech_spec: process_parasitics_pb2.CapacitanceInfo.OverlapCapacitance 

-

77 

-

78 

-

79@dataclass(frozen=True) 

-

80class SideOverlapKey: 

-

81 layer_inside: LayerName 

-

82 net_inside: NetName 

-

83 layer_outside: LayerName 

-

84 net_outside: NetName 

-

85 

-

86 def __repr__(self) -> str: 

-

87 return f"{self.layer_inside}({self.net_inside})-"\ 

-

88 f"{self.layer_outside}({self.net_outside})" 

-

89 

-

90 

-

91@dataclass 

-

92class SideOverlapCap: 

-

93 key: SideOverlapKey 

-

94 cap_value: float # femto farad 

-

95 

-

96 def __str__(self) -> str: 

-

97 return f"(Side Overlap): {self.key} = {round(self.cap_value, 6)}fF" 

-

98 

-

99 

-

100@dataclass(frozen=True) 

-

101class NetCoupleKey: 

-

102 net1: NetName 

-

103 net2: NetName 

-

104 

-

105 def __repr__(self) -> str: 

-

106 return f"{self.net1}-{self.net2}" 

-

107 

-

108 # NOTE: we norm net names alphabetically 

-

109 def normed(self) -> NetCoupleKey: 

-

110 if self.net1 < self.net2: 

-

111 return self 

-

112 else: 

-

113 return NetCoupleKey(self.net2, self.net1) 

-

114 

-

115 

-

116@dataclass 

-

117class ExtractionSummary: 

-

118 capacitances: Dict[NetCoupleKey, float] 

-

119 

-

120 @classmethod 

-

121 def merged(cls, summaries: List[ExtractionSummary]) -> ExtractionSummary: 

-

122 merged_capacitances = defaultdict(float) 

-

123 for s in summaries: 

-

124 for couple_key, cap in s.capacitances.items(): 

-

125 merged_capacitances[couple_key.normed()] += cap 

-

126 return ExtractionSummary(merged_capacitances) 

-

127 

-

128 

-

129@dataclass 

-

130class CellExtractionResults: 

-

131 cell_name: CellName 

-

132 

-

133 overlap_coupling: Dict[OverlapKey, OverlapCap] = field(default_factory=dict) 

-

134 sidewall_table: Dict[SidewallKey, SidewallCap] = field(default_factory=dict) 

-

135 sideoverlap_table: Dict[SideOverlapKey, SideOverlapCap] = field(default_factory=dict) 

-

136 

-

137 def summarize(self) -> ExtractionSummary: 

-

138 overlap_summary = ExtractionSummary({ 

-

139 NetCoupleKey(key.net_top, key.net_bot): cap.cap_value 

-

140 for key, cap in self.overlap_coupling.items() 

-

141 }) 

-

142 

-

143 sidewall_summary = ExtractionSummary({ 

-

144 NetCoupleKey(key.net1, key.net2): cap.cap_value 

-

145 for key, cap in self.sidewall_table.items() 

-

146 }) 

-

147 

-

148 sideoverlap_summary = ExtractionSummary({ 

-

149 NetCoupleKey(key.net_inside, key.net_outside): cap.cap_value 

-

150 for key, cap in self.sideoverlap_table.items() 

-

151 }) 

-

152 

-

153 return ExtractionSummary.merged([ 

-

154 overlap_summary, sidewall_summary, sideoverlap_summary 

-

155 ]) 

-

156 

-

157 

-

158@dataclass 

-

159class ExtractionResults: 

-

160 cell_extraction_results: Dict[CellName, CellExtractionResults] = field(default_factory=dict) 

-

161 

-

162 def summarize(self) -> ExtractionSummary: 

-

163 subsummaries = [s.summarize() for s in self.cell_extraction_results.values()] 

-

164 return ExtractionSummary.merged(subsummaries) 
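A small sketch of how the result containers above compose (layer and net names are placeholders, and tech_spec is omitted here even though the real extractor passes the protobuf spec):

results = ExtractionResults()
cell = CellExtractionResults(cell_name='TOP')

key = OverlapKey(layer_top='met2', net_top='net_a', layer_bot='met1', net_bot='net_b')
cell.overlap_coupling[key] = OverlapCap(key=key, cap_value=0.42,            # fF
                                        shielded_area=0.0, unshielded_area=1.0,
                                        tech_spec=None)                     # proto spec omitted in this sketch

results.cell_extraction_results[cell.cell_name] = cell
summary = results.summarize()
# summary.capacitances maps NetCoupleKey('net_a', 'net_b') (normed alphabetically) to 0.42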

-
diff --git a/pycov/z_b7daf585f790d5fa_extractor_py.html b/pycov/z_b7daf585f790d5fa_extractor_py.html
deleted file mode 100644
index 0ecea94c..00000000
--- a/pycov/z_b7daf585f790d5fa_extractor_py.html
+++ /dev/null
@@ -1,840 +0,0 @@
- Coverage for kpex/rcx25/extractor.py: 5%, 399 statements (coverage.py v7.6.8, created at 2024-12-05 16:36 +0000)

1#! /usr/bin/env python3 

-

2# 

-

3# -------------------------------------------------------------------------------- 

-

4# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

5# Johannes Kepler University, Institute for Integrated Circuits. 

-

6# 

-

7# This file is part of KPEX  

-

8# (see https://github.com/martinjankoehler/klayout-pex). 

-

9# 

-

10# This program is free software: you can redistribute it and/or modify 

-

11# it under the terms of the GNU General Public License as published by 

-

12# the Free Software Foundation, either version 3 of the License, or 

-

13# (at your option) any later version. 

-

14# 

-

15# This program is distributed in the hope that it will be useful, 

-

16# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

17# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

18# GNU General Public License for more details. 

-

19# 

-

20# You should have received a copy of the GNU General Public License 

-

21# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

22# SPDX-License-Identifier: GPL-3.0-or-later 

-

23# -------------------------------------------------------------------------------- 

-

24# 

-

25 

-

26import math 

-

27from collections import defaultdict 

-

28from typing import * 

-

29 

-

30import klayout.db as kdb 

-

31import klayout.rdb as rdb 

-

32 

-

33from ..klayout.lvsdb_extractor import KLayoutExtractionContext, GDSPair 

-

34from ..log import ( 

-

35 console, 

-

36 debug, 

-

37 info, 

-

38 warning, 

-

39 error 

-

40) 

-

41from ..tech_info import TechInfo 

-

42from .extraction_results import * 

-

43import process_stack_pb2 

-

44 

-

45 

-

46EdgeInterval = Tuple[float, float] 

-

47ChildIndex = int 

-

48EdgeNeighborhood = List[Tuple[EdgeInterval, Dict[ChildIndex, List[kdb.Polygon]]]] 

-

49 

-

50 

-

51class RCExtractor: 

-

52 def __init__(self, 

-

53 pex_context: KLayoutExtractionContext, 

-

54 tech_info: TechInfo, 

-

55 report_path: str): 

-

56 self.pex_context = pex_context 

-

57 self.tech_info = tech_info 

-

58 self.report_path = report_path 

-

59 

-

60 def gds_pair(self, layer_name) -> Optional[GDSPair]: 

-

61 gds_pair = self.tech_info.gds_pair_for_computed_layer_name.get(layer_name, None) 

-

62 if not gds_pair: 

-

63 gds_pair = self.tech_info.gds_pair_for_layer_name.get(layer_name, None) 

-

64 if not gds_pair: 

-

65 warning(f"Can't find GDS pair for layer {layer_name}") 

-

66 return None 

-

67 return gds_pair 

-

68 

-

69 def shapes_of_net(self, layer_name: str, net: kdb.Net) -> Optional[kdb.Region]: 

-

70 gds_pair = self.gds_pair(layer_name=layer_name) 

-

71 if not gds_pair: 

-

72 return None 

-

73 

-

74 shapes = self.pex_context.shapes_of_net(gds_pair=gds_pair, net=net) 

-

75 if not shapes: 

-

76 debug(f"Nothing extracted for layer {layer_name}") 

-

77 return shapes 

-

78 

-

79 def shapes_of_layer(self, layer_name: str) -> Optional[kdb.Region]: 

-

80 gds_pair = self.gds_pair(layer_name=layer_name) 

-

81 if not gds_pair: 

-

82 return None 

-

83 

-

84 shapes = self.pex_context.shapes_of_layer(gds_pair=gds_pair) 

-

85 if not shapes: 

-

86 debug(f"Nothing extracted for layer {layer_name}") 

-

87 return shapes 

-

88 

-

89 def extract(self) -> ExtractionResults: 

-

90 extraction_results = ExtractionResults() 

-

91 

-

92 # TODO: for now, we always flatten and have only 1 cell 

-

93 cell_name = self.pex_context.top_cell.name 

-

94 report = rdb.ReportDatabase(f"PEX {cell_name}") 

-

95 cell_extraction_result = self.extract_cell(cell_name=cell_name, report=report) 

-

96 extraction_results.cell_extraction_results[cell_name] = cell_extraction_result 

-

97 

-

98 report.save(self.report_path) 

-

99 

-

100 return extraction_results 

-

101 

-

102 def extract_cell(self, 

-

103 cell_name: CellName, 

-

104 report: rdb.ReportDatabase) -> CellExtractionResults: 

-

105 lvsdb = self.pex_context.lvsdb 

-

106 netlist: kdb.Netlist = lvsdb.netlist() 

-

107 dbu = self.pex_context.dbu 

-

108 

-

109 extraction_results = CellExtractionResults(cell_name=cell_name) 

-

110 

-

111 rdb_cell = report.create_cell(cell_name) 

-

112 rdb_cat_common = report.create_category("Common") 

-

113 rdb_cat_sidewall_old = report.create_category("Sidewall (legacy space_check)") 

-

114 rdb_cat_sidewall = report.create_category("Sidewall (EdgeNeighborhoodVisitor)") 

-

115 rdb_cat_overlap = report.create_category("Overlap") 

-

116 rdb_cat_fringe = report.create_category("Fringe / Side Overlap") 

-

117 

-

118 def rdb_output(parent_category: rdb.RdbCategory, 

-

119 category_name: str, 

-

120 shapes: kdb.Shapes | kdb.Region | List[kdb.Edge]): 

-

121 rdb_cat = report.create_category(parent_category, category_name) 

-

122 report.create_items(rdb_cell.rdb_id(), ## TODO: if later hierarchical mode is introduced 

-

123 rdb_cat.rdb_id(), 

-

124 kdb.CplxTrans(mag=dbu), 

-

125 shapes) 

-

126 

-

127 circuit = netlist.circuit_by_name(self.pex_context.top_cell.name) 

-

128 # https://www.klayout.de/doc-qt5/code/class_Circuit.html 

-

129 if not circuit: 

-

130 circuits = [c.name for c in netlist.each_circuit()] 

-

131 raise Exception(f"Expected circuit called {self.pex_context.top_cell.name} in extracted netlist, " 

-

132 f"only available circuits are: {circuits}") 

-

133 

-

134 #---------------------------------------------------------------------------------------- 

-

135 layer2net2regions = defaultdict(lambda: defaultdict(kdb.Region)) 

-

136 net2layer2regions = defaultdict(lambda: defaultdict(kdb.Region)) 

-

137 layer_by_name: Dict[LayerName, process_stack_pb2.ProcessStackInfo.LayerInfo] = {} 

-

138 

-

139 layer_regions_by_name: Dict[LayerName, kdb.Region] = defaultdict(kdb.Region) 

-

140 all_region = kdb.Region() 

-

141 regions_below_layer: Dict[LayerName, kdb.Region] = defaultdict(kdb.Region) 

-

142 regions_below_and_including_layer: Dict[LayerName, kdb.Region] = defaultdict(kdb.Region) 

-

143 all_layer_names: List[LayerName] = [] 

-

144 layer_names_below: Dict[LayerName, List[LayerName]] = {} 

-

145 shielding_layer_names: Dict[Tuple[LayerName, LayerName], List[LayerName]] = defaultdict(list) 

-

146 previous_layer_name: Optional[str] = None 

-

147 

-

148 substrate_region = kdb.Region() 

-

149 substrate_region.insert(self.pex_context.top_cell_bbox().enlarged(8.0 / dbu)) # 8 µm halo 

-

150 substrate_layer_name = self.tech_info.internal_substrate_layer_name 

-

151 layer_names_below[substrate_layer_name] = [] 

-

152 all_layer_names.append(substrate_layer_name) 

-

153 layer2net2regions[substrate_layer_name][substrate_layer_name] = substrate_region 

-

154 net2layer2regions[substrate_layer_name][substrate_layer_name] = substrate_region 

-

155 layer_regions_by_name[substrate_layer_name] = substrate_region 

-

156 # NOTE: substrate not needed for 

-

157 # - all_region 

-

158 # - regions_below_layer 

-

159 # - regions_below_and_including_layer 

-

160 

-

161 for metal_layer in self.tech_info.process_metal_layers: 

-

162 layer_name = metal_layer.name 

-

163 gds_pair = self.gds_pair(layer_name) 

-

164 canonical_layer_name = self.tech_info.canonical_layer_name_by_gds_pair[gds_pair] 

-

165 

-

166 all_layer_shapes = self.shapes_of_layer(layer_name) or kdb.Region() 

-

167 layer_regions_by_name[canonical_layer_name] += all_layer_shapes 

-

168 # NOTE: multiple LVS layers can be mapped to the same canonical name 

-

169 if previous_layer_name != canonical_layer_name: 

-

170 regions_below_layer[canonical_layer_name] += all_region 

-

171 layer_names_below[canonical_layer_name] = list(all_layer_names) 

-

172 for ln in all_layer_names: 

-

173 lp = (canonical_layer_name, ln) 

-

174 shielding_layer_names[lp] = [l for l in all_layer_names 

-

175 if l != ln and l not in layer_names_below[ln]] 

-

176 shielding_layer_names[ln, canonical_layer_name] = shielding_layer_names[lp] 

-

177 all_layer_names.append(canonical_layer_name) 

-

178 all_region += all_layer_shapes 

-

179 regions_below_and_including_layer[canonical_layer_name] += all_region 

-

180 

-

181 previous_layer_name = canonical_layer_name 

-

182 

-

183 for net in circuit.each_net(): 

-

184 net_name = net.expanded_name() 

-

185 

-

186 shapes = self.shapes_of_net(layer_name=layer_name, net=net) 

-

187 if shapes: 

-

188 layer2net2regions[canonical_layer_name][net_name] += shapes 

-

189 net2layer2regions[net_name][canonical_layer_name] += shapes 

-

190 layer_by_name[canonical_layer_name] = metal_layer 

-

191 

-

192 shielded_regions_between_layers: Dict[Tuple[LayerName, LayerName], kdb.Region] = {} 

-

193 for top_layer_name in layer2net2regions.keys(): 

-

194 for bot_layer_name in reversed(layer_names_below[top_layer_name]): 

-

195 shielded_region = kdb.Region() 

-

196 shielding_layers = shielding_layer_names.get((top_layer_name, bot_layer_name), None) 

-

197 if shielding_layers: 

-

198 for sl in shielding_layers: 

-

199 shielded_region += layer_regions_by_name[sl] 

-

200 shielded_region.merge() 

-

201 shielded_regions_between_layers[(top_layer_name, bot_layer_name)] = shielded_region 

-

202 shielded_regions_between_layers[(bot_layer_name, top_layer_name)] = shielded_region 

-

203 if shielded_region: 

-

204 rdb_output(rdb_cat_common, f"Shielded ({top_layer_name}-{bot_layer_name})", shielded_region) 

-

205 

-

206 #---------------------------------------------------------------------------------------- 

-

207 

-

208 side_halo_um = self.tech_info.tech.process_parasitics.side_halo 

-

209 side_halo_dbu = int(side_halo_um / dbu) + 1 # add 1 nm to halo 

-

210 

-

211 space_markers_by_layer_name: Dict[LayerName, kdb.Region] = {} 

-

212 rdb_cat_space_markers = report.create_category(rdb_cat_sidewall_old, "All Space Markers") 

-

213 

-

214 for layer_name in layer2net2regions.keys(): 

-

215 if layer_name == substrate_layer_name: 

-

216 continue 

-

217 

-

218 space_markers = layer_regions_by_name[layer_name].space_check( 

-

219 d=side_halo_dbu, # min space in um 

-

220 whole_edges=True, # whole edges 

-

221 metrics=kdb.Metrics.Projection, # metrics 

-

222 ignore_angle=None, # ignore angle 

-

223 min_projection=None, # min projection 

-

224 max_projection=None, # max projection 

-

225 shielded=True, # shielding 

-

226 opposite_filter=kdb.Region.NoOppositeFilter, # error filter for opposite sides 

-

227 rect_filter=kdb.Region.NoRectFilter, # error filter for rect input shapes 

-

228 negative=False, # negative 

-

229 property_constraint=kdb.Region.IgnoreProperties, # property_constraint 

-

230 zero_distance_mode=kdb.Region.IncludeZeroDistanceWhenTouching # zero distance mode 

-

231 ) 

-

232 space_markers_by_layer_name[layer_name] = space_markers 

-

233 rdb_output(rdb_cat_space_markers, f"layer={layer_name}", space_markers) 

-

234 

-

235 # 

-

236 # (1) OVERLAP CAPACITANCE 

-

237 # 

-

238 for top_layer_name in layer2net2regions.keys(): 

-

239 if top_layer_name == substrate_layer_name: 

-

240 continue 

-

241 

-

242 top_net2regions = layer2net2regions.get(top_layer_name, None) 

-

243 if not top_net2regions: 

-

244 continue 

-

245 

-

246 top_overlap_specs = self.tech_info.overlap_cap_by_layer_names.get(top_layer_name, None) 

-

247 if not top_overlap_specs: 

-

248 warning(f"No overlap cap specified for layer top={top_layer_name}") 

-

249 continue 

-

250 

-

251 rdb_cat_top_layer = report.create_category(rdb_cat_overlap, f"top_layer={top_layer_name}") 

-

252 

-

253 shapes_top_layer = layer_regions_by_name[top_layer_name] 

-

254 

-

255 for bot_layer_name in reversed(layer_names_below[top_layer_name]): 

-

256 bot_net2regions = layer2net2regions.get(bot_layer_name, None) 

-

257 if not bot_net2regions: 

-

258 continue 

-

259 

-

260 overlap_cap_spec = top_overlap_specs.get(bot_layer_name, None) 

-

261 if not overlap_cap_spec: 

-

262 warning(f"No overlap cap specified for layer top={top_layer_name}/bottom={bot_layer_name}") 

-

263 continue 

-

264 

-

265 rdb_cat_bot_layer = report.create_category(rdb_cat_top_layer, f"bot_layer={bot_layer_name}") 

-

266 

-

267 shielded_region = shielded_regions_between_layers[(top_layer_name, bot_layer_name)].and_(shapes_top_layer) 

-

268 rdb_output(rdb_cat_bot_layer, "Shielded Between Layers Region", shielded_region) 

-

269 

-

270 for net_top in top_net2regions.keys(): 

-

271 shapes_top_net: kdb.Region = top_net2regions[net_top].dup() 

-

272 

-

273 for net_bot in bot_net2regions.keys(): 

-

274 if net_top == net_bot: 

-

275 continue 

-

276 

-

277 shapes_bot_net: kdb.Region = bot_net2regions[net_bot] 

-

278 

-

279 overlapping_shapes = shapes_top_net.__and__(shapes_bot_net) 

-

280 if overlapping_shapes: 

-

281 rdb_cat_nets = report.create_category(rdb_cat_bot_layer, f"{net_top}{net_bot}") 

-

282 rdb_output(rdb_cat_nets, "Overlapping Shapes", overlapping_shapes) 

-

283 

-

284 shielded_net_shapes = overlapping_shapes.__and__(shielded_region) 

-

285 rdb_output(rdb_cat_nets, "Shielded Shapes", shielded_net_shapes) 

-

286 

-

287 unshielded_net_shapes = overlapping_shapes - shielded_net_shapes 

-

288 rdb_output(rdb_cat_nets, "Unshielded Shapes", unshielded_net_shapes) 

-

289 

-

290 area_um2 = overlapping_shapes.area() * dbu**2 

-

291 shielded_area_um2 = shielded_net_shapes.area() * dbu**2 

-

292 unshielded_area_um2 = area_um2 - shielded_area_um2 

-

293 cap_femto = unshielded_area_um2 * overlap_cap_spec.capacitance / 1000.0 

-

294 shielded_cap_femto = shielded_area_um2 * overlap_cap_spec.capacitance / 1000.0 

-

295 info(f"(Overlap): {top_layer_name}({net_top})-{bot_layer_name}({net_bot}): " 

-

296 f"Unshielded area: {unshielded_area_um2} µm^2, " 

-

297 f"cap: {round(cap_femto, 2)} fF") 

-

298 if cap_femto > 0.0: 

-

299 ovk = OverlapKey(layer_top=top_layer_name, 

-

300 net_top=net_top, 

-

301 layer_bot=bot_layer_name, 

-

302 net_bot=net_bot) 

-

303 cap = OverlapCap(key=ovk, 

-

304 cap_value=cap_femto, 

-

305 shielded_area=shielded_area_um2, 

-

306 unshielded_area=unshielded_area_um2, 

-

307 tech_spec=overlap_cap_spec) 

-

308 report.create_category( # used as info text 

-

309 rdb_cat_nets, 

-

310 f"{round(cap_femto, 3)} fF " 

-

311 f"({round(shielded_cap_femto, 3)} fF shielded " 

-

312 f"of total {round(cap_femto+shielded_cap_femto, 3)} fF)" 

-

313 ) 

-

314 extraction_results.overlap_coupling[ovk] = cap 

-

315 

-

316 # (2) SIDEWALL CAPACITANCE 

-

317 # 

-

318 for layer_name in layer2net2regions.keys(): 

-

319 if layer_name == substrate_layer_name: 

-

320 continue 

-

321 

-

322 sidewall_cap_spec = self.tech_info.sidewall_cap_by_layer_name.get(layer_name, None) 

-

323 if not sidewall_cap_spec: 

-

324 warning(f"No sidewall cap specified for layer {layer_name}") 

-

325 continue 

-

326 

-

327 net2regions = layer2net2regions.get(layer_name, None) 

-

328 if not net2regions: 

-

329 continue 

-

330 

-

331 rdb_cat_sw_layer = report.create_category(rdb_cat_sidewall_old, f"layer={layer_name}") 

-

332 

-

333 space_markers = space_markers_by_layer_name[layer_name] 

-

334 

-

335 for i, net1 in enumerate(net2regions.keys()): 

-

336 for j, net2 in enumerate(net2regions.keys()): 

-

337 if i < j: 

-

338 

-

339 # info(f"Sidewall on {layer_name}: Nets {net1} <-> {net2}") 

-

340 shapes1: kdb.Region = net2regions[net1] 

-

341 shapes2: kdb.Region = net2regions[net2] 

-

342 

-

343 markers_net1 = space_markers.interacting(shapes1) 

-

344 sidewall_edge_pairs = markers_net1.interacting(shapes2) 

-

345 

-

346 if not sidewall_edge_pairs: 

-

347 continue 

-

348 

-

349 rdb_cat_sw_nets = report.create_category(rdb_cat_sidewall_old, f"{net1} - {net2}") 

-

350 rdb_output(rdb_cat_sw_nets, f"Shapes {net1}", shapes1) 

-

351 rdb_output(rdb_cat_sw_nets, f"Shapes {net2}", shapes2) 

-

352 rdb_output(rdb_cat_sw_nets, f"Markers interacting {net1}", markers_net1) 

-

353 rdb_output(rdb_cat_sw_nets, f"Markers interacting {net1}-{net2}", sidewall_edge_pairs) 

-

354 

-

355 for idx, pair in enumerate(sidewall_edge_pairs): 

-

356 edge1: kdb.Edge = pair.first 

-

357 edge2: kdb.Edge = pair.second 

-

358 

-

359 # TODO: support non-parallel situations 

-

360 # avg_length = (edge1.length() + edge2.length()) / 2.0 

-

361 # avg_distance = (pair.polygon(0).perimeter() - edge1.length() - edge2.length()) / 2.0 

-

362 avg_length = min(edge1.length(), edge2.length()) 

-

363 avg_distance = pair.distance() 

-

364 

-

365 debug(f"Edge pair distance {avg_distance}, symmetric? {pair.symmetric}, " 

-

366 f"perimeter {pair.perimeter()}, parallel? {edge1.is_parallel(edge2)}") 

-

367 

-

368 # (3) SIDEWALL CAPACITANCE 

-

369 # 

-

370 # C = Csidewall * l * t / s 

-

371 # C = Csidewall * l / s 

-

372 

-

373 length_um = avg_length * dbu 

-

374 distance_um = avg_distance * dbu 

-

375 

-

376 # NOTE: this is automatically bidirectional, 

-

377 # whereas MAGIC counts 2 sidewall contributions (one for each side of the cap) 

-

378 cap_femto = (length_um * sidewall_cap_spec.capacitance) / \ 

-

379 (distance_um + sidewall_cap_spec.offset) / 1000.0 

-

380 

-

381 rdb_output(rdb_cat_sw_nets, f"Edge Pair {idx}: {round(cap_femto, 3)} fF", pair) 

-

382 

-

383 info(f"(Sidewall) layer {layer_name}: Nets {net1} <-> {net2}: {round(cap_femto, 5)} fF") 

-

384 

-

385 swk = SidewallKey(layer=layer_name, net1=net1, net2=net2) 

-

386 sw_cap = SidewallCap(key=swk, 

-

387 cap_value=cap_femto, 

-

388 distance=distance_um, 

-

389 length=length_um, 

-

390 tech_spec=sidewall_cap_spec) 

-

391 extraction_results.sidewall_table[swk] = sw_cap 

-

392 

-

393 # 

-

394 # (3) FRINGE / SIDE OVERLAP CAPACITANCE 

-

395 # 

-

396 

-

397 class FringeEdgeNeighborhoodVisitor(kdb.EdgeNeighborhoodVisitor): 

-

398 def __init__(self, 

-

399 inside_layer_name: str, 

-

400 inside_net_name: str, 

-

401 outside_layer_name: str, 

-

402 child_names: List[str], 

-

403 tech_info: TechInfo, 

-

404 report_category: rdb.RdbCategory): 

-

405 self.inside_layer_name = inside_layer_name 

-

406 self.inside_net_name = inside_net_name 

-

407 self.outside_layer_name = outside_layer_name 

-

408 self.child_names = child_names 

-

409 # NOTE: child_names[0] is the inside net (foreign) 

-

410 # child_names[1] is the shielded net (between layers) 

-

411 # child_names[2:] are the outside nets 

-

412 self.tech_info = tech_info 

-

413 self.report_category = report_category 

-

414 

-

415 # NOTE: overlap_cap_by_layer_names is top/bot (dict is not symmetric) 

-

416 self.overlap_cap_spec = tech_info.overlap_cap_by_layer_names[inside_layer_name].get(outside_layer_name, None) 

-

417 if not self.overlap_cap_spec: 

-

418 self.overlap_cap_spec = tech_info.overlap_cap_by_layer_names[outside_layer_name][inside_layer_name] 

-

419 

-

420 self.substrate_cap_spec = tech_info.substrate_cap_by_layer_name[inside_layer_name] 

-

421 self.sideoverlap_cap_spec = tech_info.side_overlap_cap_by_layer_names[inside_layer_name][outside_layer_name] 

-

422 

-

423 self.sidewall_cap_spec = tech_info.sidewall_cap_by_layer_name[inside_layer_name] 

-

424 

-

425 self.category_name_counter: Dict[str, int] = defaultdict(int) 

-

426 self.sidewall_counter = 0 

-

427 

-

428 def begin_polygon(self, 

-

429 layout: kdb.Layout, 

-

430 cell: kdb.Cell, 

-

431 polygon: kdb.Polygon): 

-

432 debug(f"----------------------------------------") 

-

433 debug(f"Polygon: {polygon}") 

-

434 

-

435 def end_polygon(self): 

-

436 debug(f"End of polygon") 

-

437 

-

438 def on_edge(self, 

-

439 layout: kdb.Layout, 

-

440 cell: kdb.Cell, 

-

441 edge: kdb.Edge, 

-

442 neighborhood: EdgeNeighborhood): 

-

443 # 

-

444 # NOTE: this complex operation will automatically rotate every edge to be on the x-axis 

-

445 # going from 0 to edge.length 

-

446 # so we only have to consider the y-axis to get the near and far distances 

-

447 # 

-

448 

-

449 # TODO: consider z-shielding! 

-

450 

-

451 debug(f"inside_layer={self.inside_layer_name}, " 

-

452 f"inside_net={self.inside_net_name}, " 

-

453 f"outside_layer={self.outside_layer_name}, " 

-

454 f"edge = {edge}") 

-

455 

-

456 rdb_inside_layer = report.create_category(rdb_cat_sidewall, f"layer={self.inside_layer_name}") 

-

457 rdb_sidewall_inside_net = report.create_category(rdb_inside_layer, f"inside={self.inside_net_name}") 

-

458 

-

459 for (x1, x2), polygons_by_net in neighborhood: 

-

460 if not polygons_by_net: 

-

461 continue 

-

462 

-

463 edge_interval_length = x2 - x1 

-

464 edge_interval_length_um = edge_interval_length * dbu 

-

465 

-

466 edge_interval_original = (self.to_original_trans(edge) * 

-

467 kdb.Edge(kdb.Point(x1, 0), kdb.Point(x2, 0))) 

-

468 transformed_category_name = f"Edge interval {(x1, x2)}" 

-

469 self.category_name_counter[transformed_category_name] += 1 

-

470 rdb_cat_edge_interval = report.create_category( 

-

471 self.report_category, 

-

472 f"{transformed_category_name} ({self.category_name_counter[transformed_category_name]})" 

-

473 ) 

-

474 rdb_output(rdb_cat_edge_interval, f"Original Section {edge_interval_original}", edge_interval_original) 

-

475 

-

476 polygons_on_same_layer = polygons_by_net.get(1, None) 

-

477 shielded_region_lateral = kdb.Region() 

-

478 if polygons_on_same_layer: 

-

479 shielded_region_lateral.insert(polygons_on_same_layer) 

-

480 rdb_output(rdb_cat_edge_interval, 'Laterally nearby shapes', 

-

481 kdb.Region([self.to_original_trans(edge) * p for p in shielded_region_lateral])) 

-

482 

-

483 # NOTE: first lateral nearby shape blocks everything beyond (like sidewall situation) up to halo 

-

484 def distance_near(p: kdb.Polygon) -> float: 

-

485 bbox: kdb.Box = p.bbox() 

-

486 

-

487 if not p.is_box(): 

-

488 warning(f"Side overlap, outside polygon {p} is not a box. " 

-

489 f"Currently, only boxes are supported, will be using bounding box {bbox}") 

-

490 ## distance_near = (bbox.p1.y + bbox.p2.y) / 2.0 

-

491 distance_near = min(bbox.p1.y, bbox.p2.y) 

-

492 if distance_near < 0: 

-

493 distance_near = 0 

-

494 return distance_near 

-

495 

-

496 nearest_lateral_shape = (math.inf, polygons_on_same_layer[0]) 

-

497 for p in polygons_on_same_layer: 

-

498 dnear = distance_near(p) 

-

499 if dnear < nearest_lateral_shape[0]: 

-

500 nearest_lateral_shape = (dnear, p) 

-

501 

-

502 rdb_output(rdb_cat_edge_interval, 'Closest nearby shape', 

-

503 kdb.Region(self.to_original_trans(edge) * nearest_lateral_shape[1])) 

-

504 

-

505 # NOTE: this method is always called for a single nearest edge (line), so the 

-

506 # polygons have 4 points. 

-

507 # Polygons points are sorted clockwise, so the edge 

-

508 # that goes from right-to-left is the nearest edge 

-

509 nearby_opposing_edge = [e for e in nearest_lateral_shape[1].each_edge() if e.d().x < 0][-1] 

-

510 nearby_opposing_edge_trans = self.to_original_trans(edge) * nearby_opposing_edge 

-

511 

-

512 opposing_net = '<unknown>' 

-

513 # find the opposing net 

-

514 for other_net, region in layer2net2regions[self.inside_layer_name].items(): 

-

515 if other_net == self.inside_net_name: 

-

516 continue 

-

517 if region.interacting(nearby_opposing_edge_trans).count() >= 1: 

-

518 # we found the other net! 

-

519 opposing_net = other_net 

-

520 break 

-

521 

-

522 rdb_output(rdb_cat_edge_interval, 

-

523 f"Closest nearby edge (net {opposing_net})", [nearby_opposing_edge_trans]) 

-

524 

-

525 sidewall_edge_pair = [nearby_opposing_edge_trans, edge_interval_original] 

-

526 distance_um = nearest_lateral_shape[0] * dbu 

-

527 sidewall_cap_femto = (edge_interval_length_um * self.sidewall_cap_spec.capacitance) / \ 

-

528 (distance_um + self.sidewall_cap_spec.offset) / 1000.0 / 2.0 

-

529 

-

530 rdb_sidewall_outside_net = report.create_category(rdb_sidewall_inside_net, 

-

531 f"outside={opposing_net}") 

-

532 self.sidewall_counter += 1 

-

533 rdb_output(rdb_sidewall_outside_net, 

-

534 f"#{self.sidewall_counter}: " 

-

535 f"len {round(edge_interval_length_um, 3)} µm, distance {round(distance_um, 3)} µm, " 

-

536 f"{round(sidewall_cap_femto, 3)} fF", 

-

537 sidewall_edge_pair) 

-

538 

-

539 nearby_shield = kdb.Polygon([nearby_opposing_edge.p1, 

-

540 nearby_opposing_edge.p2, 

-

541 kdb.Point(x1, side_halo_dbu), 

-

542 kdb.Point(x2, side_halo_dbu)]) 

-

543 

-

544 rdb_output(rdb_cat_edge_interval, 'Nearby shield', 

-

545 kdb.Region(self.to_original_trans(edge) * nearby_shield)) 

-

546 

-

547 shielded_region_between = kdb.Region() 

-

548 shielded_polygons = polygons_by_net.get(2, None) # shielded from layers between 

-

549 if shielded_polygons: 

-

550 shielded_region_between.insert(shielded_polygons) 

-

551 

-

552 for net_index, polygons in polygons_by_net.items(): 

-

553 if net_index == 0: # laterally shielded 

-

554 continue 

-

555 elif net_index == 1: # ignore "shielded" 

-

556 continue 

-

557 

-

558 if not polygons: 

-

559 continue 

-

560 

-

561 unshielded_region: kdb.Region = kdb.Region(polygons) - shielded_region_between 

-

562 if not unshielded_region: 

-

563 continue 

-

564 

-

565 net_name = self.child_names[net_index] 

-

566 rdb_cat_outside_net = report.create_category(rdb_cat_edge_interval, 

-

567 f"outside_net={net_name}") 

-

568 

-

569 rdb_output(rdb_cat_outside_net, 'Unshielded', 

-

570 kdb.Region([self.to_original_trans(edge) * p for p in unshielded_region])) 

-

571 

-

572 for p in unshielded_region: 

-

573 bbox: kdb.Box = p.bbox() 

-

574 

-

575 if not p.is_box(): 

-

576 warning(f"Side overlap, outside polygon {p} is not a box. " 

-

577 f"Currently, only boxes are supported, will be using bounding box {bbox}") 

-

578 distance_near = bbox.p1.y #+ 1 

-

579 if distance_near < 0: 

-

580 distance_near = 0 

-

581 distance_far = bbox.p2.y #- 2 

-

582 if distance_far < 0: 

-

583 distance_far = 0 

-

584 try: 

-

585 assert distance_near >= 0 

-

586 assert distance_far >= distance_near 

-

587 except AssertionError: 

-

588 print() 

-

589 raise 

-

590 

-

591 if distance_far == distance_near: 

-

592 continue 

-

593 

-

594 distance_near_um = distance_near * dbu 

-

595 distance_far_um = distance_far * dbu 

-

596 

-

597 # NOTE: overlap scaling is 1/50 (see MAGIC ExtTech) 

-

598 alpha_scale_factor = 0.02 * 0.01 * 0.5 * 200.0 

-

599 alpha_c = self.overlap_cap_spec.capacitance * alpha_scale_factor 

-

600 

-

601 # see Magic ExtCouple.c L1164 

-

602 cnear = (2.0 / math.pi) * math.atan(alpha_c * distance_near_um) 

-

603 cfar = (2.0 / math.pi) * math.atan(alpha_c * distance_far_um) 

-

604 

-

605 # "cfrac" is the fractional portion of the fringe cap seen 

-

606 # by tile tp along its length. This is independent of the 

-

607 # portion of the boundary length that tile tp occupies. 

-

608 cfrac = cfar - cnear 

-

609 

-

610 # The fringe portion extracted from the substrate will be 

-

611 # different than the portion added to the coupling layer. 

-

612 sfrac: float 

-

613 

-

614 # see Magic ExtCouple.c L1198 

-

615 alpha_s = self.substrate_cap_spec.area_capacitance / alpha_scale_factor 

-

616 if alpha_s != alpha_c: 

-

617 snear = (2.0 / math.pi) * math.atan(alpha_s * distance_near_um) 

-

618 sfar = (2.0 / math.pi) * math.atan(alpha_s * distance_far_um) 

-

619 sfrac = sfar - snear 

-

620 else: 

-

621 sfrac = cfrac 

-

622 

-

623 if outside_layer_name == substrate_layer_name: 

-

624 cfrac = sfrac 

-

625 

-

626 cap_femto = (cfrac * edge_interval_length_um * 

-

627 self.sideoverlap_cap_spec.capacitance / 1000.0) 

-

628 if cap_femto > 0.0: 

-

629 report.create_category(rdb_cat_outside_net, f"{round(cap_femto, 3)} fF") # used as info text 

-

630 

-

631 sok = SideOverlapKey(layer_inside=self.inside_layer_name, 

-

632 net_inside=self.inside_net_name, 

-

633 layer_outside=self.outside_layer_name, 

-

634 net_outside=net_name) 

-

635 sov = extraction_results.sideoverlap_table.get(sok, None) 

-

636 if sov: 

-

637 sov.cap_value += cap_femto 

-

638 else: 

-

639 sov = SideOverlapCap(key=sok, cap_value=cap_femto) 

-

640 extraction_results.sideoverlap_table[sok] = sov 

-

641 

-

642 # efflength = (cfrac - sov.so_coupfrac) * (double) length; 

-

643 # cap += e->ec_cap * efflength; 

-

644 # 

-

645 # subfrac += sov.so_subfrac; / *Just add the shielded fraction * / 

-

646 # efflength = (sfrac - subfrac) * (double) length; 

-

647 # 

-

648 # subcap = ExtCurStyle->exts_perimCap[ta][0] * efflength; 

-

649 

-

650 # TODO: shielding lateral 

-

651 

-

652 # TODO: fringe portion extracted from substrate 

-

653 

-

654 for inside_layer_name in layer2net2regions.keys(): 

-

655 if inside_layer_name == substrate_layer_name: 

-

656 continue 

-

657 

-

658 inside_net2regions = layer2net2regions.get(inside_layer_name, None) 

-

659 if not inside_net2regions: 

-

660 continue 

-

661 

-

662 inside_fringe_specs = self.tech_info.side_overlap_cap_by_layer_names.get(inside_layer_name, None) 

-

663 if not inside_fringe_specs: 

-

664 warning(f"No fringe / side overlap cap specified for layer inside={inside_layer_name}") 

-

665 continue 

-

666 

-

667 shapes_inside_layer = layer_regions_by_name[inside_layer_name] 

-

668 fringe_halo_inside = shapes_inside_layer.sized(side_halo_dbu) - shapes_inside_layer 

-

669 

-

670 rdb_cat_inside_layer = report.create_category(rdb_cat_fringe, f"inside_layer={inside_layer_name}") 

-

671 rdb_output(rdb_cat_inside_layer, "fringe_halo_inside", fringe_halo_inside) 

-

672 

-

673 # Side Overlap: metal <-> metal (additionally, substrate) 

-

674 for outside_layer_name in layer2net2regions.keys(): 

-

675 if inside_layer_name == outside_layer_name: 

-

676 continue 

-

677 

-

678 outside_net2regions = layer2net2regions.get(outside_layer_name, None) 

-

679 if not outside_net2regions: 

-

680 continue 

-

681 

-

682 cap_spec = inside_fringe_specs.get(outside_layer_name, None) 

-

683 if not cap_spec: 

-

684 warning(f"No side overlap cap specified for " 

-

685 f"layer inside={inside_layer_name}/outside={outside_layer_name}") 

-

686 continue 

-

687 

-

688 shapes_outside_layer = layer_regions_by_name[outside_layer_name] 

-

689 if not shapes_outside_layer: 

-

690 continue 

-

691 

-

692 shapes_outside_layer_within_halo = shapes_outside_layer.__and__(fringe_halo_inside) 

-

693 if not shapes_outside_layer_within_halo: 

-

694 continue 

-

695 

-

696 rdb_cat_outside_layer = report.create_category(rdb_cat_inside_layer, 

-

697 f"outside_layer={outside_layer_name}") 

-

698 

-

699 shielded_regions_between = shielded_regions_between_layers[(inside_layer_name, outside_layer_name)] 

-

700 rdb_output(rdb_cat_outside_layer, 'Shielded between layers', shielded_regions_between) 

-

701 

-

702 for net_inside in inside_net2regions.keys(): 

-

703 shapes_inside_net: kdb.Region = inside_net2regions[net_inside] 

-

704 if not shapes_inside_net: 

-

705 continue 

-

706 

-

707 rdb_cat_inside_net = report.create_category(rdb_cat_outside_layer, 

-

708 f"inside_net={net_inside}") 

-

709 

-

710 visitor = FringeEdgeNeighborhoodVisitor( 

-

711 inside_layer_name=inside_layer_name, 

-

712 inside_net_name=net_inside, 

-

713 outside_layer_name=outside_layer_name, 

-

714 child_names=[net_inside, 'NEARBY_SHAPES', 'SHIELD_BETWEEN'] + 

-

715 [k for k in outside_net2regions.keys() if k != net_inside], 

-

716 tech_info=self.tech_info, 

-

717 report_category=rdb_cat_inside_net 

-

718 ) 

-

719 

-

720 nearby_shapes = shapes_inside_layer - shapes_inside_net 

-

721 # children = [kdb.CompoundRegionOperationNode.new_secondary(shapes_inside_net), 

-

722 children = [kdb.CompoundRegionOperationNode.new_foreign(), 

-

723 kdb.CompoundRegionOperationNode.new_secondary(nearby_shapes), 

-

724 kdb.CompoundRegionOperationNode.new_secondary(shielded_regions_between)] + \ 

-

725 [kdb.CompoundRegionOperationNode.new_secondary(region) 

-

726 for net, region in list(outside_net2regions.items()) 

-

727 if net != net_inside] 

-

728 

-

729 node = kdb.CompoundRegionOperationNode.new_edge_neighborhood( 

-

730 children, 

-

731 visitor, 

-

732 0, # bext 

-

733 0, # eext, 

-

734 0, # din 

-

735 side_halo_dbu # dout 

-

736 ) 

-

737 

-

738 shapes_inside_net.complex_op(node) 

-

739 

-

740 for so in extraction_results.sideoverlap_table.values(): 

-

741 info(so) 

-

742 

-

743 return extraction_results 
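For orientation, the sidewall and fringe formulas used in the extractor above reduce to simple arithmetic. A hedged numeric sketch (all values are invented placeholders, not tech data):

import math

# Sidewall: C = c_sidewall * length / (distance + offset), scaled to fF
length_um, distance_um = 10.0, 0.5
c_sidewall, offset = 50.0, 0.1           # placeholder tech values
cap_femto = (length_um * c_sidewall) / (distance_um + offset) / 1000.0   # ~0.83 fF

# Fringe fraction (Magic-style atan model used by the edge-neighborhood visitor)
alpha_c = 1.2                            # placeholder scaled overlap coefficient
d_near_um, d_far_um = 0.2, 1.0
cfrac = (2.0 / math.pi) * (math.atan(alpha_c * d_far_um) - math.atan(alpha_c * d_near_um))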

-
diff --git a/pycov/z_b89b04cf284a76bf___init___py.html b/pycov/z_b89b04cf284a76bf___init___py.html
deleted file mode 100644
index 052a6b10..00000000
--- a/pycov/z_b89b04cf284a76bf___init___py.html
+++ /dev/null
@@ -1,141 +0,0 @@
- Coverage for kpex\log\__init__.py: 100%, 1 statements (coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24""" 

-

25The Logging Module 

-

26------------------ 

-

27 

-

28kpex log handler, 

-

29implemented using the ``log`` and ``rich`` libraries. 

-

30""" 

-

31 

-

32from .logger import ( 

-

33 LogLevel, 

-

34 set_log_level, 

-

35 register_additional_handler, 

-

36 deregister_additional_handler, 

-

37 console, 

-

38 debug, 

-

39 rule, 

-

40 subproc, 

-

41 info, 

-

42 warning, 

-

43 error 

-

44) 
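Typical use of this logging facade looks like the following sketch (message text is illustrative):

from kpex.log import LogLevel, set_log_level, info, warning

set_log_level(LogLevel.DEBUG)
info("starting extraction")
warning("layer has no capacitance spec")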

-
diff --git a/pycov/z_b89b04cf284a76bf_logger_py.html b/pycov/z_b89b04cf284a76bf_logger_py.html
deleted file mode 100644
index 7234c537..00000000
--- a/pycov/z_b89b04cf284a76bf_logger_py.html
+++ /dev/null
@@ -1,280 +0,0 @@
- Coverage for kpex\log\logger.py: 95%, 79 statements (coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from __future__ import annotations 

-

25from enum import IntEnum 

-

26from functools import cached_property 

-

27import logging 

-

28import rich.console 

-

29import rich.logging 

-

30from typing import * 

-

31 

-

32 

-

33class LogLevel(IntEnum): 

-

34 ALL = 0 

-

35 DEBUG = 10 

-

36 SUBPROCESS = 12 

-

37 VERBOSE = 15 

-

38 INFO = 20 

-

39 WARNING = 30 

-

40 ERROR = 40 

-

41 CRITICAL = 50 

-

42 DEFAULT = SUBPROCESS 

-

43 

-

44 @classmethod 

-

45 @cached_property 

-

46 def level_by_name(cls) -> Dict[str, LogLevel]: 

-

47 return {e.name: e for e in cls} 

-

48 

-

49 

-

50class LogLevelFormatter(logging.Formatter): 

-

51 def format(self, record: logging.LogRecord) -> str: 

-

52 msg = record.getMessage() 

-

53 match record.levelno: 

-

54 case LogLevel.WARNING.value: return f"[yellow]{msg}" 

-

55 case LogLevel.ERROR.value: return f"[red]{msg}" 

-

56 case _: 

-

57 return msg 

-

58 

-

59 

-

60class LogLevelFilter(logging.Filter): 

-

61 def __init__(self, levels: Iterable[str], invert: bool = False): 

-

62 super().__init__() 

-

63 self.levels = levels 

-

64 self.invert = invert 

-

65 

-

66 def filter(self, record: logging.LogRecord) -> bool: 

-

67 if self.invert: 

-

68 return record.levelname not in self.levels 

-

69 else: 

-

70 return record.levelname in self.levels 

-

71 

-

72 

-

73console = rich.console.Console() 

-

74__logger = logging.getLogger("__kpex__") 

-

75 

-

76 

-

77def set_log_level(log_level: LogLevel): 

-

78 __logger.setLevel(log_level) 

-

79 

-

80 

-

81def register_additional_handler(handler: logging.Handler): 

-

82 """ 

-

83 Adds a new handler to the default logger. 

-

84 

-

85 :param handler: The new handler. Must be of type ``logging.Handler`` 

-

86 or its subclasses. 

-

87 """ 

-

88 __logger.addHandler(handler) 

-

89 

-

90 

-

91def deregister_additional_handler(handler: logging.Handler): 

-

92 """ 

-

93 Removes a registered handler from the default logger. 

-

94 

-

95 :param handler: The handler. If not registered, the behavior 

-

96 of this function is undefined. 

-

97 """ 

-

98 __logger.removeHandler(handler) 

-

99 

-

100 

-

101 

-

102def configure_logger(): 

-

103 global __logger, console 

-

104 

-

105 for level in LogLevel: 

-

106 logging.addLevelName(level=level.value, levelName=level.name) 

-

107 

-

108 subprocess_rich_handler = rich.logging.RichHandler( 

-

109 console=console, 

-

110 show_time=False, 

-

111 omit_repeated_times=False, 

-

112 show_level=False, 

-

113 show_path=False, 

-

114 enable_link_path=False, 

-

115 markup=False, 

-

116 tracebacks_word_wrap=False, 

-

117 keywords=[] 

-

118 ) 

-

119 subprocess_rich_handler.addFilter(LogLevelFilter(['SUBPROCESS'])) 

-

120 

-

121 rich_handler = rich.logging.RichHandler( 

-

122 console=console, 

-

123 omit_repeated_times=False, 

-

124 show_level=True, 

-

125 markup=True, 

-

126 rich_tracebacks=True, 

-

127 tracebacks_suppress=[], 

-

128 keywords=[] 

-

129 ) 

-

130 

-

131 rich_handler.setFormatter(LogLevelFormatter(fmt='%(message)s', datefmt='[%X]')) 

-

132 rich_handler.addFilter(LogLevelFilter(['SUBPROCESS'], invert=True)) 

-

133 

-

134 set_log_level(LogLevel.SUBPROCESS) 

-

135 

-

136 __logger.handlers.clear() 

-

137 __logger.addHandler(subprocess_rich_handler) 

-

138 __logger.addHandler(rich_handler) 

-

139 

-

140 

-

141def debug(*args, **kwargs): 

-

142 if not kwargs.get('stacklevel'): # ensure logged file location is correct 

-

143 kwargs['stacklevel'] = 2 

-

144 __logger.debug(*args, **kwargs) 

-

145 

-

146 

-

147def subproc(msg: object, **kwargs): 

-

148 if not kwargs.get('stacklevel'): # ensure logged file location is correct 

-

149 kwargs['stacklevel'] = 2 

-

150 __logger.log(LogLevel.SUBPROCESS, msg, **kwargs) 

-

151 

-

152 

-

153def rule(title: str = '', **kwargs): # pragma: no cover 

-

154 """ 

-

155 Prints a horizontal line on the terminal enclosing the first argument 

-

156 if the log level is <= INFO. 

-

157 

-

158 Kwargs are passed to https://rich.readthedocs.io/en/stable/reference/console.html#rich.console.Console.rule 

-

159 

-

160 :param title: A title string to enclose in the console rule 

-

161 """ 

-

162 console.rule(title) 

-

163 

-

164 

-

165def info(*args, **kwargs): 

-

166 if not kwargs.get('stacklevel'): # ensure logged file location is correct 

-

167 kwargs['stacklevel'] = 2 

-

168 __logger.info(*args, **kwargs) 

-

169 

-

170 

-

171def warning(*args, **kwargs): 

-

172 if not kwargs.get('stacklevel'): # ensure logged file location is correct 

-

173 kwargs['stacklevel'] = 2 

-

174 __logger.warning(*args, **kwargs) 

-

175 

-

176 

-

177def error(*args, **kwargs): 

-

178 if not kwargs.get('stacklevel'): # ensure logged file location is correct 

-

179 kwargs['stacklevel'] = 2 

-

180 __logger.error(*args, **kwargs) 

-

181 

-

182 

-

183configure_logger() 
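Additional handlers can be attached next to the rich handlers configured above, for example to mirror everything except SUBPROCESS chatter into a file (the log file path is a placeholder):

import logging

file_handler = logging.FileHandler('kpex_run.log')
file_handler.addFilter(LogLevelFilter(['SUBPROCESS'], invert=True))
register_additional_handler(file_handler)
# ... run ...
deregister_additional_handler(file_handler)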

-
diff --git a/pycov/z_bb4acdb2528096e4___init___py.html b/pycov/z_bb4acdb2528096e4___init___py.html
deleted file mode 100644
index 09f6b474..00000000
--- a/pycov/z_bb4acdb2528096e4___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
- Coverage for kpex\magic\__init__.py: 100%, 0 statements (coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-
diff --git a/pycov/z_bb4acdb2528096e4_magic_runner_py.html b/pycov/z_bb4acdb2528096e4_magic_runner_py.html
deleted file mode 100644
index eacdb0fa..00000000
--- a/pycov/z_bb4acdb2528096e4_magic_runner_py.html
+++ /dev/null
@@ -1,250 +0,0 @@
- Coverage for kpex\magic\magic_runner.py: 34%, 44 statements (coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-

24from enum import StrEnum 

-

25import re 

-

26import time 

-

27from typing import * 

-

28 

-

29import os 

-

30import subprocess 

-

31import unittest 

-

32 

-

33from kpex.log import ( 

-

34 info, 

-

35 # warning, 

-

36 rule, 

-

37 subproc, 

-

38) 

-

39from kpex.version import __version__ 

-

40 

-

41 

-

42class MagicPEXMode(StrEnum): 

-

43 CC = "CC" 

-

44 RC = "RC" 

-

45 DEFAULT = "CC" 

-

46 

-

47 

-

48def prepare_magic_script(gds_path: str, 

-

49 cell_name: str, 

-

50 run_dir_path: str, 

-

51 script_path: str, 

-

52 output_netlist_path: str, 

-

53 pex_mode: MagicPEXMode, 

-

54 c_threshold: float, 

-

55 r_threshold: float, 

-

56 halo: Optional[float]): 

-

57 gds_path = os.path.abspath(gds_path) 

-

58 run_dir_path = os.path.abspath(run_dir_path) 

-

59 output_netlist_path = os.path.abspath(output_netlist_path) 

-

60 

-

61 halo_scale = 200.0 

-

62 

-

63 script: str = "" 

-

64 match pex_mode: 

-

65 case MagicPEXMode.CC: 

-

66 script = f"""# Generated by kpex {__version__} 

-

67crashbackups stop 

-

68drc off 

-

69gds read {gds_path} 

-

70load {cell_name} 

-

71select top cell 

-

72flatten {cell_name}_flat 

-

73load {cell_name}_flat 

-

74cellname delete {cell_name} -noprompt 

-

75cellname rename {cell_name}_flat {cell_name} 

-

76select top cell 

-

77extract path {run_dir_path}{'' if halo is None else f"\nextract halo {round(halo * halo_scale)}"} 

-

78extract all 

-

79ext2spice cthresh {c_threshold} 

-

80ext2spice format ngspice 

-

81ext2spice -p {run_dir_path} -o {output_netlist_path} 

-

82quit -noprompt""" 

-

83 case MagicPEXMode.RC: 

-

84 script = f"""# Generated by kpex {__version__} 

-

85crashbackups stop 

-

86drc off 

-

87gds read {gds_path} 

-

88load {cell_name} 

-

89select top cell 

-

90flatten {cell_name}_flat 

-

91load {cell_name}_flat 

-

92cellname delete {cell_name} -noprompt 

-

93cellname rename {cell_name}_flat {cell_name} 

-

94select top cell 

-

95extract path {run_dir_path}{'' if halo is None else f"\nextract halo {round(halo * halo_scale)}"} 

-

96extract do resistance 

-

97extract all 

-

98ext2sim labels on 

-

99ext2sim 

-

100extresist tolerance {r_threshold} 

-

101extresist all 

-

102ext2spice cthresh {c_threshold} 

-

103ext2spice rthresh {r_threshold} 

-

104ext2spice extresist on 

-

105ext2spice format ngspice 

-

106ext2spice -p {run_dir_path} -o {output_netlist_path} 

-

107quit -noprompt 

-

108""" 

-

109 with open(script_path, 'w') as f: 

-

110 f.write(script) 

-

111 

-

112def run_magic(exe_path: str, 

-

113 magicrc_path: str, 

-

114 script_path: str, 

-

115 log_path: str): 

-

116 args = [ 

-

117 exe_path, 

-

118 '-dnull', # 

-

119 '-noconsole', # 

-

120 '-rcfile', # 

-

121 magicrc_path, # 

-

122 script_path, # TCL script 

-

123 ] 

-

124 

-

125 info(f"Calling {' '.join(args)}, output file: {log_path}") 

-

126 

-

127 start = time.time() 

-

128 

-

129 proc = subprocess.Popen(args, 

-

130 stdin=subprocess.DEVNULL, 

-

131 stdout=subprocess.PIPE, 

-

132 stderr=subprocess.STDOUT, 

-

133 universal_newlines=True, 

-

134 text=True) 

-

135 with open(log_path, 'w') as f: 

-

136 while True: 

-

137 line = proc.stdout.readline() 

-

138 if not line: 

-

139 break 

-

140 subproc(line[:-1]) # remove newline 

-

141 f.writelines([line]) 

-

142 proc.wait() 

-

143 

-

144 duration = time.time() - start 

-

145 

-

146 rule() 

-

147 

-

148 if proc.returncode == 0: 

-

149 info(f"MAGIC succeeded after {'%.4g' % duration}s") 

-

150 else: 

-

151 raise Exception(f"MAGIC failed with status code {proc.returncode} after {'%.4g' % duration}s", 

-

152 f"see log file: {log_path}") 

-

153 
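Sketch of driving the two helpers above end to end (all paths, the cell name, the magicrc file and the thresholds are placeholders):

prepare_magic_script(gds_path='design.gds.gz',
                     cell_name='TOP',
                     run_dir_path='magic_run',
                     script_path='magic_run/pex.tcl',
                     output_netlist_path='magic_run/TOP.pex.spice',
                     pex_mode=MagicPEXMode.CC,
                     c_threshold=0.01,
                     r_threshold=100.0,
                     halo=None)
run_magic(exe_path='magic',
          magicrc_path='sky130A.magicrc',
          script_path='magic_run/pex.tcl',
          log_path='magic_run/magic.log')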

-
diff --git a/pycov/z_c489968eb1a5e358___init___py.html b/pycov/z_c489968eb1a5e358___init___py.html
deleted file mode 100644
index 1f8cfde9..00000000
--- a/pycov/z_c489968eb1a5e358___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
- Coverage for kpex\fastercap\__init__.py: 100%, 0 statements (coverage.py v7.6.8, created at 2024-12-05 16:38 +0000)

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

9# This program is free software: you can redistribute it and/or modify 

-

10# it under the terms of the GNU General Public License as published by 

-

11# the Free Software Foundation, either version 3 of the License, or 

-

12# (at your option) any later version. 

-

13# 

-

14# This program is distributed in the hope that it will be useful, 

-

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

-

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

-

17# GNU General Public License for more details. 

-

18# 

-

19# You should have received a copy of the GNU General Public License 

-

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

-

21# SPDX-License-Identifier: GPL-3.0-or-later 

-

22# -------------------------------------------------------------------------------- 

-

23# 

-
diff --git a/pycov/z_c489968eb1a5e358_fastercap_input_builder_py.html b/pycov/z_c489968eb1a5e358_fastercap_input_builder_py.html
deleted file mode 100644
index f6e87ede..00000000
--- a/pycov/z_c489968eb1a5e358_fastercap_input_builder_py.html
+++ /dev/null
@@ -1,437 +0,0 @@
[deleted coverage page for kpex\fastercap\fastercap_input_builder.py: 11%, 171 statements — report navigation plus the rendered source of FasterCapInputBuilder, which walks each net of the extracted KLayout netlist, adds metal, via, diffusion and substrate shapes as conductors, adds field-oxide, sidewall, conformal and simple dielectrics, and hands everything to a FasterCapModelBuilder]
diff --git a/pycov/z_c489968eb1a5e358_fastercap_model_generator_py.html b/pycov/z_c489968eb1a5e358_fastercap_model_generator_py.html
deleted file mode 100644
index 64675bab..00000000
--- a/pycov/z_c489968eb1a5e358_fastercap_model_generator_py.html
+++ /dev/null
@@ -1,1136 +0,0 @@
[deleted coverage page for kpex\fastercap\fastercap_model_generator.py: 83%, 658 statements — report navigation plus the rendered source of FasterCapModelBuilder and FasterCapModelGenerator, which collect conductor and dielectric sheets layer by layer (add materials, add conductor/dielectric regions with z and height, generate), triangulate the horizontal and vertical surfaces via Delaunay, and write FasterCap .lst/.geo files and STL dumps]
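A minimal sketch of the builder workflow implemented in that module (add materials, add conductor and dielectric layers, generate, write output); the constructor and method signatures follow the deleted listing, while the geometry, k values and output directory are made-up examples:

# Hypothetical use of FasterCapModelBuilder; not part of this patch.
import os
import klayout.db as kdb
from kpex.fastercap.fastercap_model_generator import FasterCapModelBuilder

builder = FasterCapModelBuilder(dbu=0.001, k_void=3.5, delaunay_amax=0.5, delaunay_b=0.5)
builder.add_material(name='fox', k=3.9)                       # dielectric material definition

metal = kdb.Region(kdb.Box(0, 0, 2000, 2000))                 # 2 µm x 2 µm square at dbu = 1 nm
builder.add_conductor(net_name='VDD', layer=metal, z=0.0, height=0.5)
fox = kdb.Region(kdb.Box(-1000, -1000, 3000, 3000))           # dielectric block around the conductor
builder.add_dielectric(material_name='fox', layer=fox, z=0.0, height=1.0)

gen = builder.generate()            # returns a FasterCapModelGenerator, or None if nothing was added
if gen is not None:
    gen.check()                     # verify that all generated surfaces are closed
    os.makedirs('out', exist_ok=True)
    gen.write_fastcap(output_dir_path='out', prefix='example_')
    gen.dump_stl(output_dir_path='out', prefix='example_')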
- - - diff --git a/pycov/z_c489968eb1a5e358_fastercap_runner_py.html b/pycov/z_c489968eb1a5e358_fastercap_runner_py.html deleted file mode 100644 index 924cfb48..00000000 --- a/pycov/z_c489968eb1a5e358_fastercap_runner_py.html +++ /dev/null @@ -1,225 +0,0 @@ - - - - - Coverage for kpex\fastercap\fastercap_runner.py: 48% - - - - - -
-
-

- Coverage for kpex\fastercap\fastercap_runner.py: - 48% -

- -

- 56 statements   - - - -

-

- « prev     - ^ index     - » next -       - coverage.py v7.6.8, - created at 2024-12-05 16:38 +0000 -

- -
-
-
-

1# 

-

2# -------------------------------------------------------------------------------- 

-

3# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl 

-

4# Johannes Kepler University, Institute for Integrated Circuits. 

-

5# 

-

6# This file is part of KPEX  

-

7# (see https://github.com/martinjankoehler/klayout-pex). 

-

8# 

-

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
import re
import subprocess
import time
from typing import *

from kpex.log import (
    info,
    # warning,
    rule,
    subproc,
)
from kpex.common.capacitance_matrix import CapacitanceMatrix


def run_fastercap(exe_path: str,
                  lst_file_path: str,
                  log_path: str,
                  tolerance: float,
                  d_coeff: float,
                  mesh_refinement_value: float,
                  ooc_condition: Optional[int],
                  auto_preconditioner: bool,
                  galerkin_scheme: bool,
                  jacobi_preconditioner: bool):
    args = [
        exe_path,
        '-b',                          # console mode, without GUI
        '-i',                          # Dump detailed time and memory information
        '-v',                          # Verbose output
        f"-a{tolerance}",              # stop when relative error lower than threshold
        f"-d{d_coeff}",                # Direct potential interaction coefficient to mesh refinement ratio
        f"-m{mesh_refinement_value}",  # Mesh relative refinement value
    ]

    if ooc_condition is not None:
        args += [f"-f{ooc_condition}"]

    if auto_preconditioner:
        args += ['-ap']

    if galerkin_scheme:
        args += ['-g']

    if jacobi_preconditioner:
        args += ['-pj']

    args += [
        lst_file_path
    ]
    info(f"Calling {' '.join(args)}, output file: {log_path}")

    rule()
    start = time.time()

    proc = subprocess.Popen(args,
                            stdin=subprocess.DEVNULL,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True,
                            text=True)
    with open(log_path, 'w') as f:
        while True:
            line = proc.stdout.readline()
            if not line:
                break
            subproc(line[:-1])  # remove newline
            f.writelines([line])
        proc.wait()

    duration = time.time() - start

    rule()

    if proc.returncode == 0:
        info(f"FasterCap succeeded after {'%.4g' % duration}s")
    else:
        raise Exception(f"FasterCap failed with status code {proc.returncode} after {'%.4g' % duration}s",
                        f"see log file: {log_path}")


def fastercap_parse_capacitance_matrix(log_path: str) -> CapacitanceMatrix:
    with open(log_path, 'r') as f:
        rlines = f.readlines()
        rlines.reverse()

        # multiple iterations possible, find the last matrix
        for idx, line in enumerate(rlines):
            if line.strip() == "Capacitance matrix is:":
                m = re.match(r'^Dimension (\d+) x (\d+)$', rlines[idx-1])
                if not m:
                    raise Exception(f"Could not parse capacitor matrix dimensions")
                dim = int(m.group(1))
                conductor_names: List[str] = []
                rows: List[List[float]] = []
                for i in reversed(range(idx-1-dim, idx-1)):
                    line = rlines[i].strip()
                    cells = [cell.strip() for cell in line.split(' ')]
                    cells = list(filter(lambda c: len(c) >= 1, cells))
                    conductor_names.append(cells[0])
                    row = [float(cell)/1e6 for cell in cells[1:]]
                    rows.append(row)
                cm = CapacitanceMatrix(conductor_names=conductor_names, rows=rows)
                return cm

    raise Exception(f"Could not extract capacitance matrix from FasterCap log file {log_path}")
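The two helpers above are normally used back to back: run_fastercap() drives the FasterCap binary on a .lst problem file while teeing its output into a log, and fastercap_parse_capacitance_matrix() then recovers the last capacitance matrix from that log. A minimal sketch of such a call sequence follows; the dotted module path, file paths and solver settings are illustrative assumptions, not values taken from this patch.

# Hypothetical driver for the two helpers above; all paths and settings are examples.
from kpex.fastercap.fastercap_runner import (   # assumed module path
    run_fastercap,
    fastercap_parse_capacitance_matrix,
)

log_file = "output/fastercap.log"               # hypothetical log location

run_fastercap(exe_path="FasterCap",             # assumes the FasterCap binary is on PATH
              lst_file_path="output/problem.lst",
              log_path=log_file,
              tolerance=0.05,
              d_coeff=0.5,
              mesh_refinement_value=3.0,
              ooc_condition=None,               # no out-of-core setting, so no -f option is passed
              auto_preconditioner=True,
              galerkin_scheme=False,
              jacobi_preconditioner=False)

cm = fastercap_parse_capacitance_matrix(log_file)
print(cm.conductor_names)                       # e.g. ['g_1', 'g_2', ...]
print(cm.rows)                                  # each cell was divided by 1e6 by the parser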
diff --git a/pycov/z_e404b588faff9084_fastcap_runner_py.html b/pycov/z_e404b588faff9084_fastcap_runner_py.html
deleted file mode 100644
index 21b63f48..00000000
--- a/pycov/z_e404b588faff9084_fastcap_runner_py.html
+++ /dev/null
@@ -1,239 +0,0 @@
[Deleted HTML coverage report for kpex/fastcap/fastcap_runner.py: 55% of 62 statements covered, coverage.py v7.6.8, created at 2024-12-05 16:36 +0000. The Python source rendered in that report follows, stripped of HTML markup and per-line coverage annotations.]

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
import re
import time
from typing import *

import os
import subprocess

from kpex.log import (
    debug,
    info,
    warning,
    error,
    rule,
    subproc,
)
from kpex.common.capacitance_matrix import CapacitanceMatrix


def run_fastcap(exe_path: str,
                lst_file_path: str,
                log_path: str,
                expansion_order: float = 2,
                partitioning_depth: str = 'auto',
                permittivity_factor: float = 1.0,
                iterative_tolerance: float = 0.01):
    work_dir = os.path.dirname(lst_file_path)

    # we have to chdir into the directory containing the lst file,
    # so make all paths absolute, and the lst_file relative
    log_path = os.path.abspath(log_path)
    lst_file_path = os.path.basename(lst_file_path)

    info(f"Chdir to {work_dir}")
    os.chdir(work_dir)
    args = [
        exe_path,
        f"-o{expansion_order}",
    ]

    if partitioning_depth != 'auto':
        args += [
            f"-d{partitioning_depth}",
        ]

    args += [
        f"-p{permittivity_factor}",
        f"-t{iterative_tolerance}",
        f"-l{lst_file_path}",
    ]

    info(f"Calling {' '.join(args)}, output file: {log_path}")

    rule()
    start = time.time()

    proc = subprocess.Popen(args,
                            stdin=subprocess.DEVNULL,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            universal_newlines=True,
                            text=True)
    with open(log_path, 'w') as f:
        while True:
            line = proc.stdout.readline()
            if not line:
                break
            subproc(line[:-1])  # remove newline
            f.writelines([line])
        proc.wait()

    duration = time.time() - start

    rule()

    if proc.returncode == 0:
        info(f"FastCap2 succeeded after {'%.4g' % duration}s")
    else:
        raise Exception(f"FastCap2 failed with status code {proc.returncode} after {'%.4g' % duration}s",
                        f"see log file: {log_path}")


# CAPACITANCE MATRIX, picofarads
#                   1          2          3          4
# $1%GROUP2 1    7850      -7277      -2115      54.97
# $1%GROUP2 2   -7277  3.778e+05      130.9 -3.682e+05
# $2%GROUP3 3   -2115      130.9       6792      -5388
# $2%GROUP3 4   54.97 -3.682e+05      -5388  3.753e+05
def fastcap_parse_capacitance_matrix(log_path: str) -> CapacitanceMatrix:
    with open(log_path, 'r') as f:
        rlines = f.readlines()
        rlines.reverse()

        # multiple iterations possible, find the last matrix
        for idx, line in enumerate(rlines):
            if line.startswith('CAPACITANCE MATRIX, '):
                section_m = re.match(r'CAPACITANCE MATRIX, (\w+)', line)
                if not section_m:
                    raise Exception(f"Could not parse capacitor unit")
                unit_str = section_m.group(1)

                dimension_line = rlines[idx-1].strip()
                dimensions = dimension_line.split()  # remove whitespace
                dim = len(dimensions)
                conductor_names: List[str] = []
                rows: List[List[float]] = []
                for i in reversed(range(idx-1-dim, idx-1)):
                    line = rlines[i].strip()
                    cells = [cell.strip() for cell in line.split(' ')]
                    if cells[1] != str(i):
                        warning(f"Expected capacitor matrix row to have index {i}, but obtained {cells[1]}")
                    cells.pop(1)
                    cells = list(filter(lambda c: len(c) >= 1, cells))
                    conductor_names.append(cells[0])
                    row = [float(cell)/1e6 for cell in cells[1:]]
                    rows.append(row)
                cm = CapacitanceMatrix(conductor_names=conductor_names, rows=rows)
                return cm

    raise Exception(f"Could not extract capacitance matrix from FasterCap log file {log_path}")
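One detail worth noting in run_fastcap() above: it chdirs into the directory of the .lst file and does not change back, so a caller that relies on relative paths afterwards may want to save and restore its working directory around the call. A small sketch follows, using the module path shown in the coverage report; the paths and the binary name are hypothetical, and the option values are simply the documented defaults.

import os

# Module path as rendered above (kpex/fastcap/fastcap_runner.py); paths below are examples only.
from kpex.fastcap.fastcap_runner import run_fastcap, fastcap_parse_capacitance_matrix

lst_file = os.path.abspath("output/problem.lst")     # hypothetical FastCap2 input
log_file = os.path.abspath("output/fastcap2.log")

previous_cwd = os.getcwd()
try:
    # run_fastcap() itself chdirs into os.path.dirname(lst_file)
    run_fastcap(exe_path="fastcap",                  # assumes a FastCap2 binary on PATH
                lst_file_path=lst_file,
                log_path=log_file,
                expansion_order=2,
                partitioning_depth='auto',           # 'auto' means no -d option is passed
                permittivity_factor=1.0,
                iterative_tolerance=0.01)
finally:
    os.chdir(previous_cwd)                           # undo the chdir done by run_fastcap()

cm = fastcap_parse_capacitance_matrix(log_file)
print(cm.dimension, cm.conductor_names)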
diff --git a/pycov/z_f40df6a530c8cf33___init___py.html b/pycov/z_f40df6a530c8cf33___init___py.html
deleted file mode 100644
index ba00a1a0..00000000
--- a/pycov/z_f40df6a530c8cf33___init___py.html
+++ /dev/null
@@ -1,120 +0,0 @@
[Deleted HTML coverage report for kpex\common\__init__.py: 100% of 0 statements covered, coverage.py v7.6.8, created at 2024-12-05 16:38 +0000. The rendered source (license header only) follows.]

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
diff --git a/pycov/z_f40df6a530c8cf33_capacitance_matrix_py.html b/pycov/z_f40df6a530c8cf33_capacitance_matrix_py.html
deleted file mode 100644
index 7ebb06ee..00000000
--- a/pycov/z_f40df6a530c8cf33_capacitance_matrix_py.html
+++ /dev/null
@@ -1,184 +0,0 @@
[Deleted HTML coverage report for kpex\common\capacitance_matrix.py: 96% of 51 statements covered, coverage.py v7.6.8, created at 2024-12-05 16:38 +0000. The rendered source follows.]

#
# --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2024 Martin Jan Köhler and Harald Pretl
# Johannes Kepler University, Institute for Integrated Circuits.
#
# This file is part of KPEX
# (see https://github.com/martinjankoehler/klayout-pex).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# SPDX-License-Identifier: GPL-3.0-or-later
# --------------------------------------------------------------------------------
#
from __future__ import annotations

import copy
from dataclasses import dataclass
import os
import tempfile
from typing import *


@dataclass
class CapacitanceMatrix:
    conductor_names: List[str]  # NOTE FasterCap generates [g_1, g_2, ...]
    rows: List[List[float]]     # NOTE: in µm

    def __getitem__(self, key):
        return self.rows.__getitem__(key)

    def __setitem__(self, key, value):
        self.rows.__setitem__(key, value)

    @property
    def dimension(self):
        return len(self.conductor_names)

    @classmethod
    def parse_csv(cls, path: str, separator: str = ';'):
        with open(path, 'r') as f:
            lines = f.readlines()
            if len(lines) < 2:
                raise Exception(f"Capacitance Matrix CSV must at least have 2 lines: "
                                f"{path}")
            conductor_names = [cell.strip() for cell in lines[0].split(sep=separator)]
            rows = []
            for line in lines[1:]:
                row = [float(cell.strip()) for cell in line.split(sep=separator)]
                rows.append(row)
            return CapacitanceMatrix(conductor_names=conductor_names,
                                     rows=rows)

    def write_csv(self, output_path: str, separator: str = ';'):
        with open(output_path, 'w') as f:
            header_line = separator.join(self.conductor_names)
            f.write(header_line)
            f.write('\n')

            for row in self.rows:
                cells = ['%.12g' % cell for cell in row]
                row_line = separator.join(cells)
                f.write(row_line)
                f.write('\n')

    def averaged_off_diagonals(self) -> CapacitanceMatrix:
        c = copy.deepcopy(self)
        for i in range(len(self.rows)):
            for j in range(len(self.conductor_names)):
                if j <= i:
                    continue
                v1 = self[i][j]
                v2 = self[j][i]
                avg = (v1 + v2) / 2
                # print(f"i={i} j={j}, avg({v1}, {v2}) == {avg}")
                c[i][j] = avg
                c[j][i] = avg
        return c
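CapacitanceMatrix above is a small dataclass with index access into its rows, CSV round-tripping and an off-diagonal averaging helper that symmetrizes each C[i][j]/C[j][i] pair. A short usage sketch follows; the conductor names follow the g_1, g_2, ... convention noted in the field comment, and the file name is hypothetical.

from kpex.common.capacitance_matrix import CapacitanceMatrix

cm = CapacitanceMatrix(conductor_names=['g_1', 'g_2'],
                       rows=[[100.0, -40.0],
                             [-42.0, 105.0]])

# averaged_off_diagonals() returns a deep copy in which C[0][1] and C[1][0]
# are both replaced by their average (-41.0 here); the diagonal stays untouched.
sym = cm.averaged_off_diagonals()
assert sym[0][1] == sym[1][0] == -41.0

# CSV round trip, semicolon-separated by default
sym.write_csv('capacitance_matrix.csv')                        # hypothetical output path
again = CapacitanceMatrix.parse_csv('capacitance_matrix.csv')
assert again.dimension == 2
assert again.conductor_names == ['g_1', 'g_2']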
diff --git a/pycov/z_f568a0cfbd87c836_fastcap_runner_py.html b/pycov/z_f568a0cfbd87c836_fastcap_runner_py.html
deleted file mode 100644
index c41f4ff9..00000000
--- a/pycov/z_f568a0cfbd87c836_fastcap_runner_py.html
+++ /dev/null
@@ -1,239 +0,0 @@
[Deleted HTML coverage report for kpex\fastcap\fastcap_runner.py: 55% of 62 statements covered, coverage.py v7.6.8, created at 2024-12-05 16:38 +0000.]

[The Python source rendered in this report is identical to the kpex/fastcap/fastcap_runner.py listing reproduced above; the duplicate listing is not repeated here.]