+
+
\ No newline at end of file
diff --git a/.idea/encodings.xml b/.idea/encodings.xml
new file mode 100644
index 0000000..15a15b2
--- /dev/null
+++ b/.idea/encodings.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..8656114
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..14e46a1
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.style.yapf b/.style.yapf
new file mode 100644
index 0000000..728e3f0
--- /dev/null
+++ b/.style.yapf
@@ -0,0 +1,3 @@
+[style]
+based_on_style = pep8
+column_limit = 100
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..ff0614a
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+BSD-3-Clause AND Apache-2.0
diff --git a/LICENSE.Apache-2.0 b/LICENSE.Apache-2.0
new file mode 100644
index 0000000..d645695
--- /dev/null
+++ b/LICENSE.Apache-2.0
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/LICENSE.BSD-3-Clause b/LICENSE.BSD-3-Clause
new file mode 100644
index 0000000..f4af21f
--- /dev/null
+++ b/LICENSE.BSD-3-Clause
@@ -0,0 +1,29 @@
+BSD 3-Clause License
+
+Copyright (c) 2018, Regents of the University of California
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/OA/bag_test/.oalib b/OA/bag_test/.oalib
new file mode 100644
index 0000000..21ffef8
--- /dev/null
+++ b/OA/bag_test/.oalib
@@ -0,0 +1,6 @@
+
+
+
+
+
diff --git a/OA/bag_test/cdsinfo.tag b/OA/bag_test/cdsinfo.tag
new file mode 100644
index 0000000..b75aa95
--- /dev/null
+++ b/OA/bag_test/cdsinfo.tag
@@ -0,0 +1,41 @@
+#
+# This is a cdsinfo.tag file.
+#
+# See the "Cadence Application Infrastructure Reference Manual" for
+# details on the format of this file, its semantics, and its use.
+#
+# The `#' character denotes a comment. Removing the leading `#'
+# character from any of the entries below will activate them.
+#
+# CDSLIBRARY entry - add this entry if the directory containing
+# this cdsinfo.tag file is the root of a Cadence library.
+# CDSLIBRARY
+#
+# CDSLIBCHECK - set this entry to require that libraries have
+# a cdsinfo.tag file with a CDSLIBRARY entry. Legal values are
+# ON and OFF. By default (OFF), directories named in a cds.lib file
+# do not have to have a cdsinfo.tag file with a CDSLIBRARY entry.
+# CDSLIBCHECK ON
+#
+# DMTYPE - set this entry to define the DM system for Cadence's
+# Generic DM facility. Values will be shifted to lower case.
+# DMTYPE none
+# DMTYPE crcs
+# DMTYPE tdm
+# DMTYPE sync
+#
+# NAMESPACE - set this entry to define the library namespace according
+# to the type of machine on which the data is stored. Legal values are
+# `LibraryNT' and
+# `LibraryUnix'.
+# NAMESPACE LibraryUnix
+#
+# Other entries may be added for use by specific applications as
+# name-value pairs. Application documentation will describe the
+# use and behaviour of these entries when appropriate.
+#
+# Current Settings:
+#
+CDSLIBRARY
+DMTYPE none
+NAMESPACE LibraryUnix
diff --git a/OA/bag_test/data.dm b/OA/bag_test/data.dm
new file mode 100644
index 0000000..8a22928
Binary files /dev/null and b/OA/bag_test/data.dm differ
diff --git a/OA/bag_test/net_bus/data.dm b/OA/bag_test/net_bus/data.dm
new file mode 100644
index 0000000..604d5c2
Binary files /dev/null and b/OA/bag_test/net_bus/data.dm differ
diff --git a/OA/bag_test/net_bus/schematic/data.dm b/OA/bag_test/net_bus/schematic/data.dm
new file mode 100644
index 0000000..844b149
Binary files /dev/null and b/OA/bag_test/net_bus/schematic/data.dm differ
diff --git a/OA/bag_test/net_bus/schematic/master.tag b/OA/bag_test/net_bus/schematic/master.tag
new file mode 100644
index 0000000..26be1be
--- /dev/null
+++ b/OA/bag_test/net_bus/schematic/master.tag
@@ -0,0 +1,2 @@
+-- Master.tag File, Rev:1.0
+sch.oa
diff --git a/OA/bag_test/net_bus/schematic/sch.oa b/OA/bag_test/net_bus/schematic/sch.oa
new file mode 100644
index 0000000..2c78c03
Binary files /dev/null and b/OA/bag_test/net_bus/schematic/sch.oa differ
diff --git a/OA/bag_test/net_bus/schematic/thumbnail_128x128.png b/OA/bag_test/net_bus/schematic/thumbnail_128x128.png
new file mode 100644
index 0000000..2972530
Binary files /dev/null and b/OA/bag_test/net_bus/schematic/thumbnail_128x128.png differ
diff --git a/OA/bag_test/net_bus/symbol/master.tag b/OA/bag_test/net_bus/symbol/master.tag
new file mode 100644
index 0000000..e1024da
--- /dev/null
+++ b/OA/bag_test/net_bus/symbol/master.tag
@@ -0,0 +1,2 @@
+-- Master.tag File, Rev:1.0
+symbol.oa
diff --git a/OA/bag_test/net_bus/symbol/symbol.oa b/OA/bag_test/net_bus/symbol/symbol.oa
new file mode 100644
index 0000000..5fc68b9
Binary files /dev/null and b/OA/bag_test/net_bus/symbol/symbol.oa differ
diff --git a/OA/bag_test/net_bus/symbol/thumbnail_128x128.png b/OA/bag_test/net_bus/symbol/thumbnail_128x128.png
new file mode 100644
index 0000000..ee4eb00
Binary files /dev/null and b/OA/bag_test/net_bus/symbol/thumbnail_128x128.png differ
diff --git a/OA/bag_test/pin_array_0/data.dm b/OA/bag_test/pin_array_0/data.dm
new file mode 100644
index 0000000..6b0e753
Binary files /dev/null and b/OA/bag_test/pin_array_0/data.dm differ
diff --git a/OA/bag_test/pin_array_0/schematic/data.dm b/OA/bag_test/pin_array_0/schematic/data.dm
new file mode 100644
index 0000000..27effc2
Binary files /dev/null and b/OA/bag_test/pin_array_0/schematic/data.dm differ
diff --git a/OA/bag_test/pin_array_0/schematic/master.tag b/OA/bag_test/pin_array_0/schematic/master.tag
new file mode 100644
index 0000000..26be1be
--- /dev/null
+++ b/OA/bag_test/pin_array_0/schematic/master.tag
@@ -0,0 +1,2 @@
+-- Master.tag File, Rev:1.0
+sch.oa
diff --git a/OA/bag_test/pin_array_0/schematic/sch.oa b/OA/bag_test/pin_array_0/schematic/sch.oa
new file mode 100644
index 0000000..60e7a6e
Binary files /dev/null and b/OA/bag_test/pin_array_0/schematic/sch.oa differ
diff --git a/OA/bag_test/pin_array_0/schematic/thumbnail_128x128.png b/OA/bag_test/pin_array_0/schematic/thumbnail_128x128.png
new file mode 100644
index 0000000..633f28b
Binary files /dev/null and b/OA/bag_test/pin_array_0/schematic/thumbnail_128x128.png differ
diff --git a/OA/bag_test/pin_array_0/symbol/master.tag b/OA/bag_test/pin_array_0/symbol/master.tag
new file mode 100644
index 0000000..e1024da
--- /dev/null
+++ b/OA/bag_test/pin_array_0/symbol/master.tag
@@ -0,0 +1,2 @@
+-- Master.tag File, Rev:1.0
+symbol.oa
diff --git a/OA/bag_test/pin_array_0/symbol/symbol.oa b/OA/bag_test/pin_array_0/symbol/symbol.oa
new file mode 100644
index 0000000..b77d3ea
Binary files /dev/null and b/OA/bag_test/pin_array_0/symbol/symbol.oa differ
diff --git a/OA/bag_test/pin_array_0/symbol/thumbnail_128x128.png b/OA/bag_test/pin_array_0/symbol/thumbnail_128x128.png
new file mode 100644
index 0000000..5e383d2
Binary files /dev/null and b/OA/bag_test/pin_array_0/symbol/thumbnail_128x128.png differ
diff --git a/README.md b/README.md
index 7c0f63c..aad14f2 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,12 @@
-# BAG_framework_release
+# BAG (BAG AMS Generator)
-open-source release of BAG
\ No newline at end of file
+BAG, a recursive acronym which stands for "BAG AMS Generator", is a fork and successor of
+the [BAG\_framework](https://github.com/ucb-art/BAG_framework).
+
+## Licensing
+
+This library is licensed under the Apache-2.0 license. However, some source files are licensed
+under both Apache-2.0 and BSD-3-Clause license, meaning that the user must comply with the
+terms and conditions of both licenses. See [here](LICENSE.BSD-3-Clause) for full text of the
+BSD license, and [here](LICENSE.Apache-2.0) for full text of the Apache license. See individual
+files to check if they fall under Apache-2.0, or both Apache-2.0 and BSD-3-Clause.
diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 0000000..378eac2
--- /dev/null
+++ b/docs/.gitignore
@@ -0,0 +1 @@
+build
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..86ecbb9
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,230 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+ $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don\'t have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help
+help:
+ @echo "Please use \`make ' where is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " applehelp to make an Apple Help Book"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " epub3 to make an epub3"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+ @echo " coverage to run coverage check of the documentation (if enabled)"
+ @echo " dummy to check syntax errors of document sources"
+
+.PHONY: clean
+clean:
+ rm -rf $(BUILDDIR)/*
+
+.PHONY: html
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+.PHONY: dirhtml
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+.PHONY: singlehtml
+singlehtml:
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+.PHONY: pickle
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+.PHONY: json
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+.PHONY: htmlhelp
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+.PHONY: qthelp
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/BAG.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/BAG.qhc"
+
+.PHONY: applehelp
+applehelp:
+ $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
+ @echo
+ @echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
+ @echo "N.B. You won't be able to view it unless you put it in" \
+ "~/Library/Documentation/Help or install it in your application" \
+ "bundle."
+
+.PHONY: devhelp
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/BAG"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/BAG"
+ @echo "# devhelp"
+
+.PHONY: epub
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+.PHONY: epub3
+epub3:
+ $(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
+ @echo
+ @echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."
+
+.PHONY: latex
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+.PHONY: latexpdf
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: latexpdfja
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+.PHONY: text
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+.PHONY: man
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+.PHONY: texinfo
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+.PHONY: info
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+.PHONY: gettext
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+.PHONY: changes
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+.PHONY: linkcheck
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+.PHONY: doctest
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+.PHONY: coverage
+coverage:
+ $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
+ @echo "Testing of coverage in the sources finished, look at the " \
+ "results in $(BUILDDIR)/coverage/python.txt."
+
+.PHONY: xml
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+.PHONY: pseudoxml
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+.PHONY: dummy
+dummy:
+ $(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
+ @echo
+ @echo "Build finished. Dummy builder generates no files."
diff --git a/docs/README b/docs/README
new file mode 100644
index 0000000..42ba499
--- /dev/null
+++ b/docs/README
@@ -0,0 +1,15 @@
+To build/update documentation:
+
+1. make sure BAG Python's bin folder is in your path.
+
+2. run:
+
+ ./refresh_api.sh
+
+ to generate API documentations.
+
+3. run:
+
+ make html
+
+ to build the documentation webpage.
diff --git a/docs/refresh_api.sh b/docs/refresh_api.sh
new file mode 100755
index 0000000..a8bd263
--- /dev/null
+++ b/docs/refresh_api.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env tcsh
+
+sphinx-apidoc --force --output-dir=source/api ../bag
diff --git a/pybag b/pybag
new file mode 160000
index 0000000..087e30e
--- /dev/null
+++ b/pybag
@@ -0,0 +1 @@
+Subproject commit 087e30e250047edba8fc22c0350260cfd53de9de
diff --git a/run_scripts/compile_verilog.il b/run_scripts/compile_verilog.il
new file mode 100644
index 0000000..7f3b68f
--- /dev/null
+++ b/run_scripts/compile_verilog.il
@@ -0,0 +1,64 @@
+;; SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+;; Copyright 2018 Regents of the University of California
+;; All rights reserved.
+;;
+;; Redistribution and use in source and binary forms, with or without
+;; modification, are permitted provided that the following conditions are met:
+;;
+;; * Redistributions of source code must retain the above copyright notice, this
+;; list of conditions and the following disclaimer.
+;;
+;; * Redistributions in binary form must reproduce the above copyright notice,
+;; this list of conditions and the following disclaimer in the documentation
+;; and/or other materials provided with the distribution.
+;;
+;; * Neither the name of the copyright holder nor the names of its
+;; contributors may be used to endorse or promote products derived from
+;; this software without specific prior written permission.
+;;
+;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+;; Copyright 2019 Blue Cheetah Analog Design Inc.
+;;
+;; Licensed under the Apache License, Version 2.0 (the "License");
+;; you may not use this file except in compliance with the License.
+;; You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+
+procedure( compile_netlist_views(fname "t")
+ let( (p line info_list lib cell view obj cv)
+ unless( p = infile(fname)
+ error("Cannot open file %s" fname)
+ )
+ while( gets(line p)
+ info_list = parseString(line)
+ lib = car(info_list)
+ cell = cadr(info_list)
+ view = caddr(info_list)
+ obj = ddGetObj(lib cell view "verilog.sv" nil "a")
+ cv = dbOpenCellViewByType(lib cell view "netlist" "ac")
+ dbSetConnCurrent(cv)
+ dbSave(cv)
+ dbClose(cv)
+ )
+ close(p)
+ )
+)
+
+compile_netlist_views("verilog_cell_list.txt")
diff --git a/run_scripts/copy_pytest_outputs.py b/run_scripts/copy_pytest_outputs.py
new file mode 100644
index 0000000..3422ac7
--- /dev/null
+++ b/run_scripts/copy_pytest_outputs.py
@@ -0,0 +1,68 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+from shutil import copy
+from pathlib import Path
+
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse command-line arguments for copying pytest output data.

    Returns
    -------
    argparse.Namespace
        Parsed arguments: data_dir, package, cell, copy_lay.
    """
    parser = argparse.ArgumentParser(description='Copy pytest output data.')
    parser.add_argument('data_dir', help='data directory.')
    parser.add_argument('package', help='package name.')
    # empty cell name means "copy outputs for every cell"
    parser.add_argument('--cell', dest='cell', help='cell name.', default='')
    parser.add_argument('--lay', dest='copy_lay', action='store_true', default=False,
                        help='copy layout files.')
    args = parser.parse_args()
    return args
+
+
def run_main(args: argparse.Namespace) -> None:
    """Copy pytest output files for ``args.package`` into its data directory.

    Only cells whose output directory already exists under the data directory
    are refreshed; GDS files are skipped unless --lay was given.
    """
    root_dir = Path(args.data_dir)
    pkg_name: str = args.package
    cell_name: str = args.cell
    copy_lay: bool = args.copy_lay

    dst_root = root_dir / pkg_name
    if not dst_root.is_dir():
        raise ValueError(f'package data directory {dst_root} is not a directory')

    src_root = Path('pytest_output', pkg_name)
    if not src_root.is_dir():
        raise ValueError(f'Cannot find pytest output directory {src_root}')

    for cell_dir in src_root.iterdir():
        if not cell_dir.is_dir():
            continue
        # output directory names look like "<cell>_<index>"
        base_name = cell_dir.name.rsplit('_', maxsplit=1)[0]
        if cell_name and base_name != cell_name:
            continue
        dst_dir = dst_root / cell_dir.name
        # only refresh cells that are already tracked in the data directory
        if not dst_dir.is_dir():
            continue
        for fpath in cell_dir.iterdir():
            if not copy_lay and fpath.name.endswith('gds'):
                continue
            copy(str(fpath), str(dst_dir / fpath.name))


if __name__ == '__main__':
    _args = parse_options()

    run_main(_args)
diff --git a/run_scripts/dsn_cell.py b/run_scripts/dsn_cell.py
new file mode 100644
index 0000000..9d40081
--- /dev/null
+++ b/run_scripts/dsn_cell.py
@@ -0,0 +1,62 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Mapping, Any
+
+import argparse
+
+from bag.io import read_yaml
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+from bag.simulation.design import DesignerBase
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse command-line options for designing a cell from a spec file."""
    p = argparse.ArgumentParser(description='Generate cell from spec file.')
    p.add_argument('specs', help='Design specs file name.')
    # boolean flags; dest is derived from the long option name
    flag_specs = [
        (('-x', '--extract'), 'Run extracted simulation'),
        (('-f', '--force_extract'),
         'Force RC extraction even if layout/schematic are unchanged'),
        (('-s', '--force_sim'),
         'Force simulation even if simulation netlist is unchanged'),
        (('-c', '--gen_sch'), 'Generate testbench schematics for debugging.'),
    ]
    for names, help_str in flag_specs:
        p.add_argument(*names, action='store_true', default=False, help=help_str)
    return p.parse_args()
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Run the DesignerBase design flow on the given spec file.

    Parameters
    ----------
    prj : BagProject
        BAG project instance used for generation and simulation.
    args : argparse.Namespace
        parsed command-line options (specs, extract, force_sim,
        force_extract, gen_sch).
    """
    specs: Mapping[str, Any] = read_yaml(args.specs)

    DesignerBase.design_cell(prj, specs, extract=args.extract, force_sim=args.force_sim,
                             force_extract=args.force_extract, gen_sch=args.gen_sch)


if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing BagProject (named "bprj") when this script is exec'd
    # inside an interactive session; otherwise create a new one.
    local_dict = locals()
    if 'bprj' not in local_dict:
        print('creating BAG project')
        _prj = BagProject()
    else:
        print('loading BAG project')
        _prj = local_dict['bprj']

    run_main(_prj, _args)
diff --git a/run_scripts/gds_filter.py b/run_scripts/gds_filter.py
new file mode 100644
index 0000000..4f05692
--- /dev/null
+++ b/run_scripts/gds_filter.py
@@ -0,0 +1,49 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse the input/output GDS paths and the transform specs file."""
    parser = argparse.ArgumentParser(description='Transform GDS file.')
    # three required positional arguments
    for pos_name, pos_help in [('in_file', 'GDS input file.'),
                               ('out_file', 'GDS output file.'),
                               ('specs_file', 'GDS transform specs file.')]:
        parser.add_argument(pos_name, help=pos_help)
    return parser.parse_args()
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Remove labels from the input GDS per the specs file, writing the output GDS."""
    gds_in = args.in_file
    gds_out = args.out_file
    transform_specs = args.specs_file
    prj.remove_gds_labels(gds_in, gds_out, transform_specs)


if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing interactive BagProject ("bprj") when available.
    _scope = locals()
    if 'bprj' in _scope:
        print('loading BAG project')
        _prj = _scope['bprj']
    else:
        print('creating BAG project')
        _prj = BagProject()

    run_main(_prj, _args)
diff --git a/run_scripts/gds_import.py b/run_scripts/gds_import.py
new file mode 100644
index 0000000..3da24ba
--- /dev/null
+++ b/run_scripts/gds_import.py
@@ -0,0 +1,48 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse the GDS file name and the target layout library name."""
    parser = argparse.ArgumentParser(description='Generate cell from spec file.')
    arg_info = {'fname': 'GDS file name.', 'lib_name': 'layout library name.'}
    for name, msg in arg_info.items():
        parser.add_argument(name, help=msg)
    return parser.parse_args()
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Import the given GDS file into the target OA layout library."""
    gds_path = args.fname
    out_lib = args.lib_name
    prj.import_gds_file(gds_path, out_lib)


if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing interactive BagProject ("bprj") when available.
    _scope = locals()
    if 'bprj' in _scope:
        print('loading BAG project')
        _prj = _scope['bprj']
    else:
        print('creating BAG project')
        _prj = BagProject()

    run_main(_prj, _args)
diff --git a/run_scripts/gen_cell.py b/run_scripts/gen_cell.py
new file mode 100644
index 0000000..614ce07
--- /dev/null
+++ b/run_scripts/gen_cell.py
@@ -0,0 +1,82 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+
+from bag.io import read_yaml
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse generation flow options for gen_cell."""
    parser = argparse.ArgumentParser(description='Generate cell from spec file.')
    parser.add_argument('specs', help='YAML specs file name.')
    # (option strings, dest, action, help); store_false flags default to True,
    # store_true flags default to False.
    _flags = [
        (('-d', '--drc'), 'run_drc', 'store_true', 'run DRC.'),
        (('-v', '--lvs'), 'run_lvs', 'store_true', 'run LVS.'),
        (('-x', '--rcx'), 'run_rcx', 'store_true', 'run RCX.'),
        (('-raw',), 'raw', 'store_true',
         'generate GDS/netlist files instead of OA cellviews.'),
        (('-flat',), 'flat', 'store_true', 'generate flat netlist.'),
        (('-lef',), 'gen_lef', 'store_true', 'generate LEF.'),
        (('-hier', '--gen-hier'), 'gen_hier', 'store_true', 'generate Hierarchy.'),
        (('-mod', '--gen-model'), 'gen_mod', 'store_true',
         'generate behavioral model files.'),
        (('-sim',), 'gen_sim', 'store_true', 'generate simulation netlist instead.'),
        (('-shell',), 'gen_shell', 'store_true', 'generate verilog shell file.'),
        (('-lay',), 'export_lay', 'store_true', 'export layout file.'),
        (('-netlist',), 'gen_netlist', 'store_true', 'generate netlist file.'),
        (('--no-layout',), 'gen_lay', 'store_false', 'disable layout.'),
        (('--no-sch',), 'gen_sch', 'store_false', 'disable schematic.'),
    ]
    for opt_names, dest, action, help_str in _flags:
        default = action == 'store_false'
        parser.add_argument(*opt_names, dest=dest, action=action,
                            default=default, help=help_str)
    return parser.parse_args()
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Generate the cell described by the YAML spec with the selected flows.

    Parameters
    ----------
    prj : BagProject
        BAG project used to run generation, DRC/LVS/RCX, and exports.
    args : argparse.Namespace
        parsed command-line flags; each flag maps one-to-one onto a
        generate_cell() keyword argument below.
    """
    specs = read_yaml(args.specs)
    prj.generate_cell(specs, raw=args.raw, gen_lay=args.gen_lay, run_drc=args.run_drc,
                      gen_sch=args.gen_sch, run_lvs=args.run_lvs, run_rcx=args.run_rcx,
                      gen_lef=args.gen_lef, flat=args.flat, sim_netlist=args.gen_sim,
                      gen_hier=args.gen_hier, gen_model=args.gen_mod,
                      gen_shell=args.gen_shell, export_lay=args.export_lay,
                      gen_netlist=args.gen_netlist)


if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing BagProject (named "bprj") when this script is exec'd
    # inside an interactive session; otherwise create a new one.
    local_dict = locals()
    if 'bprj' not in local_dict:
        print('creating BAG project')
        _prj = BagProject()
    else:
        print('loading BAG project')
        _prj = local_dict['bprj']

    run_main(_prj, _args)
diff --git a/run_scripts/gen_wrapper.py b/run_scripts/gen_wrapper.py
new file mode 100644
index 0000000..37eebf6
--- /dev/null
+++ b/run_scripts/gen_wrapper.py
@@ -0,0 +1,95 @@
+from typing import Dict, Any, List, cast
+
+import sys
+import argparse
+from pathlib import Path
+
+from pybag.enum import DesignOutput
+from bag.io import read_yaml
+from bag.core import BagProject
+from bag.design.database import ModuleDB
+
+
def _info(etype, value, tb):
    """Custom sys.excepthook: drop into pdb post-mortem on uncaught exceptions.

    NOTE(review): this duplicates bag.util.misc.register_pdb_hook used by the
    other run scripts; consider switching for consistency.
    """
    if hasattr(sys, 'ps1') or not sys.stderr.isatty():
        # we are in interactive mode or we don't have a tty-like
        # device, so we call the default hook
        sys.__excepthook__(etype, value, tb)
    else:
        import pdb
        import traceback
        # we are NOT in interactive mode, print the exception...
        traceback.print_exception(etype, value, tb)
        print()
        # ...then start the debugger in post-mortem mode.
        pdb.post_mortem(tb)


# install the debugger hook for all uncaught exceptions in this script
sys.excepthook = _info
+
+
def parse_options() -> argparse.Namespace:
    """Parse options for generating a DUT plus its schematic wrapper."""
    parser = argparse.ArgumentParser(description='Generate cell from spec file.')
    parser.add_argument('specs', help='YAML specs file name.')
    # (option strings, dest, action, default, help)
    for names, dest, action, default, msg in [
        (['--no-sch'], 'gen_sch', 'store_false', True, 'disable schematic, only netlist'),
        (['--no-layout'], 'gen_lay', 'store_false', True, 'disable layout.'),
        (['-x', '--rcx'], 'run_rcx', 'store_true', False, 'run RCX.'),
    ]:
        parser.add_argument(*names, dest=dest, action=action, default=default, help=msg)
    return parser.parse_args()
+
+
def generate_wrapper(sch_db: ModuleDB, wrapper_params: Dict[str, Any],
                     cv_info_list: List, dut_netlist: str, gen_sch: bool) -> None:
    """Netlist the schematic wrapper cell around an already-generated DUT.

    Parameters
    ----------
    sch_db : ModuleDB
        schematic database used to build masters and netlists.
    wrapper_params : Dict[str, Any]
        wrapper specification.  NOTE: the 'wrapper_lib', 'wrapper_cell',
        'impl_cell' and 'netlist_file' entries are pop()-ed, so the caller's
        dict is mutated.
    cv_info_list : List
        cellview info objects for the DUT netlist, forwarded to
        batch_schematic as cv_info_list.
    dut_netlist : str
        path to the DUT netlist file to include (cv_netlist).
    gen_sch : bool
        True to also generate the wrapper schematic cellview for debugging.
    """
    wrapper_lib = wrapper_params.pop('wrapper_lib')
    wrapper_cell = wrapper_params.pop('wrapper_cell')
    wrapper_impl_cell = wrapper_params.pop('impl_cell')
    wrapper_netlist_path = Path(wrapper_params.pop('netlist_file'))
    # make sure the output directory exists before netlisting
    wrapper_netlist_path.parent.mkdir(parents=True, exist_ok=True)

    wrapper_cls = sch_db.get_schematic_class(wrapper_lib, wrapper_cell)
    wrapper_master = sch_db.new_master(wrapper_cls, params=wrapper_params['params'])
    wrapper_list = [(wrapper_master, wrapper_impl_cell)]

    # emit a Spectre netlist that references the DUT netlist file
    sch_db.batch_schematic(wrapper_list, output=DesignOutput.SPECTRE,
                           fname=str(wrapper_netlist_path), cv_info_list=cv_info_list,
                           cv_netlist=dut_netlist)
    print(f'wrapper_netlist: {str(wrapper_netlist_path)}')
    if gen_sch:
        sch_db.batch_schematic(wrapper_list, output=DesignOutput.SCHEMATIC)
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Generate the DUT, then wrap it with the wrapper schematic/netlist.

    Reads 'impl_lib', 'dut_params' and 'wrapper_params' from the YAML spec,
    generates the DUT netlist (and optionally layout/schematic), substitutes
    the generated DUT into the wrapper parameters, and finally netlists the
    wrapper around the DUT netlist.
    """
    specs = read_yaml(args.specs)

    # layout database is only needed when layout generation is enabled
    lay_db = prj.make_template_db(specs['impl_lib']) if args.gen_lay else None
    sch_db = prj.make_module_db(specs['impl_lib'])
    cv_info = []
    dut_params = specs['dut_params']
    dut_netlist = prj.generate_cell(dut_params, lay_db=lay_db, sch_db=sch_db,
                                    gen_lay=args.gen_lay, gen_sch=args.gen_sch,
                                    cv_info_out=cv_info,
                                    run_rcx=args.run_rcx)

    print(f'dut_netlist: {dut_netlist}')
    wrapper_params = specs['wrapper_params']
    # point the wrapper instance at the freshly generated DUT cell
    prj.replace_dut_in_wrapper(wrapper_params['params'], dut_params['impl_lib'],
                               dut_params['impl_cell'])
    generate_wrapper(sch_db, wrapper_params, cv_info, dut_netlist,
                     gen_sch=args.gen_sch)


if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing BagProject (named "bprj") when this script is exec'd
    # inside an interactive session; otherwise create a new one.
    local_dict = locals()
    if 'bprj' not in local_dict:
        print('creating BAG project')
        _prj = BagProject()
    else:
        print('loading BAG project')
        _prj = local_dict['bprj']

    run_main(_prj, _args)
diff --git a/run_scripts/generate_netlist_config.sh b/run_scripts/generate_netlist_config.sh
new file mode 100755
index 0000000..f857fd8
--- /dev/null
+++ b/run_scripts/generate_netlist_config.sh
@@ -0,0 +1,62 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Generate configurations files needed to netlist from BAG.
# This script must be run from the working directory.

source .bashrc_pypath

# disable QT session manager warnings
unset SESSION_MANAGER

# fail fast when the BAG python interpreter is not configured;
# quote the expansion so the test is well-formed when it is empty
if [ -z "${BAG_PYTHON+x}" ]
then
    echo "BAG_PYTHON is unset"
    exit 1
fi

# netlist setup directory lives inside the PDK-specific tech config directory
export OUTDIR="${BAG_TECH_CONFIG_DIR##*/}/netlist_setup"
export CONF="${OUTDIR}/gen_config.yaml"

# quote paths so directories containing spaces are passed intact
${BAG_PYTHON} BAG_framework/run_scripts/netlist_config.py "${CONF}" "${OUTDIR}"
diff --git a/run_scripts/generate_verilog.py b/run_scripts/generate_verilog.py
new file mode 100644
index 0000000..fc1ca96
--- /dev/null
+++ b/run_scripts/generate_verilog.py
@@ -0,0 +1,78 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from jinja2 import Environment, FileSystemLoader
+
+from bag.io.file import read_yaml
+
+
def run_main() -> None:
    """Render Jinja verilog models and emit the cell list for SKILL import.

    For every (cell, template) pair in verilog_cell_map.yaml this renders the
    template into <lib_loc>/<lib_name>/<cell>/<view_name>/verilog.sv and
    appends a "<lib> <cell> <view>" line to verilog_cell_list.txt, which the
    compile_netlist_views SKILL script consumes.
    """
    verilog_dir = 'verilog_models'
    cell_map_fname = 'verilog_cell_map.yaml'
    skill_read_fname = 'verilog_cell_list.txt'
    lib_name = 'AAAMODEL_QDR_HYBRID3'
    lib_loc = 'gen_libs'
    view_name = 'systemVerilog'
    model_fname = 'verilog.sv'

    # mapping of cell name -> jinja template file name
    cell_map = read_yaml(cell_map_fname)

    jinja_env = Environment(loader=FileSystemLoader(verilog_dir))

    with open(skill_read_fname, 'w') as g:
        for cell_name, fname in cell_map.items():
            root_dir = os.path.join(lib_loc, lib_name, cell_name, view_name)
            os.makedirs(root_dir, exist_ok=True)

            content = jinja_env.get_template(fname).render(cell_name=cell_name)

            with open(os.path.join(root_dir, model_fname), 'w') as f:
                f.write(content)

            # f-string instead of dated %-formatting; output is identical
            g.write(f'{lib_name} {cell_name} {view_name}\n')


if __name__ == '__main__':
    run_main()
diff --git a/run_scripts/klayout.sh b/run_scripts/klayout.sh
new file mode 100755
index 0000000..40ade88
--- /dev/null
+++ b/run_scripts/klayout.sh
@@ -0,0 +1,17 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Launch klayout in the background with the PDK layer-display settings;
# "$@" (quoted) forwards file arguments containing spaces intact.
exec klayout -l "${BAG_TECH_CONFIG_DIR}/gds_setup/layer_map.lyp" "$@" &
diff --git a/run_scripts/meas_cell.py b/run_scripts/meas_cell.py
new file mode 100644
index 0000000..8428988
--- /dev/null
+++ b/run_scripts/meas_cell.py
@@ -0,0 +1,68 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Mapping, Any
+
+import argparse
+
+from pybag.enum import LogLevel
+
+from bag.io import read_yaml
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse measurement options for meas_cell."""
    parser = argparse.ArgumentParser(description='Measure cell from spec file.')
    parser.add_argument('specs', help='Design specs file name.')
    # boolean flags; dest is derived from the long option name
    bool_flags = [
        (('-x', '--extract'), 'Run extracted simulation'),
        (('-c', '--gen_sch'), 'Generate testbench schematics for debugging.'),
        (('-q', '--quiet'), 'Print only warning messages or above.'),
        (('-f', '--fake'), 'Enable fake measurement.'),
        (('--force_extract',),
         'Force RC extraction even if layout/schematic are unchanged'),
        (('--force_sim',),
         'Force simulation even if simulation netlist is unchanged'),
    ]
    for opt_names, help_str in bool_flags:
        parser.add_argument(*opt_names, action='store_true', default=False, help=help_str)
    return parser.parse_args()
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Measure the cell described by the spec file.

    Parameters
    ----------
    prj : BagProject
        BAG project used to run the measurement flow.
    args : argparse.Namespace
        parsed command-line flags (specs, extract, force_sim, force_extract,
        gen_sch, quiet, fake).
    """
    specs: Mapping[str, Any] = read_yaml(args.specs)

    # -q/--quiet suppresses INFO-level log messages
    log_level = LogLevel.WARN if args.quiet else LogLevel.INFO
    prj.measure_cell(specs, extract=args.extract, force_sim=args.force_sim,
                     force_extract=args.force_extract, gen_sch=args.gen_sch,
                     log_level=log_level, fake=args.fake)


if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing BagProject (named "bprj") when this script is exec'd
    # inside an interactive session; otherwise create a new one.
    local_dict = locals()
    if 'bprj' not in local_dict:
        print('creating BAG project')
        _prj = BagProject()
    else:
        print('loading BAG project')
        _prj = local_dict['bprj']

    run_main(_prj, _args)
diff --git a/run_scripts/meas_cell_old.py b/run_scripts/meas_cell_old.py
new file mode 100644
index 0000000..8b4aac0
--- /dev/null
+++ b/run_scripts/meas_cell_old.py
@@ -0,0 +1,62 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import pprint
+import argparse
+
+from bag.io import read_yaml
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse options for the legacy cell-measurement flow."""
    parser = argparse.ArgumentParser(description='Simulate cell from spec file.')
    parser.add_argument('specs', help='YAML specs file name.')
    # (option strings, dest, action, default, help)
    for names, dest, action, default, msg in [
        (('-x', '--extract'), 'extract', 'store_true', False,
         'generate extracted netlist.'),
        (('--no-dut',), 'gen_dut', 'store_false', True,
         'disable DUT generation. Good if only loading from file.'),
        (('--load_file',), 'load_from_file', 'store_true', False,
         'load from file whenever possible.'),
        (('-mismatch', '--do-mismatch'), 'mismatch', 'store_true', False,
         'enables mismatch analysis'),
    ]:
        parser.add_argument(*names, dest=dest, action=action, default=default, help=msg)
    return parser.parse_args()
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Run the legacy measurement flow and pretty-print its results.

    Parameters
    ----------
    prj : BagProject
        BAG project used to run the measurement flow.
    args : argparse.Namespace
        parsed command-line flags (specs, extract, gen_dut, load_from_file,
        mismatch).
    """
    specs = read_yaml(args.specs)
    ans = prj.measure_cell_old(specs, gen_dut=args.gen_dut, load_from_file=args.load_from_file,
                               extract=args.extract, mismatch=args.mismatch)
    print('measurement results:')
    pprint.pprint(ans)


if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing BagProject (named "bprj") when this script is exec'd
    # inside an interactive session; otherwise create a new one.
    local_dict = locals()
    if 'bprj' not in local_dict:
        print('creating BAG project')
        _prj = BagProject()
    else:
        print('loading BAG project')
        _prj = local_dict['bprj']

    run_main(_prj, _args)
diff --git a/run_scripts/netlist_config.py b/run_scripts/netlist_config.py
new file mode 100644
index 0000000..8287a2e
--- /dev/null
+++ b/run_scripts/netlist_config.py
@@ -0,0 +1,687 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generate setup yaml files for various netlist outputs
+
+Please run this script through the generate_netlist_config.sh shell script, which will setup
+the PYTHONPATH correctly.
+"""
+
+from typing import Dict, Any, Tuple, List
+
+import copy
+import argparse
+from pathlib import Path
+
+from jinja2 import Environment, DictLoader
+
+from pybag.enum import DesignOutput
+
+from bag.io.file import read_yaml, write_yaml, open_file
+
+netlist_map_default = {
+ 'basic': {
+ 'cds_thru': {
+ 'lib_name': 'basic',
+ 'cell_name': 'cds_thru',
+ 'in_terms': [],
+ 'io_terms': ['src', 'dst'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {},
+ 'ignore': False,
+ },
+ 'noConn': {
+ 'lib_name': 'basic',
+ 'cell_name': 'noConn',
+ 'in_terms': [],
+ 'io_terms': ['noConn'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {},
+ 'ignore': True,
+ },
+ },
+ 'analogLib': {
+ 'cap': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'cap',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'c': [3, ''],
+ 'l': [3, ''],
+ 'm': [3, ''],
+ 'w': [3, ''],
+ }
+ },
+ 'cccs': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'cccs',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'fgain': [3, '1.0'],
+ 'maxm': [3, ''],
+ 'minm': [3, ''],
+ 'vref': [3, ''],
+ }
+ },
+ 'ccvs': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'ccvs',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'hgain': [3, '1.0'],
+ 'maxm': [3, ''],
+ 'minm': [3, ''],
+ 'vref': [3, ''],
+ }
+ },
+ 'dcblock': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'dcblock',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'c': [3, ''],
+ }
+ },
+ 'dcfeed': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'dcfeed',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'l': [3, ''],
+ }
+ },
+ 'idc': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'idc',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'acm': [3, ''],
+ 'acp': [3, ''],
+ 'idc': [3, ''],
+ 'pacm': [3, ''],
+ 'pacp': [3, ''],
+ 'srcType': [3, 'dc'],
+ 'xfm': [3, ''],
+ }
+ },
+ 'ideal_balun': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'ideal_balun',
+ 'in_terms': [],
+ 'io_terms': ['d', 'c', 'p', 'n'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {}
+ },
+ 'ind': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'ind',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'l': [3, ''],
+ 'm': [3, ''],
+ 'r': [3, ''],
+ }
+ },
+ 'iprobe': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'iprobe',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {}
+ },
+ 'ipulse': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'ipulse',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'i1': [3, ''],
+ 'i2': [3, ''],
+ 'idc': [3, ''],
+ 'per': [3, ''],
+ 'pw': [3, ''],
+ 'srcType': [3, 'pulse'],
+ 'td': [3, ''],
+ }
+ },
+ 'isin': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'isin',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'freq': [3, ''],
+ 'ia': [3, ''],
+ 'idc': [3, ''],
+ 'srcType': [3, 'sine'],
+ }
+ },
+ 'gnd': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'gnd',
+ 'in_terms': [],
+ 'io_terms': ['gnd!'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {},
+ 'ignore': True,
+ },
+ 'port': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'port',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'num': [3, ''],
+ 'r': [3, ''],
+ 'srcType': [3, 'sine'],
+ }
+ },
+ 'res': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'res',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'l': [3, ''],
+ 'm': [3, ''],
+ 'r': [3, ''],
+ 'w': [3, ''],
+ }
+ },
+ 'switch': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'switch',
+ 'in_terms': [],
+ 'io_terms': ['N+', 'N-', 'NC+', 'NC-'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'rc': [3, ''],
+ 'ro': [3, ''],
+ 'vt1': [3, ''],
+ 'vt2': [3, ''],
+ }
+ },
+ 'vccs': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'vccs',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS', 'NC+', 'NC-'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'ggain': [3, '1.0'],
+ 'maxm': [3, ''],
+ 'minm': [3, ''],
+ }
+ },
+ 'vcvs': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'vcvs',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS', 'NC+', 'NC-'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'egain': [3, '1.0'],
+ 'maxm': [3, ''],
+ 'minm': [3, ''],
+ }
+ },
+ 'vdc': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'vdc',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'acm': [3, ''],
+ 'acp': [3, ''],
+ 'pacm': [3, ''],
+ 'pacp': [3, ''],
+ 'srcType': [3, 'dc'],
+ 'vdc': [3, ''],
+ 'xfm': [3, ''],
+ }
+ },
+ 'vpulse': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'vpulse',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'per': [3, ''],
+ 'pw': [3, ''],
+ 'srcType': [3, 'pulse'],
+ 'td': [3, ''],
+ 'v1': [3, ''],
+ 'v2': [3, ''],
+ 'vdc': [3, ''],
+ }
+ },
+ 'vpwlf': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'vpwlf',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'fileName': [3, ''],
+ 'srcType': [3, 'pwl'],
+ }
+ },
+ 'vsin': {
+ 'lib_name': 'analogLib',
+ 'cell_name': 'vsin',
+ 'in_terms': [],
+ 'io_terms': ['PLUS', 'MINUS'],
+ 'is_prim': True,
+ 'nets': [],
+ 'out_terms': [],
+ 'props': {
+ 'freq': [3, ''],
+ 'srcType': [3, 'sine'],
+ 'va': [3, ''],
+ 'vdc': [3, ''],
+ }
+ },
+ },
+}
+
# Default netlist-map entry for a BAG transistor primitive ('cell_name' is
# filled in per cell by populate_mos()).  Each props entry maps a parameter
# name to [code, default-value]; the meaning of code 3 is defined by the
# netlist-map consumer -- presumably "string-valued parameter", TODO confirm
# against the BAG netlist documentation.
mos_default = {
    'lib_name': 'BAG_prim',
    'cell_name': '',
    'in_terms': [],
    'out_terms': [],
    'io_terms': ['B', 'D', 'G', 'S'],
    'nets': [],
    'is_prim': True,
    'props': {
        'l': [3, ''],
        'w': [3, ''],
        'nf': [3, ''],
    },
}

# Default netlist-map entry for a diode primitive (completed by
# populate_diode()).
dio_default = {
    'lib_name': 'BAG_prim',
    'cell_name': '',
    'in_terms': [],
    'out_terms': [],
    'io_terms': ['MINUS', 'PLUS'],
    'nets': [],
    'is_prim': True,
    'props': {
        'l': [3, ''],
        'w': [3, ''],
    },
}

# Default netlist-map entry for a metal resistor primitive (completed by
# populate_res_metal()).
res_metal_default = {
    'lib_name': 'BAG_prim',
    'cell_name': '',
    'in_terms': [],
    'out_terms': [],
    'io_terms': ['MINUS', 'PLUS'],
    'nets': [],
    'is_prim': True,
    'props': {
        'l': [3, ''],
        'w': [3, ''],
    },
}
+
# Jinja templates for the per-format primitive wrapper definitions.
# {{ ... }} fields are substituted at render time; param_list iterates
# (name, value) pairs appended to the instance line.

# CDL transistor wrapper subcircuit.
mos_cdl_fmt = """.SUBCKT {{ cell_name }} B D G S
*.PININFO B:B D:B G:B S:B
MM0 D G S B {{ model_name }}{% for key, val in param_list %} {{ key }}={{ val }}{% endfor %}
.ENDS
"""

# CDL diode wrapper; terminal order comes from the tech config 'port_order'.
dio_cdl_fmt = """.SUBCKT {{ cell_name }} MINUS PLUS
*.PININFO MINUS:B PLUS:B
XD0 {{ ports[0] }} {{ ports[1] }} {{ model_name }}{% for key, val in param_list %} {{ key }}={{ val }}{% endfor %}
.ENDS
"""

# CDL diode wrapper without parameter substitution (static diodes).
dio_cdl_fmt_static = """.SUBCKT {{ cell_name }} MINUS PLUS
*.PININFO MINUS:B PLUS:B
XD0 {{ ports[0] }} {{ ports[1] }} {{ model_name }}
.ENDS
"""

# CDL metal resistor wrapper.
res_metal_cdl_fmt = """.SUBCKT {{ cell_name }} MINUS PLUS
*.PININFO MINUS:B PLUS:B
RR0 PLUS MINUS {{ model_name }} {% for key, val in param_list %} {{ key }}={{ val }}{% endfor %}
.ENDS
"""

# Spectre transistor wrapper subcircuit.
mos_spectre_fmt = """subckt {{ cell_name }} B D G S
parameters l w nf
MM0 D G S B {{ model_name }}{% for key, val in param_list %} {{ key }}={{ val }}{% endfor %}
ends {{ cell_name }}
"""

# Spectre diode wrapper.
dio_spectre_fmt = """subckt {{ cell_name }} MINUS PLUS
parameters l w
XD0 {{ ports[0] }} {{ ports[1] }} {{ model_name }}{% for key, val in param_list %} {{ key }}={{ val }}{% endfor %}
ends {{ cell_name }}
"""

# Spectre diode wrapper without parameter substitution (static diodes).
dio_spectre_fmt_static = """subckt {{ cell_name }} MINUS PLUS
parameters l w
XD0 {{ ports[0] }} {{ ports[1] }} {{ model_name }}
ends {{ cell_name }}
"""

# Spectre metal resistor wrapper.
res_metal_spectre_fmt = """subckt {{ cell_name }} MINUS PLUS
parameters l w
RR0 PLUS MINUS {{ model_name }} {% for key, val in param_list %} {{ key }}={{ val }}{% endfor %}
ends {{ cell_name }}
"""

# Verilog stub for a transistor: terminals only, no behavior.
mos_verilog_fmt = """module {{ cell_name }}(
    inout B,
    inout D,
    inout G,
    inout S
);
endmodule
"""

# Hand-written Spectre model of an ideal balun (two ideal transformers).
scs_ideal_balun = """subckt ideal_balun d c p n
    K0 d 0 p c transformer n1=2
    K1 d 0 c n transformer n1=2
ends ideal_balun
"""
+
# Per-format output configuration: file name of the generated primitive
# netlist plus the jinja template name for each primitive kind.  An empty
# template name means the format has no netlist for that primitive
# (e.g. Verilog only stubs transistors via a separate path).
supported_formats = {
    DesignOutput.CDL: {
        'fname': 'bag_prim.cdl',
        'mos': 'mos_cdl',
        'diode': 'diode_cdl',
        'diode_static': 'diode_cdl_static',
        'res_metal': 'res_metal_cdl',
    },
    DesignOutput.SPECTRE: {
        'fname': 'bag_prim.scs',
        'mos': 'mos_scs',
        'diode': 'diode_scs',
        'diode_static': 'diode_scs_static',
        'res_metal': 'res_metal_scs',
    },
    DesignOutput.VERILOG: {
        'fname': 'bag_prim.v',
        'mos': '',
        'diode': '',
        'diode_static': '',
        'res_metal': '',
    },
    DesignOutput.SYSVERILOG: {
        'fname': 'bag_prim.sv',
        'mos': '',
        'diode': '',
        'diode_static': '',
        'res_metal': '',
    },
}

# Shared jinja environment holding all primitive templates by name.
# keep_trailing_newline preserves each template's final newline so the
# generated netlist files stay well-formed when concatenated.
jinja_env = Environment(
    loader=DictLoader(
        {'mos_cdl': mos_cdl_fmt,
         'mos_scs': mos_spectre_fmt,
         'mos_verilog': mos_verilog_fmt,
         'diode_cdl': dio_cdl_fmt,
         'diode_scs': dio_spectre_fmt,
         'diode_cdl_static': dio_cdl_fmt_static,
         'diode_scs_static': dio_spectre_fmt_static,
         'res_metal_cdl': res_metal_cdl_fmt,
         'res_metal_scs': res_metal_spectre_fmt}),
    keep_trailing_newline=True,
)
+
+
def populate_header(config: Dict[str, Any], inc_lines: Dict[DesignOutput, List[str]],
                    inc_list: Dict[int, List[str]]) -> None:
    """Record the include-file list of every netlist format in ``inc_list``.

    Parameters
    ----------
    config : Dict[str, Any]
        Header section of the tech config, keyed by format name
        (e.g. 'CDL'); each entry holds an 'includes' list.
    inc_lines : Dict[DesignOutput, List[str]]
        Netlist-line accumulators; only its keys (the enabled formats)
        are used here.
    inc_list : Dict[int, List[str]]
        Output dictionary, filled in place: format enum value -> includes.
    """
    # Iterate over keys only -- the line accumulators themselves are unused
    # in this function (the original bound them to an unused variable).
    for fmt in inc_lines:
        inc_list[fmt.value] = config[fmt.name]['includes']
+
+
def populate_mos(config: Dict[str, Any], netlist_map: Dict[str, Any],
                 inc_lines: Dict[DesignOutput, List[str]]) -> None:
    """Register every transistor primitive and emit its netlist definition.

    Parameters
    ----------
    config : Dict[str, Any]
        'mos' section of the tech config: 'types' lists
        (cell_name, model_name) pairs; per-format keys give extra
        (parameter, value) assignments for the instance line.
    netlist_map : Dict[str, Any]
        Cell description map, updated in place.
    inc_lines : Dict[DesignOutput, List[str]]
        Per-format netlist-line accumulators, appended to in place.
    """
    for cell_name, model_name in config['types']:
        # Each cell gets its own copy of the default transistor description.
        entry = copy.deepcopy(mos_default)
        entry['cell_name'] = cell_name
        netlist_map[cell_name] = entry

        # Render the wrapper subcircuit for every format that defines a
        # transistor template (the template name is '' otherwise).
        for fmt, lines in inc_lines.items():
            param_list = config[fmt.name]
            template_name = supported_formats[fmt]['mos']
            if not template_name:
                continue
            rendered = jinja_env.get_template(template_name).render(
                cell_name=cell_name,
                model_name=model_name,
                param_list=param_list,
            )
            lines.append('\n')
            lines.append(rendered)
+
+
def populate_diode(config: Dict[str, Any], netlist_map: Dict[str, Any],
                   inc_lines: Dict[DesignOutput, List[str]]) -> None:
    """Register every diode primitive and emit its netlist definition.

    When ``config['static']`` is truthy the parameter-free templates are
    used; otherwise the parameterized ones.

    Parameters
    ----------
    config : Dict[str, Any]
        'diode' section of the tech config: 'types' lists
        (cell_name, model_name) pairs, 'port_order' maps each cell to its
        terminal ordering, and per-format keys give extra parameters.
    netlist_map : Dict[str, Any]
        Cell description map, updated in place.
    inc_lines : Dict[DesignOutput, List[str]]
        Per-format netlist-line accumulators, appended to in place.
    """
    template_key = 'diode_static' if config['static'] else 'diode'

    for cell_name, model_name in config['types']:
        # Each cell gets its own copy of the default diode description.
        entry = copy.deepcopy(dio_default)
        entry['cell_name'] = cell_name
        netlist_map[cell_name] = entry
        # Terminal order is technology-specific and listed per cell.
        ports = config['port_order'][cell_name]

        for fmt, lines in inc_lines.items():
            param_list = config[fmt.name]
            template_name = supported_formats[fmt][template_key]
            if not template_name:
                continue
            lines.append('\n')
            lines.append(jinja_env.get_template(template_name).render(
                cell_name=cell_name,
                model_name=model_name,
                ports=ports,
                param_list=param_list,
            ))
+
+
def populate_res_metal(config: Dict[str, Any], netlist_map: Dict[str, Any],
                       inc_lines: Dict[DesignOutput, List[str]]) -> None:
    """Register every metal-resistor primitive and emit its netlist definition.

    Parameters
    ----------
    config : Dict[str, Any]
        'res_metal' section of the tech config: 'types' lists
        (cell_name, model_name) pairs, one per metal layer; per-format keys
        give extra parameter assignments.  When 'write_res_val' is set, the
        value from 'res_map' is emitted as an explicit 'r' parameter.
    netlist_map : Dict[str, Any]
        Cell description map, updated in place.
    inc_lines : Dict[DesignOutput, List[str]]
        Per-format netlist-line accumulators, appended to in place.
    """
    # Loop-invariant: whether to write an explicit resistance expression
    # (the original re-read this flag on every inner-loop iteration).
    write_res_val = config.get('write_res_val', False)

    for idx, (cell_name, model_name) in enumerate(config['types']):
        # populate netlist_map
        cur_info = copy.deepcopy(res_metal_default)
        cur_info['cell_name'] = cell_name
        netlist_map[cell_name] = cur_info

        # 'res_map' appears to be keyed by 1-based layer index, hence
        # idx + 1 -- TODO confirm against the tech config schema.
        res_expr = '{}*l/w'.format(config['res_map'][idx + 1]) if write_res_val else ''

        # write bag_prim netlist
        for v, lines in inc_lines.items():
            param_list = config[v.name]
            template_name = supported_formats[v]['res_metal']
            new_param_list = param_list.copy()
            if write_res_val:
                new_param_list.append(['r', res_expr])
            if template_name:
                res_metal_template = jinja_env.get_template(template_name)
                lines.append('\n')
                lines.append(
                    res_metal_template.render(
                        cell_name=cell_name,
                        model_name=model_name,
                        param_list=new_param_list,
                    ))
+
+
def populate_custom_cells(inc_lines: Dict[DesignOutput, List[str]]):
    """Append hand-written primitives that exist only for Spectre netlists."""
    # Currently the only custom cell is the ideal balun model.
    inc_lines[DesignOutput.SPECTRE].extend(['\n', scs_ideal_balun])
+
+
def get_info(config: Dict[str, Any], output_dir: Path
             ) -> Tuple[Dict[str, Any], Dict[int, List[str]], Dict[int, str]]:
    """Build the BAG_prim netlist map and write per-format primitive files.

    Parameters
    ----------
    config : Dict[str, Any]
        Parsed technology configuration with 'header', 'mos', 'diode' and
        'res_metal' sections.
    output_dir : Path
        Directory in which the primitive netlist files are written.

    Returns
    -------
    Tuple[Dict[str, Any], Dict[int, List[str]], Dict[int, str]]
        The netlist map (keyed by library 'BAG_prim'), the per-format
        include-file lists, and the per-format primitive file paths
        ('' for formats that produced no primitive netlist).
    """
    netlist_map = {}
    # One netlist-line accumulator per supported output format.
    inc_lines = {v: [] for v in supported_formats}

    inc_list: Dict[int, List[str]] = {}
    populate_header(config['header'], inc_lines, inc_list)
    populate_mos(config['mos'], netlist_map, inc_lines)
    populate_diode(config['diode'], netlist_map, inc_lines)
    populate_res_metal(config['res_metal'], netlist_map, inc_lines)
    populate_custom_cells(inc_lines)

    prim_files: Dict[int, str] = {}
    for v, lines in inc_lines.items():
        fpath = output_dir / supported_formats[v]['fname']
        if lines:
            prim_files[v.value] = str(fpath)
            with open_file(fpath, 'w') as f:
                f.writelines(lines)
        else:
            # No primitives for this format; record an empty path.
            prim_files[v.value] = ''

    return {'BAG_prim': netlist_map}, inc_list, prim_files
+
+
def parse_options() -> Tuple[str, Path]:
    """Parse command-line arguments.

    Returns
    -------
    Tuple[str, Path]
        The technology YAML file name and the output directory path.
    """
    parser = argparse.ArgumentParser(description='Generate netlist setup file.')
    parser.add_argument('config_fname', type=str,
                        help='YAML file containing technology information.')
    parser.add_argument('output_dir', type=str, help='Output directory.')
    namespace = parser.parse_args()
    return namespace.config_fname, Path(namespace.output_dir)
+
+
def main() -> None:
    """Entry point: read the tech config and write netlist_setup.yaml."""
    config_fname, output_dir = parse_options()
    output_dir.mkdir(parents=True, exist_ok=True)

    config = read_yaml(config_fname)
    netlist_map, inc_list, prim_files = get_info(config, output_dir)
    # Simulator built-in libraries (basic/analogLib) are merged on top of
    # the technology-specific primitives.
    netlist_map.update(netlist_map_default)

    write_yaml(output_dir / 'netlist_setup.yaml',
               {'prim_files': prim_files,
                'inc_list': inc_list,
                'netlist_map': netlist_map})
+
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/run_scripts/reformat_schematic.py b/run_scripts/reformat_schematic.py
new file mode 100755
index 0000000..eb12f46
--- /dev/null
+++ b/run_scripts/reformat_schematic.py
@@ -0,0 +1,119 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Reformat BAG2 schematic generator files to BAG3.
+
+NOTE: This is an alpha script, please double check your results.
+"""
+
+from typing import Tuple
+
+import os
+import glob
+import argparse
+
# Template for the new (BAG3) schematic generator file header.  It is
# rendered with str.format, so {lib_name} and {cell_name} are substitution
# fields; everything else (including the embedded class docstring) is
# emitted verbatim into the rewritten generator file.
repl_header = r'''# -*- coding: utf-8 -*-

from typing import Dict, Any

import os
import pkg_resources

from bag.util.immutable import Param
from bag.design.module import Module
from bag.design.database import ModuleDB


# noinspection PyPep8Naming
class {lib_name}__{cell_name}(Module):
    """Module for library {lib_name} cell {cell_name}.

    Fill in high level description here.
    """

    yaml_file = pkg_resources.resource_filename(__name__,
                                                os.path.join('netlist_info',
                                                             '{cell_name}.yaml'))

    def __init__(self, database: ModuleDB, params: Param, **kwargs: Any) -> None:
        Module.__init__(self, self.yaml_file, database, params, **kwargs)
'''
+
+
def parse_options() -> Tuple[str, str]:
    """Parse command-line arguments.

    Returns
    -------
    Tuple[str, str]
        The schematic generator directory and the schematic library name.
    """
    parser = argparse.ArgumentParser(description='Convert BAG2 schematic generators to BAG3.')
    parser.add_argument('root_path', type=str, help='path to schematic generator files.')
    parser.add_argument('lib_name', type=str, help='schematic library name.')
    ns = parser.parse_args()
    return ns.root_path, ns.lib_name
+
+
def main() -> None:
    """Rewrite every schematic generator in the target directory.

    For each ``*.py`` file (except ``__init__.py``) the BAG2 header -- i.e.
    everything up to and including the old ``__init__`` definition line --
    is replaced by the BAG3 ``repl_header`` template; the remainder of the
    file (the method bodies after ``__init__``) is copied through unchanged.
    """
    root_path, lib_name = parse_options()
    os.chdir(root_path)
    for fname in glob.iglob('*.py'):
        if fname == '__init__.py':
            continue

        cell_name = fname[:-3]  # strip the '.py' suffix
        # The rewritten files declare utf-8 in their coding line, so read
        # and write explicitly as utf-8 instead of the platform default.
        with open(fname, 'r', encoding='utf-8') as f:
            lines = f.readlines()

        new_header = repl_header.format(lib_name=lib_name, cell_name=cell_name)
        with open(fname, 'w', encoding='utf-8') as f:
            f.write(new_header)
            # Skip the old file up to (and including) the line that calls
            # ``*.__init__(``; everything after it is preserved verbatim.
            start_write = False
            for line in lines:
                if start_write:
                    f.write(line)
                elif '.__init__(' in line:
                    start_write = True
+
+
# Script entry point.
if __name__ == '__main__':
    main()
diff --git a/run_scripts/reset_repo.sh b/run_scripts/reset_repo.sh
new file mode 100755
index 0000000..e7cc6b0
--- /dev/null
+++ b/run_scripts/reset_repo.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Echo every command before running it.
set -x

# Restore every submodule to a pristine, up-to-date state.  The trailing
# '|| :' keeps the foreach loop going even when an individual submodule
# fails (e.g. detached HEAD or no upstream configured).
git submodule foreach --recursive 'git reset --hard || :'
git submodule foreach --recursive 'git clean -fd || :'
git submodule foreach --recursive 'git pull || :'

# Then do the same for the top-level repository itself.
git reset --hard
git clean -fd
git pull
diff --git a/run_scripts/run_bag.sh b/run_scripts/run_bag.sh
new file mode 100755
index 0000000..1eee0c8
--- /dev/null
+++ b/run_scripts/run_bag.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Set up PYTHONPATH for the workspace, then run the BAG python interpreter.
source .bashrc_pypath

# Quote the expansion so the emptiness test is well-formed even when
# BAG_PYTHON is unset or contains whitespace.
if [ -z "${BAG_PYTHON}" ]
then
    echo "BAG_PYTHON is unset"
    exit 1
fi

# "$@" forwards each original argument intact (including ones with spaces).
# BAG_PYTHON itself stays unquoted on purpose: it may contain an
# interpreter plus extra flags that must be word-split.
exec ${BAG_PYTHON} "$@"
diff --git a/run_scripts/setup_submodules.py b/run_scripts/setup_submodules.py
new file mode 100755
index 0000000..8760a4d
--- /dev/null
+++ b/run_scripts/setup_submodules.py
@@ -0,0 +1,247 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# crazy black magic from:
+# https://unix.stackexchange.com/questions/20880/how-can-i-use-environment-variables-in-my-shebang
+# this block of code is valid in both bash and python.
+# this means if this script is run under bash, it'll
+# call this script again using BAG_PYTHON. If
+# this script is run under Python, this block of code
+# effectively does nothing.
+if "true" : '''\'
+then
+if [[ $BAG_PYTHON ]]; then
+exec ${BAG_PYTHON} "$0" "$@"
+else
+echo "ERROR! BAG_PYTHON environment variable is not set"
+fi
+exit 127
+fi
+'''
+from typing import List, Tuple, Optional, Dict, Any
+
+import subprocess
+from pathlib import Path
+
+from ruamel.yaml import YAML
+yaml = YAML()
+
+BAG_DIR = 'BAG_framework'
+
+
def write_to_file(fname: str, lines: List[str]) -> None:
    """Write ``lines`` (newline-terminated) to ``fname`` and git-add it."""
    with open(fname, 'w') as f:
        f.writelines(line + '\n' for line in lines)
    add_git_file(fname)
+
+
def setup_python_path(module_list: List[Tuple[str, Dict[str, Any]]], repo_path: Path) -> None:
    """Write .bashrc_pypath, which exports PYTHONPATH for the workspace.

    Parameters
    ----------
    module_list : List[Tuple[str, Dict[str, Any]]]
        (module name, module info) pairs for every dependency repo.
    repo_path : Path
        Directory containing the actual submodule checkouts.
    """
    lines = ['#!/usr/bin/env bash',
             '',
             'export PYTHONPATH="${BAG_FRAMEWORK}/src"',
             'export PYTHONPATH="${PYTHONPATH}:${BAG_FRAMEWORK}/pybag/_build/lib"',
             'export PYTHONPATH="${PYTHONPATH}:${BAG_TECH_CONFIG_DIR}/src"',
             ]
    # NOTE: the template deliberately leaves the double quote unclosed;
    # each use below appends the rest of the path plus the closing quote.
    template = 'export PYTHONPATH="${PYTHONPATH}:${BAG_WORK_DIR}/%s'
    for mod_name, _ in module_list:
        if mod_name != BAG_DIR:
            root = template % mod_name
            lines.append(root + '/src"')
            # A module may list extra path entries (one per line) in an
            # optional 'bag_pypath' file at its root.
            inc_path = repo_path / mod_name / 'bag_pypath'
            if inc_path.is_file():
                # noinspection PyTypeChecker
                with open(inc_path, 'r') as f:
                    for line in f:
                        cur = line.strip()
                        if cur:
                            lines.append(f'{root}/{cur}"')

    # Let users append their own entries via PYTHONPATH_CUSTOM.
    lines.append('export PYTHONPATH="${PYTHONPATH}:${PYTHONPATH_CUSTOM:-}"')
    write_to_file('.bashrc_pypath', lines)
+
+
def get_oa_libraries(mod_name: str) -> List[str]:
    """Return the names of all OpenAccess libraries inside module ``mod_name``.

    A library is any directory under ``<mod_name>/OA``; an empty list is
    returned when the module has no OA directory.
    """
    oa_root = Path(mod_name, 'OA').resolve()
    if not oa_root.is_dir():
        return []
    return [str(entry.stem) for entry in oa_root.iterdir() if entry.is_dir()]
+
+
def get_sch_libraries(mod_name: str) -> List[str]:
    """Return the schematic generator libraries defined by module ``mod_name``.

    A schematic library is any directory under ``<mod_name>/src`` that
    contains a ``schematic`` subdirectory.
    """
    src_root = Path(mod_name, 'src')
    if not src_root.is_dir():
        return []
    return [str(entry.stem) for entry in src_root.iterdir()
            if entry.is_dir() and (entry / 'schematic').is_dir()]
+
+
def setup_libs_def(module_list: List[Tuple[str, Dict[str, Any]]]) -> None:
    """Write bag_libs.def listing every schematic generator library."""
    # BAG_prim is always present; each module contributes its libraries.
    lib_names = ['BAG_prim']
    for mod_name, _ in module_list:
        lib_names.extend(get_sch_libraries(mod_name))

    write_to_file('bag_libs.def', lib_names)
+
+
def setup_cds_lib(module_list: List[Tuple[str, Dict[str, Any]]]) -> None:
    """Write cds.lib.bag with a DEFINE entry for every OA library."""
    entries = ['DEFINE BAG_prim $BAG_TECH_CONFIG_DIR/OA/BAG_prim']
    fmt = 'DEFINE {} $BAG_WORK_DIR/{}/OA/{}'
    for mod_name, _ in module_list:
        entries.extend(fmt.format(lib_name, mod_name, lib_name)
                       for lib_name in get_oa_libraries(mod_name))

    write_to_file('cds.lib.bag', entries)
+
+
def run_command(cmd: List[str], cwd: Optional[str] = None, get_output: bool = False) -> str:
    """Run ``cmd`` as a subprocess and wait for it, handling Ctrl-C.

    Parameters
    ----------
    cmd : List[str]
        Command and arguments (executed without a shell).
    cwd : Optional[str]
        Working directory for the child, or None for the current one.
    get_output : bool
        When True, capture stdout and return it (decoded and stripped).

    Returns
    -------
    str
        Captured stdout when ``get_output`` is True; '' otherwise.

    Raises
    ------
    ValueError
        If the command fails, dies from a signal, or cannot be stopped
        after a Ctrl-C.
    """
    # Seconds to wait for the child after terminate()/kill().
    timeout = 5
    print(f'cmd: {" ".join(cmd)}, cwd: {cwd}')
    proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE if get_output else None)
    output = ''
    try:
        # communicate() returns (stdout, stderr); stdout is None unless piped.
        output = proc.communicate()[0]
        if output is not None:
            output = output.decode('utf-8').strip()
    except KeyboardInterrupt:
        print('Ctrl-C detected, terminating')
        if proc.returncode is None:
            # Escalate: polite terminate first, then kill if it ignores us.
            proc.terminate()
            print('terminating process...')
            try:
                proc.wait(timeout=timeout)
                print('process terminated')
            except subprocess.TimeoutExpired:
                proc.kill()
                print('process did not terminate, try killing...')
                try:
                    proc.wait(timeout=timeout)
                    print('process killed')
                except subprocess.TimeoutExpired:
                    print('cannot kill process...')

    # A negative return code means the child was killed by a signal.
    if proc.returncode is None:
        raise ValueError('Ctrl-C detected, but cannot kill process')
    elif proc.returncode < 0:
        raise ValueError('process terminated with return code = %d' % proc.returncode)
    elif proc.returncode > 0:
        raise ValueError('command %s failed' % ' '.join(cmd))

    if get_output:
        print('output: ' + output)
    return output
+
+
def add_git_submodule(module_name: str, url: str, branch: str) -> None:
    """Add ``url`` as a git submodule tracking ``branch`` if not yet present.

    NOTE(review): assumes the checkout directory created by ``git submodule
    add`` (the repo name in ``url``) matches ``module_name`` -- confirm.
    """
    if Path(module_name).exists():
        return
    run_command(['git', 'submodule', 'add', '-b', branch, url])
+
+
def add_git_file(fname: str) -> None:
    """Force-add ``fname`` to the git index (even if it is git-ignored)."""
    run_command(['git', 'add', '-f', fname])
+
+
def link_submodule(repo_path: str, module_name: str) -> None:
    """Symlink ``module_name`` from ``repo_path`` into the current repo.

    Does nothing when the link (or a real checkout) already exists; raises
    ValueError when the source submodule cannot be found.
    """
    link_path = Path(module_name)
    if link_path.exists():
        # Already linked or checked out; nothing to do.
        return

    target = Path(repo_path, module_name)
    if not target.is_dir():
        raise ValueError('Cannot find submodule %s in %s' % (module_name, repo_path))
    link_path.symlink_to(target)
    add_git_file(module_name)
+
+
def setup_git_submodules(module_list: List[Tuple[str, Dict[str, Any]]]) -> None:
    """Register every dependency as a git submodule (branch defaults to master)."""
    for module_name, module_info in module_list:
        add_git_submodule(module_name, module_info['url'], module_info.get('branch', 'master'))
+
+
def setup_submodule_links(module_list: List[Tuple[str, Dict[str, Any]]], repo_path: str) -> None:
    """Symlink every dependency from ``repo_path`` into the current repo.

    NOTE(review): run_main() passes a pathlib.Path for ``repo_path`` despite
    the str annotation; both work since link_submodule only joins it via
    Path(...).  Annotation only -- confirm before tightening.
    """
    for module_name, _ in module_list:
        link_submodule(repo_path, module_name)
+
+
def run_main() -> None:
    """Set up workspace submodules, links, and generated config files.

    Reads bag_submodules.yaml, merges in the default BAG framework entry,
    then either registers the dependencies as git submodules (when this
    repo contains BAG_framework directly) or symlinks them from the sibling
    checkout, and finally regenerates .bashrc_pypath, bag_libs.def and
    cds.lib.bag.
    """
    default_submodules = {
        BAG_DIR: {
            'url': 'git@github.com:bluecheetah/bag.git',
        },
    }

    with open('bag_submodules.yaml', 'r') as f:
        modules_info = yaml.load(f)

    # add default submodules
    for name, info in default_submodules.items():
        if name not in modules_info:
            modules_info[name] = info

    # Sort for deterministic ordering in the generated files.
    module_list = [(key, modules_info[key]) for key in sorted(modules_info.keys())]

    # error checking
    bag_dir_path = Path(BAG_DIR).resolve()
    if not bag_dir_path.is_dir():
        raise ValueError('Cannot find directory %s' % BAG_DIR)

    # get real absolute path of parent directory of BAG_framework
    repo_path = bag_dir_path.parent
    cur_path = Path('.').resolve()
    if cur_path == repo_path:
        # BAG_framework is an actual directory in this repo; add dependencies as git submodules
        setup_git_submodules(module_list)
    else:
        setup_submodule_links(module_list, repo_path)

    setup_python_path(module_list, repo_path)
    setup_libs_def(module_list)
    setup_cds_lib(module_list)
+
+
# Script entry point (reached when executed by Python; see polyglot header).
if __name__ == '__main__':
    run_main()
diff --git a/run_scripts/sim_cell.py b/run_scripts/sim_cell.py
new file mode 100644
index 0000000..290c04e
--- /dev/null
+++ b/run_scripts/sim_cell.py
@@ -0,0 +1,60 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+
+from bag.io import read_yaml
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
def parse_options() -> argparse.Namespace:
    """Parse command-line options for the cell simulation script."""
    parser = argparse.ArgumentParser(description='Simulate cell from spec file.')
    parser.add_argument('specs', help='YAML specs file name.')

    # (flags, dest, action, default, help) for every optional switch.
    flag_table = [
        (('-x', '--extract'), 'extract', 'store_true', False,
         'generate extracted netlist.'),
        (('--no-tb',), 'gen_tb', 'store_false', True,
         'disable testbench generation; simulate using existing files'),
        (('--no-sim',), 'simulate', 'store_false', True,
         'disable simulation; only generate the netlists'),
        (('-mismatch', '--do-mismatch'), 'mismatch', 'store_true', False,
         'enables mismatch analysis'),
        (('--gen-oa',), 'raw', 'store_false', True,
         'enables oa view generation'),
    ]
    for flags, dest, action, default, help_str in flag_table:
        parser.add_argument(*flags, dest=dest, action=action, default=default, help=help_str)

    return parser.parse_args()
+
+
def run_main(prj: BagProject, args: argparse.Namespace) -> None:
    """Load the YAML spec file and run the cell simulation flow on ``prj``."""
    sim_options = dict(extract=args.extract, gen_tb=args.gen_tb, simulate=args.simulate,
                       mismatch=args.mismatch, raw=args.raw)
    prj.simulate_cell(read_yaml(args.specs), **sim_options)
+
+
if __name__ == '__main__':
    _args = parse_options()

    # Reuse an existing BagProject when running inside an interactive session
    # that already defines ``bprj``; otherwise start a fresh one.
    local_dict = locals()
    if 'bprj' in local_dict:
        print('loading BAG project')
        _prj = local_dict['bprj']
    else:
        print('creating BAG project')
        _prj = BagProject()

    run_main(_prj, _args)
diff --git a/run_scripts/start_bag.il b/run_scripts/start_bag.il
new file mode 100644
index 0000000..3e4da13
--- /dev/null
+++ b/run_scripts/start_bag.il
@@ -0,0 +1,2067 @@
+;; SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+;; Copyright 2018 Regents of the University of California
+;; All rights reserved.
+;;
+;; Redistribution and use in source and binary forms, with or without
+;; modification, are permitted provided that the following conditions are met:
+;;
+;; * Redistributions of source code must retain the above copyright notice, this
+;; list of conditions and the following disclaimer.
+;;
+;; * Redistributions in binary form must reproduce the above copyright notice,
+;; this list of conditions and the following disclaimer in the documentation
+;; and/or other materials provided with the distribution.
+;;
+;; * Neither the name of the copyright holder nor the names of its
+;; contributors may be used to endorse or promote products derived from
+;; this software without specific prior written permission.
+;;
+;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+;; Copyright 2019 Blue Cheetah Analog Design Inc.
+;;
+;; Licensed under the Apache License, Version 2.0 (the "License");
+;; you may not use this file except in compliance with the License.
+;; You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+
+/* Note:
+
+Due to licensing reasons, this skill script is missing the function
+CCSinvokeCdfCallbacks() from Cadence solution 11018344, which executes
+CDF parameters callback from skill.
+
+If you do not need to instantiate a pcell instance, this method
+is not needed.
+
+Eric Chang, Mar 2, 2017.
+
+*/
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Virtuoso Database operations functions ;;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+; reads a skill data structure from file
procedure( parse_data_from_file( fname "t" )
    ; Read one serialized skill data structure from the file fname.
    ; Opens the file, delegates parsing to parse_data_from_file_helper,
    ; closes the port, and returns the parsed value.
    ; Raises an error if the file cannot be opened.
    let( (p ans)
        unless( p = infile( fname )
            error("Cannot open file %s" fname)
        )
        ans = parse_data_from_file_helper(p)
        close( p )
        ; return value of the let body
        ans
    )
)
+
+; recursive helper for parse_data_from_file
procedure( parse_data_from_file_helper( p )
    ; Recursively parse one value from input port p.
    ; Wire format: one token per line.  "#list" / "#prop_list" begin
    ; aggregates terminated by a bare "#end" line; "#float", "#int" and
    ; "#bool" prefix scalar values on the same line; any other line is
    ; returned as a raw string (including "#end" itself, which the
    ; aggregate branches use as their terminator).
    let( (line item ans finish key)
        gets( line p )
        ; remove newline
        line = substring(line 1 strlen(line) - 1)
        ; printf("read line: %s\n" line)
        cond(
            (line == "#list"
                ; parse a list; tconc gives O(1) appends, the dummy 0 head
                ; is dropped by cdar at the end
                ans = tconc(nil 0)
                while( nequal(item = parse_data_from_file_helper(p) "#end")
                    tconc(ans item)
                )
                ; printf("returning list ")
                ; print(cdar(ans))
                ; printf("\n")
                cdar(ans)
            )
            (line == "#prop_list"
                ; parse a disembodied property list: alternating key / value
                ; entries until the "#end" sentinel
                ans = ncons(nil)
                finish = nil
                while( !finish
                    key = parse_data_from_file_helper(p)
                    if( key == "#end" then
                        finish = 't
                    else
                        item = parse_data_from_file_helper(p)
                        putprop(ans item key)
                    )
                )
                ans
            )
            ; parse a float
            (strncmp( line "#float" 6 ) == 0
                cdfParseFloatString(cadr(parseString(line)))
            )
            ; parse an int
            (strncmp( line "#int" 4 ) == 0
                atoi(cadr(parseString(line)))
            )
            ; parse a boolean (encoded as 1 / 0)
            (strncmp( line "#bool" 5 ) == 0
                if( atoi(cadr(parseString(line))) == 1 then
                    't
                else
                    nil
                )
            )
            ; parse a string token or #end
            ('t
                ; printf("returning str %s\n" line)
                line
            )
        )
    )
)
+
+; return a list of cells in the given library.
procedure( get_cells_in_library( lib_name "t" )
    ; Return the list of cell names in library lib_name.
    ; The library is opened read-only and released before returning;
    ; a missing library yields an empty list instead of an error.
    let( ( lib_obj ans )
        if( lib_obj = ddGetObj(lib_name nil nil nil nil "r") then
            ans = ddGetObjChildren(lib_obj)~>name
            ddReleaseObj(lib_obj)
        else
            ; library does not exist, return empty list
            ans = '()
        )
        ans
    )
)
+
; write the names of all cells in the given library to a file, one per line.
procedure( get_cells_in_library_file( lib_name fname "tt" )
    ; Write the names of all cells in library lib_name to the file fname,
    ; one cell name per line.
    let( ( p )
        p = outfile( fname "w" )
        foreach( cell get_cells_in_library(lib_name)
            fprintf(p "%s\n" cell)
        )
        close(p)
    )
)
+
+; Returns the directory corresponding to the given library.
procedure( get_lib_directory(lib_name "t")
    ; Return the on-disk directory (readPath) of library lib_name,
    ; or an empty string when the library does not exist.
    let( ( lib_obj ans )
        if( lib_obj = ddGetObj(lib_name nil nil nil nil "r") then
            ans = lib_obj~>readPath
            ddReleaseObj(lib_obj)
        else
            ; library does not exist, return empty string
            ans = ""
        )
        ans
    )
)
+
+; Parse the netlist of the given cellview.
+; Works on schematic and veriloga.
; Parse the netlist of the given cellview.
; Works on schematic and veriloga.
procedure( parse_cad_sch(lib_name cell_name file_name "ttt")
    ; Write a YAML-like description of the schematic (or veriloga) view of
    ; lib_name/cell_name to file_name: library/cell names, the terminal
    ; list, and per-instance master + instance-terminal connectivity.
    ; Raises an error if neither a schematic nor a veriloga view exists.
    ; NOTE: unused locals (tb_list tb_match inst_lib_name inst_cell_name)
    ; were removed from the let declaration; behavior is unchanged.
    let( (cv cell_type p indent direction term_names inst_cnt)
        indent = ""
        cell_type = "schematic"
        ; fall back to the veriloga view when no schematic view exists
        unless( cv = dbOpenCellViewByType( lib_name cell_name "schematic" nil "r" )
            cell_type = "veriloga"
            unless( cv = dbOpenCellViewByType( lib_name cell_name "veriloga" nil "r" )
                error( "Cannot find schematic or veriloga view of cell %s__%s" lib_name cell_name )
            )
        )
        p = outfile( file_name "w" )

        ; print cellview information
        printf( "*INFO* Writing cell %s__%s (%s) netlist to %s\n" lib_name cell_name cell_type file_name )
        fprintf( p "%slib_name: %s\n" indent lib_name )
        fprintf( p "%scell_name: %s\n" indent cell_name )

        ; print pins
        fprintf( p "%spins: [ " indent )
        if( cell_type == "veriloga" then
            ; veriloga terminal list comes back reversed relative to schematic
            term_names = reverse(cv~>terminals~>name)
        else
            term_names = cv~>terminals~>name
        )
        ; add quotes around pin names to escape array pins
        term_names = mapcar( lambda( (x) sprintf(nil "\"%s\"" x) ) term_names )
        fprintf( p "%s ]\n" buildString(term_names ", "))

        ; print instances
        if( not(cv~>instances) then
            fprintf( p "%sinstances: {}\n" indent )
        else
            inst_cnt = 0
            fprintf( p "%sinstances:\n" indent )
            foreach( inst cv~>instances
                inst_cnt++
                ; print entry for instance
                indent = "    "
                fprintf( p "%s%s:\n" indent inst~>name )
                ; print instance master information.
                indent = "        "
                fprintf( p "%slib_name: %s\n" indent inst~>libName )
                fprintf( p "%scell_name: %s\n" indent inst~>cellName )
                ; print instance terminal information
                if( !(inst~>instTerms) then
                    fprintf( p "%sinstpins: {}\n" indent )
                else
                    fprintf( p "%sinstpins:\n" indent )
                    foreach( inst_term inst~>instTerms
                        ; direction may be unset; emit an empty string then
                        unless( direction = inst_term~>direction
                            direction = ""
                        )
                        indent = "            "
                        fprintf( p "%s%s:\n" indent inst_term~>name )
                        indent = "                "
                        fprintf( p "%sdirection: %s\n" indent direction )
                        fprintf( p "%snet_name: \"%s\"\n" indent inst_term~>net~>name )
                        fprintf( p "%snum_bits: %d\n" indent inst_term~>numBits )
                    )
                )
            )
            ; emit an explicit empty mapping when the instance list was empty
            when(inst_cnt == 0
                fprintf( p "    {}\n" )
            )
        )

        ; close resources
        close(p)
        dbClose(cv)
    )
)
+
+; Delete a cellview if it exists. Currently used to delete old calibre file.
procedure( delete_cellview(lib_name cell_name view_name "ttt")
    ; Delete the given cellview if it exists; return 't when there was
    ; nothing to delete.  Currently used to delete old calibre views.
    let( (obj)
        obj = ddGetObj(lib_name cell_name view_name)
        if( obj then
            ddDeleteObj(obj)
        else
            't
        )
    )
)
+
+; Parse the structure of the given cellview.
+; Works on layout.
procedure( parse_cad_layout(lib_name cell_name file_name "ttt")
    ; Write a YAML-like description of the layout view of lib_name/cell_name
    ; to file_name: all rect shapes (layer + bBox), all labels (including
    ; textDisplay objects), and all instances/mosaics (master, origin,
    ; rows/cols/pitch for mosaics, rotation).
    ; Raises an error if the layout view does not exist.
    let( (cv cell_type p indent rect_cnt label_cnt inst_cnt)

        indent = ""
        cell_type = "layout"
        unless( cv = dbOpenCellViewByType( lib_name cell_name cell_type nil "r" )
            error( "Cannot find layout view of cell %s__%s" lib_name cell_name )
        )
        p = outfile( file_name "w" )

        ; print cellview information
        printf( "*INFO* Writing cell %s__%s (%s) netlist to %s\n" lib_name cell_name cell_type file_name )
        fprintf( p "%slib_name: %s\n" indent lib_name )
        fprintf( p "%scell_name: %s\n" indent cell_name )

        ; print rects
        if( not(cv~>shapes) then
            fprintf( p "%srects: {}\n" indent )
        else
            rect_cnt = 0
            fprintf( p "%srects:\n" indent )
            foreach( shape cv~>shapes
                if( (shape~>objType == "rect") then
                    rect_cnt++
                    ; print entry for rect; keyed by 1-based counter
                    indent = "    "
                    fprintf( p "%s%d:\n" indent rect_cnt )
                    ; print rect master information.
                    indent = "        "
                    fprintf( p "%slayer: %s %s\n" indent nthelem(1 shape~>lpp) nthelem(2 shape~>lpp))
                    fprintf( p "%sbBox: [[%f, %f], [%f, %f]]\n" indent
                             nthelem(1 nthelem(1 shape~>bBox)) nthelem(2 nthelem(1 shape~>bBox))
                             nthelem(1 nthelem(2 shape~>bBox)) nthelem(2 nthelem(2 shape~>bBox))
                    );fprintf
                )
            );if
            ; emit explicit empty mapping when no rects were found
            if((rect_cnt == 0) then
                fprintf( p "    {}\n" )
            );if
        )

        ; print labels
        indent = ""
        if( not(cv~>shapes) then
            fprintf( p "%slabels: {}\n" indent )
        else
            label_cnt = 0
            fprintf( p "%slabels:\n" indent )
            foreach( shape cv~>shapes
                if( (shape~>objType == "label") then
                    label_cnt++
                    ; print entry for label
                    indent = "    "
                    fprintf( p "%s%d:\n" indent label_cnt )
                    ; print label master information.
                    indent = "        "
                    fprintf( p "%slabel: %s\n" indent shape~>theLabel )
                    fprintf( p "%slayer: %s %s\n" indent nthelem(1 shape~>lpp) nthelem(2 shape~>lpp))
                    fprintf( p "%sxy: [%f, %f]\n" indent nthelem(1 shape~>xy) nthelem(2 shape~>xy))
                )
                if( (shape~>objType == "textDisplay") then ;some labels are instantiated as text displays
                    label_cnt++
                    ; print entry for label
                    indent = "    "
                    fprintf( p "%s%d:\n" indent label_cnt )
                    ; print label master information; textDisplays take their
                    ; text from the owning object's name
                    indent = "        "
                    fprintf( p "%slabel: %s\n" indent shape~>owner~>name )
                    fprintf( p "%slayer: %s %s\n" indent nthelem(1 shape~>lpp) nthelem(2 shape~>lpp))
                    fprintf( p "%sxy: [%f, %f]\n" indent nthelem(1 shape~>xy) nthelem(2 shape~>xy))
                )
            );if
            if((label_cnt == 0) then
                fprintf( p "    {}\n" )
            );if
        )

        ; print instances
        indent = ""
        if( not(cv~>instances) then
            fprintf( p "%sinstances: {}\n" indent )
        else
            inst_cnt = 0
            fprintf( p "%sinstances:\n" indent )
            foreach( inst cv~>instances
                inst_cnt++
                ; print entry for instance
                indent = "    "
                fprintf( p "%s%s:\n" indent inst~>name )
                ; print instance master information.
                indent = "        "
                fprintf( p "%slib_name: %s\n" indent inst~>libName )
                fprintf( p "%scell_name: %s\n" indent inst~>cellName )
                fprintf( p "%sxy: [%f, %f]\n" indent nthelem(1 inst~>xy) nthelem(2 inst~>xy))
                ; mosaics carry array information; plain instances only a rotation
                if( (inst~>objType == "mosaic") then
                    fprintf( p "%scols: %d\n" indent inst~>columns)
                    fprintf( p "%srows: %d\n" indent inst~>rows)
                    fprintf( p "%ssp_cols: %f\n" indent inst~>uX)
                    fprintf( p "%ssp_rows: %f\n" indent inst~>uY)
                    fprintf( p "%srotation: %s\n" indent car(inst~>tileArray))
                else
                    fprintf( p "%srotation: %s\n" indent inst~>orient)
                );if
            )
            when(inst_cnt == 0
                fprintf( p "    {}\n" )
            )
        )

        ; close resources
        close(p)
        dbClose(cv)
    )
)
+
; write a list of the cells contained in the specified library to a file
procedure( get_cell_list(lib_name file_name "tt")
    ; Write the cells of library lib_name to file_name as a single
    ; flow-style list: "lib_name: [cell1, cell2, ]".
    let( (lib cellname p)
        lib=ddGetObj(lib_name)
        p = outfile( file_name "w" )
        fprintf( p "%s: [" lib_name)
        foreach( cellname lib~>cells~>name
            fprintf( p "%s, " cellname)
        );foreach
        fprintf( p "] \n" )
        ; close resources
        close(p)
    );let
)
+
+; if library with lib_name does not exists, create a new
+; library with that name. Otherwise, if erase is true,
+; remove all cells in that library. Returns the library
+; database object.
procedure( create_or_erase_library(lib_name tech_lib lib_path erase "tttg")
    ; Ensure library lib_name exists.  If it already exists and erase is
    ; non-nil, delete every cell in it.  Otherwise create it under
    ; lib_path (or the current directory when lib_path is "." or nil)
    ; and attach the technology library tech_lib.  Returns 't.
    let( (lib_obj)
        if( lib_obj = ddGetObj(lib_name nil nil nil nil "r") then
            when( erase
                ; delete all cells in the library
                foreach( cell lib_obj~>cells
                    unless( ddDeleteObj(cell)
                        error("cannot delete cell %s in library %s\n" cell~>name lib_name)
                    )
                )
            )
            ddReleaseObj(lib_obj)
            't
        else
            ; create library if not exist
            when( and(lib_path (lib_path != "."))
                ; place the new library in its own subdirectory of lib_path
                lib_path = strcat(lib_path "/" lib_name)
            )
            lib_obj = ddCreateLib(lib_name lib_path)
            ; attach technology file
            techBindTechFile(lib_obj tech_lib)
            ; close library
            ddReleaseObj(lib_obj)
            't
        )
    )
)
+
+; copy all template cells to the given library.
+; template list is a list of three-element lists with the format
+; '("master_lib_name" "master_cell_name" "target_cell_name")
+; any existing cellviews will be overwritten.
; copy all template cells to the given library.
; template list is a list of three-element lists with the format
; '("master_lib_name" "master_cell_name" "target_cell_name")
; any existing cellviews will be overwritten.
procedure( copy_templates_to_library(lib_name template_list "tl")
    ; Copy each (master_lib master_cell target_cell) template into lib_name
    ; via ccpCopy.  Because ccpCopy cannot copy one source cell to several
    ; targets in a single call, duplicates are deferred to later iterations
    ; of the outer while loop.  Open target cellviews (schematic/symbol)
    ; are purged and stale adexl state deleted before copying.
    ; Returns 't.
    ; NOTE: the unused local `cnt` was removed from the let declaration.
    ; NOTE(review): targ_lib_obj is obtained with ddGetObj but never passed
    ; to ddReleaseObj here — looks like a dangling reference; confirm.
    let( (current remaining src_gdm targ_gdm table master_lib master_cell target_cell key
          empty_spec targ_lib_obj test_cv)

        current = template_list
        remaining = '()
        empty_spec = gdmCreateSpecList()
        targ_lib_obj = ddGetObj(lib_name nil nil nil nil "r")

        ; ccpCopy cannot copy the same cell to multiple different cells.
        ; because of this, we need to copy a set of unique cells at a time,
        ; hence the while loop.
        while( current
            ; Create GDMSpecList used to copy all cells
            src_gdm = gdmCreateSpecList()
            targ_gdm = gdmCreateSpecList()
            ; table to keep track of seen cells.
            table = makeTable("mytable" 0)
            ; Populate GDMSpecList
            foreach( template_info current
                master_lib = car(template_info)
                master_cell = cadr(template_info)
                target_cell = caddr(template_info)

                ; check if we copied this cell on this iteration yet
                key = list(master_lib master_cell)
                if( table[key] == 1 then
                    ; wait for the next iteration
                    remaining = cons(template_info remaining)
                else
                    ; purge target cellview if exist
                    when( targ_lib_obj
                        test_cv = dbFindOpenCellView(targ_lib_obj target_cell "schematic")
                        when( test_cv
                            dbPurge(test_cv)
                        )
                        test_cv = dbFindOpenCellView(targ_lib_obj target_cell "symbol")
                        when( test_cv
                            dbPurge(test_cv)
                        )
                        ; hard remove adexl state if it exists
                        test_cv = ddGetObj(lib_name target_cell "adexl")
                        when( test_cv
                            ddDeleteObj(test_cv)
                        )
                    )
                    gdmAddSpecToSpecList(gdmCreateSpec(master_lib master_cell nil nil "CDBA") src_gdm)
                    gdmAddSpecToSpecList(gdmCreateSpec(lib_name target_cell nil nil "CDBA") targ_gdm)
                    table[key] = 1
                )
            )
            ; Perform copy
            ccpCopy(src_gdm targ_gdm 't 'CCP_EXPAND_COMANAGED nil nil "" "" 'CCP_UPDATE_FROM_LIBLIST empty_spec)

            ; set current and remaining
            current = remaining
            remaining = '()

            ; debug printing
            ; printstruct(table)
        )
    )
    't
)
+
+; returns a unique terminal name in the given cellview.
+; name_base is the suffix of the returned terminal name.
procedure( get_unique_term_name( cvid name_base "gt")
    ; Return a terminal name of the form "temp<N>_<name_base>" that does
    ; not yet exist in cellview cvid, incrementing N until unique.
    let( (cnt new_term_name)
        cnt = 1
        sprintf( new_term_name "temp%d_%s" cnt name_base )
        while( dbFindTermByName(cvid new_term_name)
            cnt = cnt + 1
            sprintf( new_term_name "temp%d_%s" cnt name_base )
        )
        new_term_name
    )
)
+
+; helper method to open pin master
procedure( open_pin_master(cvid pin_cv_info)
    ; Open and return the pin master cellview described by pin_cv_info,
    ; a 3-element list (lib cell view).  On failure, closes cvid before
    ; raising an error so no edit lock is left behind.
    let( (pin_master mpin_lib mpin_cell mpin_view)
        mpin_lib = car(pin_cv_info)
        mpin_cell = cadr(pin_cv_info)
        mpin_view = caddr(pin_cv_info)
        unless( pin_master = dbOpenCellViewByType( mpin_lib mpin_cell mpin_view nil "r" )
            dbClose(cvid)
            error( "Cannot find pin master cellview: %s__%s (%s)" mpin_lib mpin_cell mpin_view)
        )
        pin_master
    )
)
+
+; update pins of a schematic
+; cvid is the opened cellview id of the schematic. It must be in append mode.
+; pin_map is a list of two-element lists of old pin names and new pin names, respectively.
+; ipin, opin, and iopin are lists of three strings for input/output/inout pins, respectively.
; first element is the pin master library, second element is the pin master cell, and third element
+; is the pin master cellview.
procedure( update_schematic_pin(cvid pin_map new_pins ipin opin iopin "glllll")
    ; Rename, remove, and add pins on an open (append-mode) schematic cvid.
    ; pin_map entries are (old_name new_name); an empty new_name deletes the
    ; pin.  new_pins entries are (name direction).  ipin/opin/iopin are
    ; (lib cell view) triples for the input/output/inout pin masters.
    ; On any error the opened masters and cvid are closed before raising.
    let( (snap_dist cur_term_name new_term_name term pin pin_orient pin_location pin_direction
          temp_new_term_name pin_master ipin_master opin_master iopin_master
          pin_xy_info npin_xl npin_yl npin_xr npin_yr npin_name npin_type)

        snap_dist = schGetEnv("schSnapSpacing")

        ; open pin masters
        ipin_master = open_pin_master(cvid ipin)
        opin_master = open_pin_master(cvid opin)
        iopin_master = open_pin_master(cvid iopin)
        pin_master = nil

        ; get new pin locations before any pin addition/substraction.
        pin_xy_info = get_new_pin_locations(cvid snap_dist)

        ; rename or remove pins
        foreach( p pin_map
            cur_term_name = car(p)
            new_term_name = cadr(p)
            ; printf("%s %s\n" cur_term_name new_term_name)
            when(cur_term_name != new_term_name
                unless( term = dbFindTermByName(cvid cur_term_name)
                    dbClose(cvid)
                    dbClose(ipin_master)
                    dbClose(opin_master)
                    dbClose(iopin_master)
                    error( "Terminal %s not found." cur_term_name )
                )
                ; the single-wire/single-pin assumption keeps the rewiring simple
                when( term~>pinCount != 1
                    dbClose(cvid)
                    dbClose(ipin_master)
                    dbClose(opin_master)
                    dbClose(iopin_master)
                    error( "Terminal %s does not have exactly one pin." cur_term_name)
                )
                pin = car(term~>pins)

                if( strlen(new_term_name) != 0 then
                    ; rename pin: recreate the pin figure at the same location
                    pin_orient = pin~>fig~>orient
                    pin_location = pin~>fig~>xy
                    pin_direction = term~>direction

                    ; create new pin figure
                    cond( ( pin_direction == "input" pin_master = ipin_master)
                          ( pin_direction == "output" pin_master = opin_master)
                          ( 't pin_master = iopin_master)
                    )

                    ; delete pin
                    unless( dbDeleteObject(pin~>fig)
                        dbClose(cvid)
                        dbClose(ipin_master)
                        dbClose(opin_master)
                        dbClose(iopin_master)
                        error( "Cannot delete pin for terminal %s" cur_term_name )
                    )

                    ; create a temporary terminal with a unique name so we can change the number of bits without getting an error
                    temp_new_term_name = get_unique_term_name(cvid new_term_name)
                    schCreatePin(cvid pin_master temp_new_term_name pin_direction nil pin_location "R0" )

                    ; now rename the new terminal
                    new_term = dbFindTermByName(cvid temp_new_term_name )
                    new_term~>name = new_term_name
                else
                    ; remove pin
                    dbDeleteObject(pin~>fig)
                )
            )
        )

        ; add new pins
        when( new_pins
            ; get location for new pins: inputs stack below the left-most
            ; existing pin, outputs below the right-most
            npin_xl = xCoord(car(pin_xy_info))
            npin_yl = yCoord(car(pin_xy_info)) - 2 * snap_dist
            npin_xr = xCoord(cadr(pin_xy_info))
            npin_yr = yCoord(cadr(pin_xy_info)) - 2 * snap_dist
            foreach( npin_info new_pins
                npin_name = car(npin_info)
                npin_type = cadr(npin_info)

                ; verify that this pin does not exist yet
                when(dbFindTermByName(cvid npin_name)
                    dbClose(cvid)
                    dbClose(ipin_master)
                    dbClose(opin_master)
                    dbClose(iopin_master)
                    error( "Terminal %s already exists" npin_name)
                )

                ; get pin location based on pin type
                cond( ( npin_type == "input" pin_master = ipin_master pin_location = npin_xl:npin_yl npin_yl = npin_yl - 2 * snap_dist)
                      ( npin_type == "output" pin_master = opin_master pin_location = npin_xr:npin_yr npin_yr = npin_yr - 2 * snap_dist)
                      ( 't pin_master = iopin_master pin_location = npin_xl:npin_yl npin_yl = npin_yl - 2 * snap_dist)
                )
                ; create pin
                schCreatePin(cvid pin_master npin_name npin_type nil pin_location "R0")
            )
        )

        dbClose(ipin_master)
        dbClose(opin_master)
        dbClose(iopin_master)
    )
)
+
+; find X and Y coordinates to insert new symbol pins
procedure( get_new_pin_locations(cvid snap_dist)
    ; Return a 2-element list of points (left right) below which new pins
    ; should be inserted: the left-most and right-most existing pin X
    ; coordinates, each paired with the lowest Y coordinate seen at that X.
    ; Coordinates are quantized to snap_dist to avoid float rounding error.
    let( (pin bbox pin_x pin_y xl xr yl yr)
        ; find the left-most/right-most pin X coordinates, and find the lowest
        ; Y coordinate of the left-most/right-most pins
        xl = nil
        xr = nil
        yl = nil
        yr = nil
        foreach( term cvid->terminals
            when( term~>pinCount != 1
                dbClose(cvid)
                error( "Terminal %s does not have exactly one pin" term~>name)
            )
            pin = car(term~>pins)
            bbox = pin~>fig~>bBox
            ; pin center in units of snap_dist
            pin_x = round2((xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0 / snap_dist)
            pin_y = round2((yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0 / snap_dist)
            if( xl == nil then
                ; first pin initializes both extremes
                xl = pin_x
                xr = pin_x
                yl = pin_y
                yr = pin_y
            else
                cond( (pin_x < xl xl = pin_x yl = pin_y)
                      (pin_x == xl yl = min(yl pin_y)))
                cond( (pin_x > xr xr = pin_x yr = pin_y)
                      (pin_x == xr yr = min(yr pin_y)))
            )
        )
        when(xl == nil
            ; default values if schematic has no terminals
            ; this usually means you have a testbench schematic
            xl = 0
            yl = 0
            xr = 10
            yr = 0
        )
        ; scale back from snap units to schematic coordinates
        list((xl * snap_dist):(yl * snap_dist) (xr * snap_dist):(yr * snap_dist))
    )
)
+
+; update pins of a symbol
+; pin_map is a list of two-element lists, first element is old pin name, second element is new pin name.
+; sympin is a 3-element list of strings. first element is the pin master library,
+; second element is the pin mater cell, and third element is the pin master cellview.
+; simulators is a list of simulator names for which termOrder should be updated.
+; Usually simulators = '("auLvs" "auCdl" "spectre" "hspiceD")
procedure( update_symbol_pin(lib_name cell_name pin_map new_pins sympin simulators "ttllll")
    ; Rename, remove, and add pins on the symbol view of lib_name/cell_name,
    ; recreating pin labels, then update the simulator termOrder CDF entries.
    ; pin_map entries are (old_name new_name); an empty new_name deletes the
    ; pin.  new_pins entries are (name direction).  sympin is the
    ; (lib cell view) triple of the symbol pin master.  simulators lists the
    ; simInfo sections whose termOrder is rewritten (e.g. auLvs, auCdl).
    let( (snap_dist cvid pin_master cur_term_name new_term_name term pin bbox pin_x pin_y pin_location pin_direction
          label_location label_rel_location temp_new_term_name new_term new_port_order cell_obj bc
          mpin_lib mpin_cell mpin_view pin_xy_info npin_xl npin_yl npin_xr npin_yr npin_name npin_type
          modified_pins)

        snap_dist = schGetEnv("schSnapSpacing")
        modified_pins = nil
        mpin_lib = car(sympin)
        mpin_cell = cadr(sympin)
        mpin_view = caddr(sympin)
        unless( pin_master = dbOpenCellViewByType(mpin_lib mpin_cell mpin_view nil "r")
            error("Cannot open symbol pin cellview %s__%s (%s)." mpin_lib mpin_cell mpin_view)
        )
        unless( cvid = dbOpenCellViewByType(lib_name cell_name "symbol" nil "a")
            dbClose(pin_master)
            error("Cannot open cellview %s__%s (symbol)." lib_name cell_name)
        )

        ; get new pin locations before any pin addition/substraction.
        pin_xy_info = get_new_pin_locations(cvid snap_dist)

        ; modify existing pins; new_port_order accumulates the final pin order
        ; (tconc list seeded with a dummy "" that cdar strips later)
        new_port_order = tconc(nil "")
        foreach( p pin_map
            cur_term_name = car(p)
            new_term_name = cadr(p)
            new_port_order = tconc(new_port_order new_term_name)
            when( cur_term_name != new_term_name
                modified_pins = 't
                ; printf("%s %s\n" cur_term_name new_term_name)
                unless( term = dbFindTermByName(cvid cur_term_name)
                    dbClose(pin_master)
                    dbReopen(cvid, "r")
                    dbClose(cvid)
                    error( "Terminal %s not found." cur_term_name )
                )
                when( term~>pinCount != 1
                    dbClose(pin_master)
                    dbReopen(cvid, "r")
                    dbClose(cvid)
                    error( "Terminal %s does not have exactly one pin." cur_term_name)
                )
                pin = car(term~>pins)

                if( strlen(new_term_name) != 0 then
                    ; rename pin: snap the old pin center to the grid and
                    ; recreate pin + label there under the new name
                    bbox = pin~>fig~>bBox
                    pin_x = (xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0
                    pin_y = (yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0
                    pin_location = round2(pin_x / snap_dist) * snap_dist:round2(pin_y / snap_dist) * snap_dist
                    pin_direction = term~>direction

                    ; change label: find the label child matching the old name
                    ; and recreate it with the new name (prog used for early return)
                    prog( (label_orientation label_font label_font_size label_type label_text)
                        foreach( label pin~>fig~>children
                            when( label~>objType == "label"
                                label_location = label~>xy
                                label_orientation = label~>orient
                                label_rel_location = label~>justify
                                label_font = label~>font
                                label_font_size = label~>height
                                label_type = label~>labelType
                                label_text = label~>theLabel
                                when( label_text == cur_term_name
                                    schCreateSymbolLabel(cvid label_location "pin label" new_term_name label_rel_location
                                                         label_orientation label_font label_font_size label_type)
                                    return('t)
                                )
                            )
                        )
                        return(nil)
                    )

                    dbDeleteObject(pin~>fig)
                    dbDeleteObject(pin)

                    ;create a temporary terminal with a unique name so we can change the number of bits without getting an error
                    temp_new_term_name = get_unique_term_name(cvid new_term_name)
                    schCreateSymbolPin(cvid pin_master temp_new_term_name pin_direction pin_location "R0" )

                    new_term = dbFindTermByName(cvid temp_new_term_name )
                    dbDeleteObject(term)
                    new_term~>name = new_term_name
                else
                    ; remove pin, its figure, and the terminal itself
                    dbDeleteObject(pin~>fig)
                    dbDeleteObject(pin)
                    dbDeleteObject(term)
                )
            )
        )

        ; add new pins
        when( new_pins
            modified_pins = 't
            ; get location for new pins: inputs/inouts stack below the
            ; left-most pin, outputs below the right-most
            npin_xl = xCoord(car(pin_xy_info))
            npin_yl = yCoord(car(pin_xy_info)) - 2 * snap_dist
            npin_xr = xCoord(cadr(pin_xy_info))
            npin_yr = yCoord(cadr(pin_xy_info)) - 2 * snap_dist
            foreach( npin_info new_pins
                npin_name = car(npin_info)
                npin_type = cadr(npin_info)

                ; verify that this pin does not exist yet
                when(dbFindTermByName(cvid npin_name)
                    dbClose(pin_master)
                    dbReopen(cvid, "r")
                    dbClose(cvid)
                    error( "Terminal %s already exists" npin_name)
                )

                ; update pin order
                new_port_order = tconc(new_port_order npin_name)

                ; get pin location based on pin type
                if( equal(npin_type "output") then
                    label_location = npin_xr:npin_yr
                    label_rel_location = "lowerLeft"
                    npin_yr = npin_yr - 2 * snap_dist
                else
                    label_location = npin_xl:npin_yl
                    label_rel_location = "lowerRight"
                    npin_yl = npin_yl - 2 * snap_dist
                )

                ; create label and pin
                schCreateSymbolLabel(cvid label_location "pin label" npin_name label_rel_location
                                     "R0" "stick" snap_dist "normalLabel")
                schCreateSymbolPin(cvid pin_master npin_name npin_type label_location "R0")
            )
        )

        dbClose(pin_master)

        when( modified_pins
            ; update pin order (drop the dummy "" head of the tconc list)
            new_port_order = cdar(new_port_order)
            schEditPinOrder(cvid new_port_order 't)
            dbSave(cvid)

            ; update termOrder for each simulators
            cell_obj = ddGetObj(lib_name cell_name nil nil nil "r")
            unless( bc = cdfGetBaseCellCDF(cell_obj)
                ddReleaseObj(cell_obj)
                dbReopen(cvid, "r")
                dbClose(cvid)
                error("Cannot find CDF parameters for %s__%s. Delete generated cell and try again" lib_name cell_name)
            )
            foreach( simu simulators
                get(bc->simInfo simu)->termOrder = new_port_order
            )
            unless( cdfSaveCDF(bc)
                ddReleaseObj(cell_obj)
                dbReopen(cvid, "r")
                dbClose(cvid)
                error("Cannot save termOrder CDF for %s__%s." lib_name cell_name)
            )
            ddReleaseObj(cell_obj)
        )
        ; opening schematic will open all symbols inside that schematic.
        ; as the result, dbClose may not close this symbol view. To get rid
        ; of edit lock, we use dbReopen so even if dbClose fails the edit lock
        ; will be gone.
        dbReopen(cvid, "r")
        dbClose(cvid)
    )
)
+
+; record an association list from pin name to pin location in units of snap distances.
+; the pin name is sorted alphabetically so we can use the equal function to test
+; for equality.
; record an association list from pin name to pin location in units of snap distances.
; the pin name is sorted alphabetically so we can use the equal function to test
; for equality.
procedure( get_instance_pin_info(inst "g")
    ; Return an association list (term_name x:y) of the instance master's
    ; terminal pin centers, quantized to the schematic snap spacing and
    ; sorted alphabetically by terminal name.
    ; FIX: `bbox` was assigned without being declared in the let, leaking
    ; it as a global variable; it is now a proper local.
    let( (snap_dist term_name pin_fig bbox xval yval inst_term_xy ans)
        ans = nil
        snap_dist = schGetEnv("schSnapSpacing")
        foreach( term inst->master->terminals
            term_name = term~>name
            ; get terminal coordinate in symbol
            pin_fig = car(term~>pins)~>fig
            bbox = pin_fig~>bBox
            xval = (xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0
            yval = (yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0
            ; quantize to schematic snap spacing to avoid floating point rounding error.
            inst_term_xy = round2(xval / snap_dist):round2(yval / snap_dist)
            ans = cons(list(term_name inst_term_xy) ans)
        )
        sortcar(ans nil)
    )
)
+
+; get all the wire objects connected to terminals of the given instance.
+; we assume each terminal has exactly one pin with 1 wire connected, with a
+; single label on the wire. The wire doesn't connect to anything else.
+; returns an association list from terminal name to a list of net name and wire figure object.
; get all the wire objects connected to terminals of the given instance.
; we assume each terminal has exactly one pin with 1 wire connected, with a
; single label on the wire. The wire doesn't connect to anything else.
; returns an association list from terminal name to a list of net name and wire figure object.
procedure( get_instance_terminal_wires(sch inst "gg")
    ; Return list(wire_map net_map) for instance inst in schematic sch:
    ; wire_map associates terminal names with the single 2-point wire figure
    ; touching that terminal's pin; net_map associates terminal names with
    ; their connected net names.
    ; FIX: `bbox` and `points` were assigned without being declared in the
    ; let, leaking them as globals; they are now proper locals.
    let( (snap_dist term_name pin_fig bbox points xval yval inst_term_xy net_name ans net_map)
        ans = nil
        net_map = nil
        snap_dist = schGetEnv("schSnapSpacing")
        foreach( inst_term inst~>instTerms
            term_name = inst_term~>name
            ; printf("terminal name: %s\n" term_name)
            when( inst_term~>term~>pinCount != 1
                dbClose(sch)
                error("Terminal %s must have exactly one pin." term_name)
            )
            unless( pin_fig = car(inst_term~>term~>pins)~>fig
                dbClose(sch)
                error("Cannot find pin figure for terminal %s" term_name)
            )
            ; get instance terminal coordinate in schematic
            bbox = dbTransformBBox(pin_fig~>bBox inst~>transform)
            ; printf("terminal pin fig bbox: %A\n" bbox)
            xval = xCoord(car(bbox)) + (xCoord(cadr(bbox)) - xCoord(car(bbox))) / 2.0
            yval = yCoord(car(bbox)) + (yCoord(cadr(bbox)) - yCoord(car(bbox))) / 2.0
            ; quantize to schematic snap spacing to avoid floating point rounding error.
            inst_term_xy = round2(xval / snap_dist) * snap_dist:round2(yval / snap_dist) * snap_dist
            net_name = inst_term~>net~>name
            net_map = cons(list(term_name net_name) net_map)
            ; printf("terminal pin x/y: %A\n" inst_term_xy)
            foreach( fig inst_term~>net~>figs
                points = fig~>points
                ; printf("figure points: %A\n" points)
                when( member(inst_term_xy points)
                    ; a wire touching the pin must be a simple 2-point segment
                    when( length(points) != 2
                        error("pin for terminal %s must be connected to a single wire with label" term_name)
                    )
                    ; printf("adding figure for terminal %s\n" term_name)
                    ans = cons(list(term_name fig) ans)
                )
            )
        )
        list(ans net_map)
    )
)
+
+; Modify the instance terminal connections of the given instance.
+; we assume each terminal to modify has at most 1 wire connected,
+; if it exists, the wire connects to nothing else, and it has a label.
+; In this way, this function just have to change the label text.
+;
+; if wire_list is not empty, then that means each terminal has exactly one
+; wire connected. This function will update the label on the wires according
+; to term_mapping.
+;
+; if wire_list is empty, then that means no wires are connected to terminals.
+; this function will attach labels directly to each terminal. The labels are
+; determined first from term_mapping, then from net_map
+;
+; sch is the schematic database object. Must be opened in append/write mode.
+; inst is the instance object to modify.
+; term_mapping is a list of key-value pairs, where keys are old net names,
+; and values are new net names.
procedure( modify_instance_terminal(sch inst wire_list net_map term_mapping "gglll")
    ; Re-label the nets attached to the terminals of instance `inst`.
    ; sch          : schematic cellview database object, opened in append/write mode.
    ; inst         : instance database object whose connections are updated.
    ; wire_list    : list of (term_name wire_fig) pairs; when non-empty, each terminal
    ;                has exactly one labeled wire and only the wire label is rewritten.
    ; net_map      : list of (term_name net_name) pairs recording the current nets;
    ;                used as a fallback label source when no wires are connected.
    ; term_mapping : association list mapping old net names to new net names.
    ; Returns 't on success.
    let( (snap_dist key_val old_name new_name fig points mid_point new_wire inst_term inst_pin
          bbox xval yval term_map_final db_term)
        ; get schematic snap distance spacing.
        snap_dist = schGetEnv("schSnapSpacing")
        if( wire_list then
            foreach( wire_info wire_list
                old_name = car(wire_info)
                ; only touch wires whose current net name appears in term_mapping
                when(key_val = assoc(old_name term_mapping)
                    new_name = cadr(key_val)
                    fig = cadr(wire_info)
                    points = fig~>points
                    ; midpoint of the two-point wire; used as the new label anchor
                    mid_point = foreach(mapcar (c1 c2) car(points) cadr(points) (c1 + c2) / 2.0)
                    ; delete old wire, then add wire back with new label.
                    schDelete(fig)
                    new_wire = car(schCreateWire(sch "draw" "full" points snap_dist snap_dist 0))
                    schCreateWireLabel(sch new_wire mid_point new_name "lowerCenter" "R0" "stick" 0.0625 nil)
                )
            )
            't
        else
            ; combine net_map and term_mapping
            term_map_final = copy(term_mapping)
            foreach( net_info net_map
                old_name = car(net_info)
                unless( assoc(old_name term_map_final)
                    ; add net mapping only if it's not in term_mapping
                    term_map_final = cons(net_info term_map_final)
                )
            )
            foreach( net_info term_map_final
                old_name = car(net_info)
                new_name = cadr(net_info)

                when(db_term = dbFindTermByName(inst->master old_name)
                    ; only create terminal that's present in the current master
                    inst_term = dbCreateInstTerm(nil inst db_term)
                    inst_pin = car(inst_term~>term~>pins)~>fig
                    ; pin bounding box transformed into schematic coordinates
                    bbox = dbTransformBBox(inst_pin~>bBox inst~>transform)
                    xval = (xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0
                    yval = (yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0
                    ; snap the pin center to the schematic grid to avoid
                    ; floating point rounding errors
                    xval = round2(xval / snap_dist) * snap_dist
                    yval = round2(yval / snap_dist) * snap_dist

                    ; draw a one-grid stub wire ending on the pin, then label it
                    new_wire = car(schCreateWire(sch "draw" "full" list(xval-snap_dist:yval-snap_dist xval:yval)
                                                 snap_dist snap_dist 0))
                    schCreateWireLabel(sch new_wire xval:yval new_name "lowerCenter" "R0" "stick" 0.0625 nil)
                )
            )
            't
        )
    )
)
+
; open the given cellview in append mode, then run check-and-save on it.
procedure( check_and_save_cell(lib_name cell_name view_name "ttt")
    ; Open the given cellview in append mode and run hierarchical
    ; check-and-save on it (check_and_save_schematic also closes it).
    ; Raises an error if the cellview cannot be opened.
    let( (cv)
        ; attempt to open schematic in append mode
        unless( cv = dbOpenCellViewByType(lib_name cell_name view_name nil "a")
            error("Cannot open %s__%s (%s) in append mode." lib_name cell_name view_name)
        )
        ; check and save schematic.
        ; BUGFIX: the original passed the undefined symbol `sch` here instead
        ; of the freshly opened cellview `cv`.  Also removed the unused local
        ; `errs` from the let list.
        check_and_save_schematic(cv)
    )
)
+
+; Perform check-and-save on the given schematic database object, then close it.
procedure( check_and_save_schematic(sch "g")
    ; Run hierarchical check-and-save on the schematic cellview `sch`,
    ; warn about every cell that reported check errors, then release the
    ; cellview's edit locks and close it.
    let( (err_list)
        ; configure the checker to save while descending the hierarchy
        schSetEnv( "checkHierSave" 't)
        schSetEnv( "saveAction" "Save")
        err_list = schCheckHier(sch "schematic symbol" "")
        ; each entry pairs a cellview with its error count; report them all
        foreach( err_info err_list
            warn( "%s__%s (%s) has %d errors." car(err_info)~>lib~>name car(err_info)~>cellName car(err_info)~>viewName cadr(err_info))
        )
        ; make sure all edit locks are gone by reopening in read mode
        dbReopen(sch, "r")
        dbClose(sch)
    )
)
+
+
+; modify a schematic cell. Used to convert copied template cells into concrete instantiation.
+;
+; inst_list is an association list of (inst_name, rinst_list) pairs. Where:
+;
+; inst_name : name of the instance in the template cell.
+; rinst_list : a list of rinsts, which are instances to replace the original instance by.
+; If this list is empty, the original instance should be deleted. If the list
+; has more than one element, we should array the original instance.
+;
+; Each rinst is a disembodied property lists, with the properties:
+;
+; rinst->name : the name of this rinst.
+; rinst->lib_name : the instance master library.
+; rinst->cell_name : the instance master cell.
+; rinst->params : an association list of the CDF params of this rinst. The values are always string.
+; rinst->term_mapping : an association list of the modified terminal connections of this rinst.
+; if no connections are changed, this list should be empty.
+;
+; (You can read more about disembodied property lists and association list in the skill
+; language user guide).
+;
+; For each instance, this function does the following:
+; 1. Find the instance with the given name.
+; 2. If rinst_list is nil, delete this instance.
+; 3. If rinst_list has exactly one element:
+; i. rename the instance name to rinst's name.
+; ii. change the instance master of the instance.
+; iii. change the CDF parameters (this should only happen with BAG primitives).
+; iv. change the port connections of this instance.
+; 4. If rinst_list has more than one element, for each additional element,
+; copy the original instance and perform step 3 on that instance.
+;
+; This procedure allows one to delete or array any instances in the schematic template.
procedure( modify_schematic_content(sch_cv inst_list "gl")
    ; Delete, replace, or array instances of a copied template schematic.
    ; sch_cv    : schematic cellview database object opened with edit access.
    ; inst_list : association list of (inst_name rinst_list) pairs; see the
    ;             comment block above this procedure for the rinst format.
    let( (inst_obj inst_name rinst_list rinst_len cur_inst wire_list net_map par_val xl xr transform
          snap_dist errmsg pin_info tmp_result)
        snap_dist = schGetEnv("schSnapSpacing")
        foreach( inst inst_list
            inst_name = car(inst)
            unless( inst_obj = dbFindAnyInstByName(sch_cv inst_name)
                dbClose(sch_cv)
                error( "Cannot find instance %s" inst_name )
            )
            rinst_list = cadr(inst)
            rinst_len = length(rinst_list)
            ; NOTE(review): last_inst is assigned but never read afterwards,
            ; and it is not declared in the let list above, so it leaks into
            ; the global namespace -- confirm whether it can be removed.
            last_inst = nil
            if( rinst_len == 0 then
                ; no instances to replace by, delete.
                wire_list = car(get_instance_terminal_wires(sch_cv inst_obj))
                ; delete wires connected to instance
                foreach( wire_info wire_list
                    schDelete(cadr(wire_info))
                )
                ; delete instance
                dbDeleteObject(inst_obj)
            else
                cur_inst = nil
                pin_info = nil
                foreach( rinst rinst_list
                    if( !cur_inst then
                        ; first replacement reuses the original instance in place
                        cur_inst = inst_obj
                        ; printf("inst %s lib = %s, cell = %s\n" inst_name inst_obj->master->libName inst_obj->master->cellName)
                        tmp_result = get_instance_terminal_wires(sch_cv cur_inst)
                        net_map = cadr(tmp_result)
                        wire_list = car(tmp_result)
                        pin_info = get_instance_pin_info(cur_inst)
                        ; printf("%s wire_list: %A\n" inst_name wire_list)
                        ; figure out bounding box for potential future array
                        ; printf("instance %s bbox: %A\n" cur_inst~>name cur_inst~>bBox)
                        xl = xCoord(car(cur_inst~>bBox))
                        xr = xCoord(cadr(cur_inst~>bBox))
                        ; widen the array pitch so connected wires are covered too
                        foreach( wire_info wire_list
                            ; printf("instance %s wire: %A %A\n" cur_inst~>name xCoord(car(cadr(wire_info)~>bBox)) xCoord(cadr(cadr(wire_info)~>bBox)))
                            xl = min(xl xCoord(car(cadr(wire_info)~>bBox)))
                            xr = max(xr xCoord(cadr(cadr(wire_info)~>bBox)))
                        )
                        ; horizontal shift applied to each additional array copy,
                        ; quantized to the schematic snap grid
                        transform = list(round2((xr - xl + snap_dist) / snap_dist) * snap_dist:0 "R0" 1.0)
                        ; printf("instance %s transform: %A\n" cur_inst~>name transform)
                    else
                        ; more than 1 rinst, copy cur_inst, do not copy wires
                        wire_list = nil
                        ; copy instance
                        cur_inst = dbCopyFig(cur_inst nil transform)
                    )
                    ; change instance name and master
                    when(cur_inst->name != rinst->name
                        cur_inst->name = rinst->name
                    )
                    schReplaceProperty(list(cur_inst) "master" sprintf(nil "%s %s %s" rinst->lib_name
                                                                       rinst->cell_name cur_inst->viewName))
                    ; set parameters
                    foreach( cdf_par cdfGetInstCDF(cur_inst)~>parameters
                        par_val = cadr(assoc(cdf_par->name rinst->params))
                        ; change CDF parameter value only if specified in given parameters
                        when( par_val != nil
                            cdf_par->value = par_val
                        )
                    )
                    when( wire_list
                        ; if wire_list is not empty, check that the pins match. If so, keep wires around,
                        ; otherwise, delete wires
                        unless( equal(pin_info get_instance_pin_info(cur_inst))
                            ; delete wires connected to instance
                            foreach( wire_info wire_list
                                schDelete(cadr(wire_info))
                            )
                            wire_list = nil
                        )
                    )
                    ; modify connections, keeping old wires around
                    ; printf("instance %s wire_list: %A net_map: %A term_map: %A\n" cur_inst~>name wire_list net_map rinst->term_mapping)
                    modify_instance_terminal(sch_cv cur_inst wire_list net_map rinst->term_mapping)
                )
            )
        )
    )
)
+
+; given a copied template cell, modify it to a concrete schematic.
procedure( convert_template_cells(lib_name cell_name pin_map new_pins inst_list sympin ipin opin iopin simulators)
    ; Convert a copied template cell into a concrete schematic:
    ; update the symbol pins (if a symbol view exists), replace/array the
    ; instances, update the schematic pins, and finally check-and-save.
    let( (sym_cv sch)
        ; update symbol view first.
        if( sym_cv = dbOpenCellViewByType(lib_name cell_name "symbol" nil "r") then
            ; the cellview was opened only to test for existence; close the
            ; read handle so it is not leaked (BUGFIX: was never closed)
            dbClose(sym_cv)
            printf("*INFO* Updating %s__%s symbol pins.\n" lib_name cell_name)
            update_symbol_pin(lib_name cell_name pin_map new_pins sympin simulators)
        else
            warn("Did not find symbol for %s__%s. Skipping. Is it testbench?" lib_name cell_name)
        )

        ; attempt to open schematic in append mode
        unless( sch = dbOpenCellViewByType(lib_name cell_name "schematic" nil "a")
            error("Cannot open %s__%s (schematic) in append mode." lib_name cell_name)
        )
        ; update schematic content
        printf("*INFO* Updating %s__%s instances and connections.\n" lib_name cell_name)
        modify_schematic_content(sch inst_list)
        ; update schematic pins
        printf("*INFO* Updating %s__%s schematic pins.\n" lib_name cell_name)
        update_schematic_pin(sch pin_map new_pins ipin opin iopin)
        ; check_and_save_schematic also closes sch
        check_and_save_schematic(sch)
    )
)
+
+; create concrete schematics
procedure( create_concrete_schematic( lib_name tech_lib lib_path temp_file change_file
                                      sympin ipin opin iopin simulators copy "tttttlllllg" )
    ; Create concrete schematics from template cells.
    ; temp_file and change_file are files holding the template list and the
    ; per-cell change list; when `copy` is true, the target library is
    ; (re)created and the templates are copied into it first.
    ; BUGFIX: new_pins was missing from the locals list below and leaked
    ; into the global namespace.
    let( (template_list change_list cell_name pin_map new_pins inst_list)
        printf("*INFO* Reading template and change list from file\n")
        template_list = parse_data_from_file( temp_file )
        change_list = parse_data_from_file( change_file )
        when( copy
            printf("*INFO* Creating library: %s\n" lib_name)
            create_or_erase_library( lib_name tech_lib lib_path nil )
            printf("*INFO* Copying templates to library: %s\n" lib_name)
            copy_templates_to_library( lib_name template_list )
        )
        foreach( change change_list
            cell_name = change->name
            pin_map = change->pin_map
            new_pins = change->new_pins
            inst_list = change->inst_list
            printf("*INFO* Updating cell %s__%s\n" lib_name cell_name)
            convert_template_cells( lib_name cell_name pin_map new_pins inst_list
                                    sympin ipin opin iopin simulators )
        )
        't
    )
)
+
+; create a new layout view then instantiate a single pcell instance.
+; this method also copy all the labels in the pcell top level. In this way LVS/PEX will
+; work correctly.
+; params is a list of (variable_name type_string value) lists.
+; pin_mapping is a list of (old_pin new_pin) lists.
procedure( create_layout_with_pcell(lib_name cell_name view_name inst_lib inst_cell params_f pin_mapping_f "ttttttt")
    ; Create a new layout view containing a single pcell instance, copying all
    ; top-level labels of the pcell master into the new view (renamed per
    ; pin_mapping) so that LVS/PEX resolve the correct pin names.
    ; params_f      : file with a list of (variable_name type_string value) lists.
    ; pin_mapping_f : file with a list of (old_pin new_pin) lists.
    let( (lay_cv inst_master inst inst_shapes label_location label_orientation label_lpp
          label_just label_font label_height label_text mapped params pin_mapping)
        unless( lay_cv = dbOpenCellViewByType(lib_name cell_name view_name "maskLayout" "w")
            error("Cannot open cellview %s__%s (%s)." lib_name cell_name view_name)
        )
        unless( inst_master = dbOpenCellViewByType(inst_lib inst_cell "layout" "maskLayout" "r")
            dbClose(lay_cv)
            error("Cannot open cellview %s__%s (layout)." inst_lib inst_cell)
        )

        params = parse_data_from_file(params_f)
        pin_mapping = parse_data_from_file(pin_mapping_f)

        inst = dbCreateParamInst(lay_cv inst_master "XTOP" '(0 0) "R0" 1 params)
        inst_shapes = inst~>master~>shapes

        foreach(shape inst_shapes
            when( shape->objType == "label"
                label_location = shape~>xy
                label_orientation = shape~>orient
                label_lpp = shape~>lpp
                label_just = shape~>justify
                label_font = shape~>font
                label_height = shape~>height
                label_text = shape~>theLabel
                ; rename the label if it appears in the pin mapping
                ; (look up the mapping once instead of twice)
                when( mapped = cadr(assoc(label_text pin_mapping))
                    label_text = mapped
                )
                ; NOTE: the master's labelType is not propagated; dbCreateLabel
                ; takes no such argument (removed the unused label_type local).
                dbCreateLabel(lay_cv label_lpp label_location label_text label_just label_orientation label_font label_height )
            )
        )

        dbClose(inst_master)
        dbSave(lay_cv)
        dbClose(lay_cv)
    )
)
+
+; helper for creating a path segment
procedure( create_path_seg_helper(cv lay p0 p1 width start_s end_s)
    ; Create a single path segment from p0 to p1 on layer `lay`.
    ; Diagonal (45-degree) segments get a sqrt(2)-widened wire with
    ; half-width diagonal extensions; "round" end styles are emulated
    ; through custom end extensions of width/2.
    let( (ext begin_ext end_ext ext_info)
        if( and(car(p0) != car(p1) cadr(p0) != cadr(p1)) then
            ; diagonal segment: extend by half the original width,
            ; then widen the drawn wire by sqrt(2)
            ext = width / 2
            width = width * sqrt(2)
        else
            ; manhattan segment: diagonal extension is half the diagonal width
            ext = width * sqrt(2) / 2
        )

        begin_ext = 0
        end_ext = 0
        when( start_s == "round"
            begin_ext = width / 2
            start_s = "custom"
        )
        when( end_s == "round"
            end_ext = width / 2
            end_s = "custom"
        )
        ext_info = list(begin_ext end_ext list(ext ext width/2 ext ext width/2))
        dbCreatePathSeg(cv lay p0 p1 width start_s end_s ext_info)
    )
)
+
+
+; helper for creating a path
procedure( create_path_helper( cv path )
    ; Draw the path described by the disembodied property list `path` as a
    ; chain of individual path segments.  path->end_style applies at the two
    ; outer ends of the chain; path->join_style applies at interior joints.
    let( (lay width pts estyle jstyle prev npts cnt seg_start seg_end)
        lay = path->layer
        width = path->width
        pts = path->points
        estyle = path->end_style
        jstyle = path->join_style
        npts = length(pts)
        prev = nil
        cnt = 0
        foreach( pt pts
            when( cnt > 0
                ; the first segment starts with the end style, all others
                ; start with the join style
                seg_start = if( cnt == 1 estyle jstyle )
                ; the last segment ends with the end style, all others end
                ; with the join style
                seg_end = if( cnt == npts - 1 estyle jstyle )
                create_path_seg_helper(cv lay prev pt width seg_start seg_end)
            )
            prev = pt
            cnt = cnt + 1
        )
    )
)
+
+
+; helper for creating a single layout view
procedure( create_layout_helper( cv tech_file inst_list rect_list via_list pin_list path_list
                                 blockage_list boundary_list polygon_list "ggllllllll" )
    ; Populate the layout cellview `cv` with the given geometries.  Each list
    ; element is a disembodied property list describing one instance,
    ; rectangle, via, pin, path, blockage, boundary, or polygon.
    ; tech_file : technology file used to resolve via definitions by name.
    let( (inst_cv obj via_def via_enc1 via_enc2 enc1 enc2 off1 off2 via_params make_pin_rect
          pin_bb pin_w pin_h pin_xc pin_yc pin_orient label_h param_order orig_shape arr_dx arr_dy)

        ; create instances
        foreach( inst inst_list
            if( inst_cv = dbOpenCellViewByType( inst->lib inst->cell inst->view nil "r" ) then

                if( and( inst->num_rows==1 inst->num_cols==1) then
                    if( inst->params != nil then
                        ; create pcell instance
                        obj = dbCreateParamInst(cv inst_cv inst->name inst->loc inst->orient 1 inst->params)
                        ; execute parameter callbacks
                        when( obj
                            if( inst->param_order != nil then
                                param_order = inst->param_order
                            else
                                ; no explicit callback order; use parameter list order
                                param_order = mapcar( lambda( (x) car(x) ) inst->params )
                            )
                            CCSinvokeCdfCallbacks(obj ?order param_order)
                        )
                    else
                        obj = dbCreateInst(cv inst_cv inst->name inst->loc inst->orient)
                    )
                else
                    if( inst->params != nil then
                        ; create pcell mosaic
                        obj = dbCreateParamSimpleMosaic(cv inst_cv inst->name inst->loc inst->orient
                                                        inst->num_rows inst->num_cols inst->sp_rows inst->sp_cols
                                                        inst->params)
                        ; execute parameter callbacks
                        when( obj
                            if( inst->param_order != nil then
                                param_order = inst->param_order
                            else
                                param_order = mapcar( lambda( (x) car(x) ) inst->params )
                            )
                            CCSinvokeCdfCallbacks(obj ?order param_order)
                        )
                    else
                        obj = dbCreateSimpleMosaic(cv inst_cv inst->name inst->loc inst->orient
                                                   inst->num_rows inst->num_cols inst->sp_rows inst->sp_cols)
                    )
                )
                unless( obj
                    warn("Error creating instance %s of %s__%s (%s). Skipping." inst->name inst->lib inst->cell inst->view)
                )

            else
                warn("Cannot find instance %s__%s (%s). Skipping." inst->lib inst->cell inst->view)
            )
        )

        ; create rectangles
        foreach( rect rect_list
            orig_shape = dbCreateRect(cv rect->layer rect->bbox)
            if( not(orig_shape) then
                warn("Error creating rectangle of layer %A. Skipping." rect->layer)
            else
                ; optionally array the rectangle; the original shape is the
                ; (row 1, col 1) element of the array
                when( rect->arr_nx != nil
                    for(icol 2 rect->arr_nx
                        arr_dx = rect->arr_spx * (icol - 1)
                        for(irow 1 rect->arr_ny
                            arr_dy = rect->arr_spy * (irow - 1)
                            dbCopyFig(orig_shape nil list(arr_dx:arr_dy "R0" 1))
                        )
                    )
                    ; first column: rows 2..arr_ny (row 1 is the original shape)
                    for(irow 2 rect->arr_ny
                        arr_dy = rect->arr_spy * (irow - 1)
                        dbCopyFig(orig_shape nil list(0:arr_dy "R0" 1))
                    )
                )
            )
        )

        ; create paths
        foreach( path path_list
            create_path_helper(cv path)
        )

        ; create polygons
        foreach( poly polygon_list
            dbCreatePolygon(cv poly->layer poly->points)
        )

        ; create blockages
        foreach( block blockage_list
            if( block->btype == "placement" then
                dbCreateAreaBlockage(cv block->points)
            else
                dbCreateLayerBlockage(cv block->layer block->btype block->points)
            )
        )

        ; create boundaries
        foreach( bound boundary_list
            cond( (bound->btype == "PR"
                   dbCreatePRBoundary(cv bound->points))
                  (bound->btype == "snap"
                   dbCreateSnapBoundary(cv bound->points))
                  (bound->btype == "area"
                   dbCreateAreaBoundary(cv bound->points))
                  ('t
                   warn("Unknown boundary type %s. Skipping." bound->btype))
            )
        )

        ; create vias
        foreach( via via_list
            if( via_def = techFindViaDefByName(tech_file via->id) then
                ; compute via parameter list
                ; the enc lists appear to hold four per-side enclosure values;
                ; they are folded into an (x y) average enclosure plus an
                ; (x y) offset for each layer -- TODO confirm ordering
                via_enc1 = via->enc1
                via_enc2 = via->enc2
                enc1 = list( (car(via_enc1) + cadr(via_enc1)) / 2.0
                             (caddr(via_enc1) + cadr(cddr(via_enc1))) / 2.0 )
                enc2 = list( (car(via_enc2) + cadr(via_enc2)) / 2.0
                             (caddr(via_enc2) + cadr(cddr(via_enc2))) / 2.0 )
                off1 = list( (cadr(via_enc1) - car(via_enc1)) / 2.0
                             (caddr(via_enc1) - cadr(cddr(via_enc1))) / 2.0 )
                off2 = list( (cadr(via_enc2) - car(via_enc2)) / 2.0
                             (caddr(via_enc2) - cadr(cddr(via_enc2))) / 2.0 )

                via_params = list( list("cutRows" via->num_rows)
                                   list("cutColumns" via->num_cols)
                                   list("cutSpacing" list(via->sp_cols via->sp_rows))
                                   list("layer1Enc" enc1)
                                   list("layer2Enc" enc2)
                                   list("layer1Offset" off1)
                                   list("layer2Offset" off2) )

                ; if via width and height given, add to via_params
                when( via->cut_width != nil
                    via_params = cons( list("cutWidth" via->cut_width) via_params)
                )
                when( via->cut_height != nil
                    via_params = cons( list("cutHeight" via->cut_height) via_params)
                )

                ; create actual via
                orig_shape = dbCreateVia(cv via_def via->loc via->orient via_params)
                if( not(orig_shape) then
                    warn("Error creating via %s. Skipping." via->id)
                else
                    ; optionally array the via, same scheme as rectangles above
                    when( via->arr_nx != nil
                        for(icol 2 via->arr_nx
                            arr_dx = via->arr_spx * (icol - 1)
                            for(irow 1 via->arr_ny
                                arr_dy = via->arr_spy * (irow - 1)
                                dbCopyFig(orig_shape nil list(arr_dx:arr_dy "R0" 1))
                            )
                        )
                        for(irow 2 via->arr_ny
                            arr_dy = via->arr_spy * (irow - 1)
                            dbCopyFig(orig_shape nil list(0:arr_dy "R0" 1))
                        )
                    )
                )
            else
                warn("Via %s not found. Skipping." via->id)
            )
        )

        ; create pins
        foreach( pin pin_list
            pin_bb = pin->bbox
            pin_w = caadr(pin_bb) - caar(pin_bb)
            pin_h = cadr(cadr(pin_bb)) - cadr(car(pin_bb))
            pin_xc = (caar(pin_bb) + caadr(pin_bb)) / 2.0
            pin_yc = (cadr(car(pin_bb)) + cadr(cadr(pin_bb))) / 2.0

            ; orient the label along the longer side of the pin rectangle
            if( pin_w >= pin_h then
                pin_orient = "R0"
                label_h = pin_h
            else
                pin_orient = "R90"
                label_h = pin_w
            )

            ; get make_pin_rect, true if both net_name and pin_name are non-empty
            make_pin_rect = pin->net_name != "" && pin->pin_name != ""
            ; an explicit make_rect property overrides the default
            when( pin->make_rect != nil
                make_pin_rect = pin->make_rect
            )
            ; printf("make_pin_rect: %A\n" make_pin_rect)
            ; create pin object only if make_pin_rect is True.
            when( make_pin_rect != 0 && make_pin_rect != nil
                ; printf("making pin.\n")
                dbCreatePin( dbMakeNet(cv pin->net_name) dbCreateRect(cv pin->layer pin_bb) pin->pin_name )
            )
            ; printf("%A %A %A %A\n" pin->label pin->layer pin_xc pin_yc)
            ; the label is always created, even when no pin object is made
            dbCreateLabel( cv pin->layer list(pin_xc pin_yc) pin->label "centerCenter" pin_orient "roman" label_h )
        )
    )
)
+
+; create a new layout view with the given geometries
; layout_f is a file containing, per cell, the cell name and lists of disembodied property lists.
procedure( create_layout( lib_name view_name via_tech layout_f "tttt" )
    ; Create new layout views with the given geometries.
    ; layout_f holds, per cell, the cell name followed by the instance,
    ; rectangle, via, pin, path, blockage, boundary, and polygon lists.
    ; BUGFIX: the argument type template was "ttt" for FOUR string
    ; parameters; it must declare all four ("tttt").
    let( (tech_file layout_info cell_name inst_list rect_list via_list pin_list
          path_list blockage_list boundary_list polygon_list cv)

        unless( tech_file = techGetTechFile(ddGetObj(via_tech))
            error("Via technology file %s not found." via_tech)
        )

        layout_info = parse_data_from_file(layout_f)
        foreach( info layout_info
            ; unpack the per-cell record
            cell_name = nthelem(1 info)
            inst_list = nthelem(2 info)
            rect_list = nthelem(3 info)
            via_list = nthelem(4 info)
            pin_list = nthelem(5 info)
            path_list = nthelem(6 info)
            blockage_list = nthelem(7 info)
            boundary_list = nthelem(8 info)
            polygon_list = nthelem(9 info)

            unless( cv = dbOpenCellViewByType( lib_name cell_name view_name "maskLayout" "w" )
                error("Cannot create new layout cell %s__%s (%s)." lib_name cell_name view_name)
            )

            printf("Creating %s__%s (%s)\n" lib_name cell_name view_name)
            create_layout_helper(cv tech_file inst_list rect_list via_list pin_list path_list
                                 blockage_list boundary_list polygon_list)

            dbSave(cv)
            dbClose(cv)
        )

        t
    )
)
+
+
+; close all opened cellviews
procedure( close_all_cellviews()
    ; Purge every cellview that is currently open in this session.
    foreach( open_cv dbGetOpenCellViews()
        dbPurge(open_cv)
    )
    't
)
+
+
+; release write locks from all the given cellviews
procedure( release_write_locks( lib_name cell_view_list_f "tt" )
    ; Release write locks on the (cell view) pairs read from cell_view_list_f
    ; by reopening each currently-open cellview in read mode, then closing it.
    let( (cv_list lib_obj cv)
        cv_list = parse_data_from_file(cell_view_list_f)
        when( lib_obj = ddGetObj(lib_name nil nil nil nil "r")
            foreach( entry cv_list
                ; entry is (cell_name view_name); skip cellviews that are not open
                when( cv = dbFindOpenCellView( lib_obj car(entry) cadr(entry) )
                    dbReopen(cv, "r")
                    dbClose(cv)
                )
            )
            ddReleaseObj(lib_obj)
        )
        t
    )
)
+
+; refresh all given cell views
procedure( refresh_cellviews( lib_name cell_view_list_f "tt" )
    ; Refresh from disk every open cellview named in cell_view_list_f.
    let( (cv_list lib_obj cv)
        cv_list = parse_data_from_file(cell_view_list_f)
        when( lib_obj = ddGetObj(lib_name nil nil nil nil "r")
            foreach( entry cv_list
                ; entry is (cell_name view_name); ignore unopened cellviews
                when( cv = dbFindOpenCellView( lib_obj car(entry) cadr(entry) )
                    dbRefreshCellView(cv)
                )
            )
            ddReleaseObj(lib_obj)
        )
        t
    )
)
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Simulation/Testbench related functions ;;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+
+; set an entry in an association list
+; returns the modified association list.
procedure( set_assoc_list(mylist mykey myval)
    ; Overwrite the value bound to mykey in the association list mylist.
    ; The matching entry is modified in place; when mykey is absent the list
    ; is returned unchanged (no new entry is added).
    let( (entry)
        when( entry = assoc(mykey mylist)
            rplacd(entry list(myval))
        )
        mylist
    )
)
+
+; Copy the schematic of a testbench, and replace the DUT instance.
+;
+; This procedure copies the schematic of a testbench to a new library and cell, then finds all
+; instances with the name prefix "XDUT", then change their instance master to dut_lib and dut_cell.
+;
procedure( copy_testbench(master_lib master_cell targ_lib
                          dut_lib dut_cell tech_lib new_lib_path "ttttttt")
    ; Copy the schematic of a testbench to targ_lib, then retarget every
    ; instance whose name starts with "XDUT" to the (dut_lib, dut_cell)
    ; symbol.  Raises an error if no instance was replaced.
    ; BUGFIX: src_gdm/targ_gdm are now declared as locals instead of leaking
    ; into the global namespace.
    let( (tlib_obj sch replace_count inst_prefix new_master src_gdm targ_gdm)
        inst_prefix = "XDUT"

        printf("Copying testbench %s__%s to %s__%s\n" master_lib master_cell targ_lib master_cell)

        ; create target library if does not exist
        unless( tlib_obj = ddGetObj(targ_lib nil nil nil nil "r")
            when( and(new_lib_path (new_lib_path != "."))
                ; BUGFIX: the original referenced the undefined variable
                ; lib_name here; the new library directory is named after
                ; the target library.
                new_lib_path = strcat(new_lib_path "/" targ_lib)
            )
            tlib_obj = ddCreateLib(targ_lib new_lib_path)
            ; attach technology file
            techBindTechFile(tlib_obj tech_lib)
        )

        ; copy testbench to new library
        src_gdm = gdmCreateSpecList()
        gdmAddSpecToSpecList(gdmCreateSpec(master_lib master_cell nil nil "CDBA") src_gdm)
        targ_gdm = gdmCreateSpecList()
        gdmAddSpecToSpecList(gdmCreateSpec(targ_lib master_cell nil nil "CDBA") targ_gdm)
        ccpCopy(src_gdm targ_gdm 't 'CCP_EXPAND_COMANAGED)

        ; open copied schematic
        unless( sch = dbOpenCellViewByType(tlib_obj master_cell "schematic" nil "a")
            ddReleaseObj(tlib_obj)
            error("Cannot open testbench schematic %s__%s" targ_lib master_cell)
        )

        ; replace instances whose name starts with the DUT prefix
        replace_count = 0
        sprintf(new_master "%s %s symbol" dut_lib dut_cell)
        foreach( inst sch~>instances
            when( strncmp( inst~>name inst_prefix strlen(inst_prefix) ) == 0
                replace_count = replace_count + 1
                schReplaceProperty(list(inst) "master" new_master)
            )
        )

        ; save and close resources
        check_and_save_schematic(sch)
        ddReleaseObj(tlib_obj)

        ; error if nothing is replaced
        when( replace_count == 0
            error("Cannot find any instances in %s__%s with name prefix %s" targ_lib master_cell inst_prefix)
        )
        't
    )
)
+
+; opens an adexl session. Returns a list of session name and setup database handle.
procedure( open_adexl_session(tb_lib tb_cell tb_view session_name mode "ttttt")
    ; Open an adexl session on the given testbench cellview.
    ; Returns list(session setup_database).
    ; NOTE(review): the `mode` parameter is currently unused -- confirm
    ; whether it was meant to control setup-database access mode.
    let( (axl_session setup_db)
        unless( axl_session = axlCreateSession(session_name)
            error("Cannot create temporary adexl session: %s" session_name)
        )
        unless( setup_db = axlSetMainSetupDBLCV(axl_session tb_lib tb_cell tb_view)
            ; clean up the half-opened session before failing
            axlCloseSession(axl_session)
            error("Cannot load adexl database from %s__%s (%s)" tb_lib tb_cell tb_view)
        )
        list(axl_session setup_db)
    )
)
+
+; Enables only the given corners in the simulation setup database.
procedure( enable_adexl_corners( sdb corner_list env_param_list "gll")
    ; Enable exactly the corners named in corner_list (all other corners are
    ; disabled), then apply per-corner environment parameter overrides.
    let( (corner_obj env_name settings)
        ; enable/disable each existing corner based on list membership
        foreach(cname cadr(axlGetCorners(sdb))
            axlSetEnabled( axlGetCorner(sdb cname) member(cname corner_list) )
        )
        ; env_param_list entries are (corner_name ((par val) ...))
        foreach(env_entry env_param_list
            env_name = car(env_entry)
            settings = cadr(env_entry)
            corner_obj = axlGetCorner(sdb env_name)
            foreach(setting settings
                axlPutVar(corner_obj car(setting) cadr(setting))
            )
        )
    )
)
+
+; Set testbench parameters
+; val_list is an association list from variable names to variable values as string, which
+; could be a constant value or a parametric sweep string
procedure( set_adexl_parameters(sdb par_val_list "gl")
    ; Set testbench variables on the setup database.  par_val_list is an
    ; association list of (name value) pairs; each value is a string holding
    ; either a constant or a parametric sweep expression.
    foreach( pair par_val_list
        axlPutVar(sdb car(pair) cadr(pair))
    )
)
+
+; Create a new config view for a testbench.
+;
+; lib_name : testbench library name.
+; cell_name : testbench cell name.
+; view_name : name of the config view (a testbench can have multiple config views)
+; libs : a string of global libraries, separated by spaces.
+; views : a string of cellviews to use, separated by spaces.
+; stops : a string of cellviews to stop at, separated by spaces.
procedure( create_config_view(lib_name cell_name view_name libs views stops "tttttt")
    ; Create a new config view for a testbench.
    ; libs/views/stops are space-separated strings for the configuration's
    ; default library list, view list, and stop list.
    let( (cfg cfg_bag)
        printf("Creating config view %s__%s (%s)\n" lib_name cell_name view_name)

        unless( cfg = hdbOpen(lib_name cell_name view_name "w")
            error("Cannot open config view %s__%s (%s)." lib_name cell_name view_name)
        )
        ; the top cell is always the testbench's own schematic
        hdbSetTopCellViewName(cfg lib_name cell_name "schematic")
        hdbSetDefaultLibListString(cfg libs)
        hdbSetDefaultViewListString(cfg views)
        hdbSetDefaultStopListString(cfg stops)
        hdbSaveAs(cfg lib_name cell_name view_name)

        ; close configuration via a config bag
        cfg_bag = hdbCreateConfigBag()
        hdbAddConfigToBag(cfg_bag cfg)
        hdbCloseConfigsInBag(cfg_bag)
    )
)
+
+; edit the config view of a testbench. Use to control whether we're simulating with
+; schematic or post-extraction.
+;
+; lib_name : testbench library name.
+; cell_name : testbench cell name.
+; view_name : name of the config view (a testbench can have multiple config views)
; conf_list : a list of (lib, cell, view) configurations, where each entry
; means that <view> should be used for the cell <cell> in library <lib>.
procedure( edit_config_view(lib_name cell_name view_name conf_list "tttl")
    ; Edit the config view of a testbench to select which view (schematic,
    ; extracted, or raw netlist) is bound to each cell; see the comment block
    ; above this procedure for the conf_list format.
    let( (conf lib cell view conf_bag netlist_list)
        unless( conf = hdbOpen(lib_name cell_name view_name "a")
            error("Cannot open config view %s__%s (%s)." lib_name cell_name view_name)
        )
        netlist_list = '()
        foreach( cell_config conf_list
            lib = car(cell_config)
            cell = cadr(cell_config)
            view = caddr(cell_config)
            if( view == "netlist" then
                ; use an extracted netlist file for this cell; this cannot be
                ; expressed as an hdb binding rule, so it is collected here and
                ; handled by edit_config_source_files below
                netlist_list = cons(list(lib cell) netlist_list)
            else
                ; bind the requested view to this (lib cell) in the config
                hdbSetObjBindRule(conf list(list(lib cell nil nil))
                                  list('hdbcBindingRule list(nil nil view)))
            )
        )
        hdbSaveAs(conf lib_name cell_name view_name)

        ; close configuration
        conf_bag = hdbCreateConfigBag()
        hdbAddConfigToBag(conf_bag conf)
        hdbCloseConfigsInBag(conf_bag)

        ; update netlist source files
        edit_config_source_files(lib_name cell_name view_name netlist_list)
    )
)
+
+; HACKERMAN FUNCTION:
+; so as usual, cadence is so terrible they don't have skill API to set source files.
+; instead, spice/spectre source files are defined in a secret ASCII prop.cfg file.
+; this hacky method will create the right prop.cfg file for you.
procedure( edit_config_source_files(lib_name cell_name view_name netlist_list "tttl")
    ; Regenerate the config view's prop.cfg file so that each cell in
    ; netlist_list sources its contents from a pre-generated netlist file.
    ; There is no SKILL API for this: spice/spectre source files live in an
    ; ASCII prop.cfg file, which this procedure writes directly.
    ; BUGFIX: lib and cell were previously not declared as locals and leaked
    ; into the global namespace.
    let( (p lib_dir cell_lib_dir lib cell)
        lib_dir = get_lib_directory(lib_name)
        p = outfile( sprintf(nil "%s/%s/%s/%s" lib_dir cell_name view_name "prop.cfg") "w" )
        ; common header
        fprintf( p "file-format-id 1.1;\ndefault\n{\n}\n" )
        foreach( lib_cell netlist_list
            lib = car(lib_cell)
            cell = cadr(lib_cell)
            cell_lib_dir = get_lib_directory(lib)
            ; point this cell's source file at its generated netlist
            fprintf( p "cell %s.%s\n{\n" lib cell )
            fprintf( p " non-inherited string prop sourcefile = \"%s/%s/netlist/netlist\";\n}\n"
                     cell_lib_dir cell )
        )
        close(p)
    )
)
+
+; Write testbench information to file.
procedure( write_testbench_info_to_file(sdb result_file output_list en_corner_list)
    ; Write testbench information (corner names, enabled corners, global
    ; parameters, and output expressions) to result_file as YAML.
    ; sdb            : adexl setup database.
    ; result_file    : path of the output file.
    ; output_list    : list of output objects exposing ->name / ->expression.
    ; en_corner_list : list of enabled corner names.
    ; BUGFIX: var_list was previously not declared as a local and leaked
    ; into the global namespace.
    let( (p output_count var_list)

        ; write testbench information to result_file
        p = outfile(result_file "w")

        fprintf(p "corners:\n")
        foreach( corn cadr(axlGetCorners(sdb))
            fprintf(p " - %s\n" corn)
        )
        fprintf(p "enabled_corners:\n")
        foreach( corn en_corner_list
            fprintf(p " - %s\n" corn)
        )
        fprintf(p "parameters:\n")
        if( var_list = cadr(axlGetVars(sdb)) then
            foreach( var_name var_list
                fprintf(p " %s: \"%s\"\n" var_name axlGetVarValue(axlGetVar(sdb var_name)))
            )
        else
            ; emit an empty mapping so the YAML stays valid
            fprintf(p " {}\n")
        )
        fprintf(p "outputs:\n")
        output_count = 0
        foreach( out_obj output_list
            ; output names containing quotes would corrupt the YAML; skip them
            if( rexMatchp( "\"" out_obj->name) then
                warn("Output expression name (%s) have quotes, skipping" out_obj->name)
            else
                fprintf(p " \"%s\": !!str %A\n" out_obj->name out_obj->expression)
                output_count = output_count + 1
            )
        )
        ; emit an empty mapping when no outputs were written
        when( output_count == 0
            fprintf(p " {}\n")
        )
        close(p)
    )
)
+
+; Instantiates a testbench.
+;
+; Copy a testbench template to the desired location, replace instances, make config view,
+; and also setup corner settings in adexl.
+; this method will also record list of corners, global variables, and output expressions
+; to result_file
procedure( instantiate_testbench(tb_cell targ_lib
                                 config_libs config_views config_stops
                                 default_corner corner_file def_files
                                 tech_lib result_file
                                 "tttttttltt")
    ; Instantiate a testbench: create its config view, retarget the single
    ; adexl test at that config view, load the corner definitions, enable the
    ; default corner, and record corners/parameters/outputs in result_file.
    let( (session_name session_sdb session sdb test_names test_name test tool_args corner_list
          ade_symbol ade_session output_list tmp_state_name state_obj success)

        tmp_state_name = "orig_state"

        ; check if temporary ADE session state already exists, if so, delete it
        state_obj = ddGetObj(targ_lib tb_cell tmp_state_name)
        when( state_obj
            success = ddDeleteObj(state_obj)
            unless( success
                error("Cannot delete orig_state cellview.")
            )
        )

        ; create config view
        create_config_view(targ_lib tb_cell "config" config_libs config_views config_stops)

        ; session_name = "modify_adexl"
        ; use a global counter so each call gets a fresh session name
        session_name = sprintf(nil "modify_adexl_%d" bag_modify_adexl_counter)
        bag_modify_adexl_counter = bag_modify_adexl_counter + 1

        session_sdb = open_adexl_session(targ_lib tb_cell "adexl" session_name "a")
        session = car(session_sdb)
        sdb = cadr(session_sdb)

        ; check that only one test is defined
        test_names = cadr(axlGetTests(sdb))
        when(length(test_names) != 1
            axlCommitSetupDB(sdb)
            axlCloseSetupDB(sdb)
            axlCloseSession(session)
            error("ADEXL testbench must have exactly 1 test defined.")
        )

        ; save current test setup state
        axlSaveSetupState(session "adexl_default" "All")

        ; change all tests to use config view, and set all test's definition files
        ; also get a list of defined output expressions
        ; step 1: get ADE session
        test_name = car(test_names)
        ade_symbol = axlGetToolSession(session_name test_name)
        ade_session = asiGetSession(ade_symbol)
        ; step 2: save original ADE session
        asiSaveState(ade_session ?name tmp_state_name ?option 'cellview ?lib targ_lib ?cell tb_cell)
        ; step 3: change test library
        test = axlGetTest(sdb test_name)
        tool_args = axlGetTestToolArgs(test)
        set_assoc_list(tool_args "view" "config")
        set_assoc_list(tool_args "lib" targ_lib)
        set_assoc_list(tool_args "cell" tb_cell)
        axlSetTestToolArgs(test tool_args)
        ; step 4: reopen ADE session, then load original ADE state
        ade_symbol = axlGetToolSession(session_name test_name)
        ade_session = asiGetSession(ade_symbol)
        asiLoadState(ade_session ?name tmp_state_name ?option 'cellview)
        asiSetEnvOptionVal(ade_session 'definitionFiles def_files)
        output_list = setof(ele asiGetOutputList(ade_session) ele->name)
        ; step 5: delete temporary ADE session state
        state_obj = ddGetObj(targ_lib tb_cell tmp_state_name)
        ddDeleteObj(state_obj)

        axlMainAppSaveSetup(session_name)

        ; load corner
        unless(axlLoadCorners(sdb corner_file)
            axlCommitSetupDB(sdb)
            axlCloseSetupDB(sdb)
            axlCloseSession(session)
            ; BUGFIX: the original error call referenced the undefined
            ; variables lib_name/cell_name/view_name; use the actual
            ; target library / cell / view instead.
            error("Error loading corner file %s to %s__%s (%s)" corner_file targ_lib tb_cell "adexl")
        )

        ; set default corner
        corner_list = list(default_corner)
        enable_adexl_corners(sdb corner_list nil)

        ; write testbench information to file
        write_testbench_info_to_file(sdb result_file output_list corner_list)

        ; save and close
        axlSaveSetupState(session "adexl_default" "All")
        axlSaveSetupState(session "ocean_default" "All")
        axlMainAppSaveSetup(session_name)
        axlCommitSetupDB(sdb)
        axlCloseSetupDB(sdb)
        axlCloseSession(session)
    )
)
+
+; Returns parameter and corner information of a testbench.
+procedure( get_testbench_info(tb_lib tb_cell result_file "ttt")
+    ; Writes the output expressions and enabled corners of the single-test
+    ; ADEXL testbench tb_lib/tb_cell to result_file, then closes the session.
+    ; tb_lib/tb_cell: library and cell of the testbench.
+    ; result_file: output file written via write_testbench_info_to_file.
+    ; Raises an error if the testbench does not define exactly one test.
+    let( (session_name session_sdb session sdb test_names test_name ade_symbol asi_sess
+          output_list corner_list en_list success)
+        ; open the ADEXL setup database read-only
+        session_name = "read_adexl"
+        session_sdb = open_adexl_session(tb_lib tb_cell "adexl" session_name "r")
+        session = car(session_sdb)
+        sdb = cadr(session_sdb)
+
+        ; check that only one test is defined
+        test_names = cadr(axlGetTests(sdb))
+        when(length(test_names) != 1
+            ; clean up before raising so the setup DB is not left open
+            axlCommitSetupDB(sdb)
+            axlCloseSetupDB(sdb)
+            axlCloseSession(session)
+            error("ADEXL testbench must have exactly 1 test defined.")
+        )
+
+        ; get output list
+        test_name = car(test_names)
+        ade_symbol = axlGetToolSession(session_name test_name)
+        ; NOTE(review): this uses sevEnvironment() while the modify path uses
+        ; asiGetSession() to obtain the ADE session -- confirm both are valid here
+        asi_sess = sevEnvironment(ade_symbol)
+        output_list = setof(ele asiGetOutputList(asi_sess) ele->name)
+
+        ; get enabled corners
+        corner_list = cadr(axlGetCorners(sdb))
+        en_list = setof(corner corner_list axlGetEnabled(axlGetCorner(sdb corner)))
+
+        ; write testbench information to file
+        write_testbench_info_to_file(sdb result_file output_list en_list)
+
+        ; close
+        axlCommitSetupDB(sdb)
+        axlCloseSetupDB(sdb)
+        axlCloseSession(session)
+    )
+)
+
+; Configure run options. Used to setup monte carlo parameters.
+; run_params is an association list of run options and their values. The key "mode"
+; corresponds to the run mode.
+procedure( set_run_options(session sdb run_params "ggl")
+    ; Configure ADEXL run options from run_params.
+    ; session/sdb: open ADEXL session and setup database.
+    ; run_params: association list of (name value) pairs; the "mode" entry
+    ; selects the run mode, remaining entries set mode-specific options
+    ; (e.g. "mcnumpoints"/"mcmethod" for Monte Carlo).
+    ; Raises an error (after closing the session) on an unsupported run mode.
+    ; Fix: opt_val was missing from the let bindings and leaked to a global.
+    let( (run_mode opt_list run_opt opt_val)
+        when( run_mode = cadr(assoc("mode" run_params))
+            ; no options for single run/sweep mode.
+            cond( (run_mode == "Single Run, Sweeps and Corners"
+                   opt_list = nil)
+                  (run_mode == "Monte Carlo Sampling"
+                   opt_list = '("mcnumpoints" "mcmethod") )
+                  ('t
+                   axlCloseSession(session)
+                   error("Unsupported run mode: %s" run_mode) )
+            )
+            ; set each supported option that appears in run_params
+            foreach( opt_name opt_list
+                when( opt_val = cadr(assoc(opt_name run_params))
+                    run_opt = axlPutRunOption(sdb run_mode opt_name)
+                    axlSetRunOptionValue(run_opt opt_val)
+                )
+            )
+            axlSetCurrentRunMode(sdb run_mode)
+        )
+    )
+)
+
+; modify the given testbench.
+; tb_lib and tb_cell describes the library and cell of the testbench to simulate.
+; conf_file contains the config view settings.
+; opt_file contains the association list of run mode options.
+; corner_file contains a list of corners to simulate.
+; param_file contains the association list of parameter values.
+procedure( modify_testbench(tb_lib tb_cell conf_file opt_file corner_file param_file env_params_file "ttttttt")
+    ; Apply simulation settings to an existing ADEXL testbench, then save it.
+    ; tb_lib/tb_cell: library and cell of the testbench to simulate.
+    ; conf_file: config view settings (nil/empty data skips the config edit).
+    ; opt_file: association list of run mode options (see set_run_options).
+    ; corner_file: list of corners to enable.
+    ; param_file: association list of parameter values.
+    ; env_params_file: association list of environment parameter values.
+    let( (tmp_list session sdb conf_list run_params corner_list param_values env_param_values session_name)
+        ; unique session name so concurrent invocations do not collide
+        sprintf(session_name "bag_sim_adexl_%s" getCurrentTime())
+
+        ; read inputs from file.
+        conf_list = parse_data_from_file(conf_file)
+        run_params = parse_data_from_file(opt_file)
+        corner_list = parse_data_from_file(corner_file)
+        param_values = parse_data_from_file(param_file)
+        env_param_values = parse_data_from_file(env_params_file)
+
+        ; modify config view
+        when( conf_list
+            edit_config_view(tb_lib tb_cell "config" conf_list)
+        )
+
+        ; open the setup database in append mode
+        tmp_list = open_adexl_session(tb_lib tb_cell "adexl" session_name "a")
+        session = car(tmp_list)
+        sdb = cadr(tmp_list)
+
+        ; change corners, parameters, and run options
+        enable_adexl_corners( sdb corner_list env_param_values)
+        set_adexl_parameters( sdb param_values )
+        set_run_options( session sdb run_params )
+
+        ; save and close (state saved for both ADEXL and OCEAN use)
+        axlSaveSetupState(session "adexl_default" "All")
+        axlSaveSetupState(session "ocean_default" "All")
+        axlMainAppSaveSetup(session_name)
+        axlCommitSetupDB(sdb)
+        axlCloseSetupDB(sdb)
+        axlCloseSession(session)
+    )
+)
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; BAG server related functions ;;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+procedure( stdoutHandler(ipcId data)
+    ; IPC stdout callback for the BAG server process.
+    ; Before the server announces itself, only watch for the startup banner;
+    ; afterwards every line received is evaluated as a SKILL expression and
+    ; the result (or error message) is sent back, preceded by its byte count.
+    let( (result result_str)
+        if( bag_server_started > 0 then
+            printf("*INFO* Evaluate expression from BAG process: %s\n" data)
+            ; errsetstring evaluates the expression with errors trapped
+            if( result = errsetstring(data 't) then
+                sprintf(result_str "%A\n" car(result))
+            else
+                ; on failure, report the message recorded in errset.errset
+                sprintf(result_str "%s\n" car(nthelem(5 errset.errset)))
+            )
+            printf("*INFO* Sending result to BAG process: %s" result_str)
+            ; protocol: length line first, then the payload
+            ipcWriteProcess(ipcId sprintf(nil "%d\n" strlen(result_str)))
+            ipcWriteProcess(ipcId result_str)
+            't
+        else
+            if( data == "BAG skill server has started. Yay!\n" then
+                bag_server_started = 1
+                printf("*INFO* BAG skill server started.\n")
+            else
+                printf("*INFO* Waiting for BAG skill server. Message: %s\n" data)
+            )
+        )
+    )
+)
+
+procedure( stderrHandler(ipcId data)
+    ; IPC stderr callback: any stderr output from the BAG server process is
+    ; treated as fatal -- log it and kill the child process.
+    warn("BAG server process error: %s\n" data)
+    warn("Shutting down BAG server.")
+    ipcKillProcess(ipcId)
+    't
+)
+
+procedure( exitHandler(ipcId exitId)
+    ; IPC exit callback: log the BAG server process exit status.
+    printf("*INFO* BAG server process exited with status: %d\n" exitId)
+    't
+)
+
+procedure( start_bag_server()
+    ; Launch the BAG server as a child process; its stdout/stderr/exit are
+    ; routed to the handler procedures above. Returns the IPC process handle.
+    bag_server_started = 0
+    printf("*INFO* Starting BAG server process.\n")
+    ipcBeginProcess("bash virt_server.sh" "" 'stdoutHandler 'stderrHandler 'exitHandler "")
+)
+
+; module-level state: flag set by stdoutHandler once the server announces
+; itself, counter used to generate unique modify-ADEXL session names, and
+; the IPC handle of the spawned BAG server process.
+bag_server_started = 0
+bag_modify_adexl_counter = 0
+bag_proc = start_bag_server()
diff --git a/run_scripts/start_bag.sh b/run_scripts/start_bag.sh
new file mode 100755
index 0000000..afcfec3
--- /dev/null
+++ b/run_scripts/start_bag.sh
@@ -0,0 +1,50 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# set up the Python environment (PYTHONPATH etc.) for BAG
+source .bashrc_pypath
+
+# disable QT session manager warnings
+unset SESSION_MANAGER
+
+# replace this shell with an IPython interpreter run by the BAG python
+exec ${BAG_PYTHON} -m IPython
diff --git a/run_scripts/start_bag_ICADV12d3.il b/run_scripts/start_bag_ICADV12d3.il
new file mode 100644
index 0000000..233fe6f
--- /dev/null
+++ b/run_scripts/start_bag_ICADV12d3.il
@@ -0,0 +1,2067 @@
+;; SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+;; Copyright 2018 Regents of the University of California
+;; All rights reserved.
+;;
+;; Redistribution and use in source and binary forms, with or without
+;; modification, are permitted provided that the following conditions are met:
+;;
+;; * Redistributions of source code must retain the above copyright notice, this
+;; list of conditions and the following disclaimer.
+;;
+;; * Redistributions in binary form must reproduce the above copyright notice,
+;; this list of conditions and the following disclaimer in the documentation
+;; and/or other materials provided with the distribution.
+;;
+;; * Neither the name of the copyright holder nor the names of its
+;; contributors may be used to endorse or promote products derived from
+;; this software without specific prior written permission.
+;;
+;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+;; AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+;; IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+;; DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+;; FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+;; SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+;; CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+;; OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+;; OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+;; Copyright 2019 Blue Cheetah Analog Design Inc.
+;;
+;; Licensed under the Apache License, Version 2.0 (the "License");
+;; you may not use this file except in compliance with the License.
+;; You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+
+/* Note:
+
+Due to licensing reasons, this skill script is missing the function
+CCSinvokeCdfCallbacks() from Cadence solution 11018344, which executes
+CDF parameters callback from skill.
+
+If you do not need to instantiate a pcell instance, this method
+is not needed.
+
+Eric Chang, Mar 2, 2017.
+
+*/
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Virtuoso Database operations functions ;;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+; reads a skill data structure from file
+procedure( parse_data_from_file( fname "t" )
+    ; Deserialize a SKILL data structure (list / property list / scalar)
+    ; from text file fname; see parse_data_from_file_helper for the format.
+    ; Raises an error if the file cannot be opened.
+    let( (p ans)
+        unless( p = infile( fname )
+            error("Cannot open file %s" fname)
+        )
+        ans = parse_data_from_file_helper(p)
+        close( p )
+        ans
+    )
+)
+
+; recursive helper for parse_data_from_file
+procedure( parse_data_from_file_helper( p )
+    ; Recursive worker for parse_data_from_file. Reads one line-delimited
+    ; token from port p and returns the decoded value. Format markers:
+    ;   "#list"      -> list of values, terminated by a line "#end"
+    ;   "#prop_list" -> disembodied property list of key/value pairs, "#end"-terminated
+    ;   "#float N" / "#int N" / "#bool N" -> scalar values
+    ;   anything else -> the line itself as a string (including "#end",
+    ;   which the list/prop-list cases use as their terminator)
+    let( (line item ans finish key)
+        gets( line p )
+        ; remove newline
+        line = substring(line 1 strlen(line) - 1)
+        ; printf("read line: %s\n" line)
+        cond(
+            (line == "#list"
+             ; parse a list; tconc keeps appends O(1)
+             ans = tconc(nil 0)
+             while( nequal(item = parse_data_from_file_helper(p) "#end")
+                 tconc(ans item)
+             )
+             ; printf("returning list ")
+             ; print(cdar(ans))
+             ; printf("\n")
+             ; cdar drops the dummy head element of the tconc structure
+             cdar(ans)
+            )
+            (line == "#prop_list"
+             ; parse a disembodied property list
+             ans = ncons(nil)
+             finish = nil
+             while( !finish
+                 key = parse_data_from_file_helper(p)
+                 if( key == "#end" then
+                     finish = 't
+                 else
+                     item = parse_data_from_file_helper(p)
+                     putprop(ans item key)
+                 )
+             )
+             ans
+            )
+            ; parse a float
+            (strncmp( line "#float" 6 ) == 0
+             cdfParseFloatString(cadr(parseString(line)))
+            )
+            ; parse an int
+            (strncmp( line "#int" 4 ) == 0
+             atoi(cadr(parseString(line)))
+            )
+            ; parse a boolean (encoded as 1 / 0)
+            (strncmp( line "#bool" 5 ) == 0
+             if( atoi(cadr(parseString(line))) == 1 then
+                 't
+             else
+                 nil
+             )
+            )
+            ; parse a string token or #end
+            ('t
+             ; printf("returning str %s\n" line)
+             line
+            )
+        )
+    )
+)
+
+; return a list of cells in the given library.
+procedure( get_cells_in_library( lib_name "t" )
+    ; Return the list of cell names in library lib_name, or an empty list
+    ; when the library cannot be opened.
+    let( ( lib_obj ans )
+        if( lib_obj = ddGetObj(lib_name nil nil nil nil "r") then
+            ans = ddGetObjChildren(lib_obj)~>name
+            ; release the handle acquired by ddGetObj
+            ddReleaseObj(lib_obj)
+        else
+            ; library does not exist, return empty list
+            ans = '()
+        )
+        ans
+    )
+)
+
+; write the list of cells in the given library to the given file, one name per line.
+procedure( get_cells_in_library_file( lib_name fname "tt" )
+    ; Write the cell names of library lib_name to file fname, one per line.
+    ; Fix: outfile() previously went unchecked, so an unwritable path made
+    ; the fprintf calls operate on a nil port; now raise an explicit error,
+    ; consistent with parse_data_from_file's handling of unopenable files.
+    let( ( p )
+        unless( p = outfile( fname "w" )
+            error("Cannot open file %s" fname)
+        )
+        foreach( cell get_cells_in_library(lib_name)
+            fprintf(p "%s\n" cell)
+        )
+        close(p)
+    )
+)
+
+; Returns the directory corresponding to the given library.
+procedure( get_lib_directory(lib_name "t")
+    ; Return the on-disk directory (readPath) of library lib_name, or ""
+    ; when the library cannot be opened.
+    let( ( lib_obj ans )
+        if( lib_obj = ddGetObj(lib_name nil nil nil nil "r") then
+            ans = lib_obj~>readPath
+            ddReleaseObj(lib_obj)
+        else
+            ; library does not exist, return empty string
+            ans = ""
+        )
+        ans
+    )
+)
+
+; Parse the netlist of the given cellview.
+; Works on schematic and veriloga.
+procedure( parse_cad_sch(lib_name cell_name file_name "ttt")
+    ; Dump the pin list and instance connectivity of a schematic (or
+    ; veriloga) cellview to file_name in a YAML-like format.
+    ; Tries the "schematic" view first, then falls back to "veriloga".
+    ; NOTE(review): tb_list/tb_match/inst_lib_name/inst_cell_name in the let
+    ; bindings appear unused; outfile() result is not checked -- verify.
+    let( (cv cell_type p indent direction term_names tb_list tb_match
+          inst_lib_name inst_cell_name inst_cnt)
+        indent = ""
+        cell_type = "schematic"
+        unless( cv = dbOpenCellViewByType( lib_name cell_name "schematic" nil "r" )
+            cell_type = "veriloga"
+            unless( cv = dbOpenCellViewByType( lib_name cell_name "veriloga" nil "r" )
+                error( "Cannot find schematic or veriloga view of cell %s__%s" lib_name cell_name )
+            )
+        )
+        p = outfile( file_name "w" )
+
+        ; print cellview information
+        printf( "*INFO* Writing cell %s__%s (%s) netlist to %s\n" lib_name cell_name cell_type file_name )
+        fprintf( p "%slib_name: %s\n" indent lib_name )
+        fprintf( p "%scell_name: %s\n" indent cell_name )
+
+        ; print pins
+        fprintf( p "%spins: [ " indent )
+        ; veriloga views list terminals in reverse declaration order
+        if( cell_type == "veriloga" then
+            term_names = reverse(cv~>terminals~>name)
+        else
+            term_names = cv~>terminals~>name
+        )
+        ; add quotes around pin names to escape array pins
+        term_names = mapcar( lambda( (x) sprintf(nil "\"%s\"" x) ) term_names )
+        fprintf( p "%s ]\n" buildString(term_names ", "))
+
+        ; print instances
+        if( not(cv~>instances) then
+            fprintf( p "%sinstances: {}\n" indent )
+        else
+            inst_cnt = 0
+            fprintf( p "%sinstances:\n" indent )
+            foreach( inst cv~>instances
+                inst_cnt++
+                ; print entry for instance
+                indent = "  "
+                fprintf( p "%s%s:\n" indent inst~>name )
+                ; print instance master information.
+                indent = "    "
+                fprintf( p "%slib_name: %s\n" indent inst~>libName )
+                fprintf( p "%scell_name: %s\n" indent inst~>cellName )
+                ; print instance terminal information
+                if( !(inst~>instTerms) then
+                    fprintf( p "%sinstpins: {}\n" indent )
+                else
+                    fprintf( p "%sinstpins:\n" indent )
+                    foreach( inst_term inst~>instTerms
+                        ; direction may be unset on some terminals
+                        unless( direction = inst_term~>direction
+                            direction = ""
+                        )
+                        indent = "      "
+                        fprintf( p "%s%s:\n" indent inst_term~>name )
+                        indent = "        "
+                        fprintf( p "%sdirection: %s\n" indent direction )
+                        fprintf( p "%snet_name: \"%s\"\n" indent inst_term~>net~>name )
+                        fprintf( p "%snum_bits: %d\n" indent inst_term~>numBits )
+                    )
+                )
+            )
+            ; emit an empty mapping if the instance list was empty
+            when(inst_cnt == 0
+                fprintf( p "  {}\n" )
+            )
+        )
+
+        ; close resources
+        close(p)
+        dbClose(cv)
+    )
+)
+
+; Delete a cellview if it exists. Currently used to delete old calibre file.
+procedure( delete_cellview(lib_name cell_name view_name "ttt")
+    ; Remove the cellview lib_name/cell_name/view_name when it exists.
+    ; Returns the result of ddDeleteObj, or 't when there is nothing to do.
+    let( (view_obj)
+        ; ddGetObj returns nil when the cellview does not exist
+        view_obj = ddGetObj(lib_name cell_name view_name)
+        cond(
+            (view_obj ddDeleteObj(view_obj))
+            ('t 't)
+        )
+    )
+)
+
+; Parse the structure of the given cellview.
+; Works on layout.
+procedure( parse_cad_layout(lib_name cell_name file_name "ttt")
+    ; Dump the rectangles, labels, and instances of a layout cellview to
+    ; file_name in a YAML-like format.
+    ; NOTE(review): outfile() result is not checked -- verify fname is writable.
+    let( (cv cell_type p indent rect_cnt label_cnt inst_cnt)
+
+        indent = ""
+        cell_type = "layout"
+        unless( cv = dbOpenCellViewByType( lib_name cell_name cell_type nil "r" )
+            error( "Cannot find layout view of cell %s__%s" lib_name cell_name )
+        )
+        p = outfile( file_name "w" )
+
+        ; print cellview information
+        printf( "*INFO* Writing cell %s__%s (%s) netlist to %s\n" lib_name cell_name cell_type file_name )
+        fprintf( p "%slib_name: %s\n" indent lib_name )
+        fprintf( p "%scell_name: %s\n" indent cell_name )
+
+        ; print rects (keyed by a running 1-based counter)
+        if( not(cv~>shapes) then
+            fprintf( p "%srects: {}\n" indent )
+        else
+            rect_cnt = 0
+            fprintf( p "%srects:\n" indent )
+            foreach( shape cv~>shapes
+                if( (shape~>objType == "rect") then
+                    rect_cnt++
+                    ; print entry for rect
+                    indent = "  "
+                    fprintf( p "%s%d:\n" indent rect_cnt )
+                    ; print rect master information.
+                    indent = "    "
+                    fprintf( p "%slayer: %s %s\n" indent nthelem(1 shape~>lpp) nthelem(2 shape~>lpp))
+                    fprintf( p "%sbBox: [[%f, %f], [%f, %f]]\n" indent
+                        nthelem(1 nthelem(1 shape~>bBox)) nthelem(2 nthelem(1 shape~>bBox))
+                        nthelem(1 nthelem(2 shape~>bBox)) nthelem(2 nthelem(2 shape~>bBox))
+                    );fprintf
+                )
+            );if
+            ; emit an empty mapping if no rects were found
+            if((rect_cnt == 0) then
+                fprintf( p "  {}\n" )
+            );if
+        )
+
+        ; print labels
+        indent = ""
+        if( not(cv~>shapes) then
+            fprintf( p "%slabels: {}\n" indent )
+        else
+            label_cnt = 0
+            fprintf( p "%slabels:\n" indent )
+            foreach( shape cv~>shapes
+                if( (shape~>objType == "label") then
+                    label_cnt++
+                    ; print entry for label
+                    indent = "  "
+                    fprintf( p "%s%d:\n" indent label_cnt )
+                    ; print label master information.
+                    indent = "    "
+                    fprintf( p "%slabel: %s\n" indent shape~>theLabel )
+                    fprintf( p "%slayer: %s %s\n" indent nthelem(1 shape~>lpp) nthelem(2 shape~>lpp))
+                    fprintf( p "%sxy: [%f, %f]\n" indent nthelem(1 shape~>xy) nthelem(2 shape~>xy))
+                )
+                if( (shape~>objType == "textDisplay") then ;some labels are instantiated as text displays
+                    label_cnt++
+                    ; print entry for label
+                    indent = "  "
+                    fprintf( p "%s%d:\n" indent label_cnt )
+                    ; print label master information.
+                    indent = "    "
+                    ; textDisplay shapes take their text from the owning object's name
+                    fprintf( p "%slabel: %s\n" indent shape~>owner~>name )
+                    fprintf( p "%slayer: %s %s\n" indent nthelem(1 shape~>lpp) nthelem(2 shape~>lpp))
+                    fprintf( p "%sxy: [%f, %f]\n" indent nthelem(1 shape~>xy) nthelem(2 shape~>xy))
+                )
+            );if
+            ; emit an empty mapping if no labels were found
+            if((label_cnt == 0) then
+                fprintf( p "  {}\n" )
+            );if
+        )
+
+        ; print instances
+        indent = ""
+        if( not(cv~>instances) then
+            fprintf( p "%sinstances: {}\n" indent )
+        else
+            inst_cnt = 0
+            fprintf( p "%sinstances:\n" indent )
+            foreach( inst cv~>instances
+                inst_cnt++
+                ; print entry for instance
+                indent = "  "
+                fprintf( p "%s%s:\n" indent inst~>name )
+                ; print instance master information.
+                indent = "    "
+                fprintf( p "%slib_name: %s\n" indent inst~>libName )
+                fprintf( p "%scell_name: %s\n" indent inst~>cellName )
+                fprintf( p "%sxy: [%f, %f]\n" indent nthelem(1 inst~>xy) nthelem(2 inst~>xy))
+                ; mosaics carry array/spacing info; plain instances only a rotation
+                if( (inst~>objType == "mosaic") then
+                    fprintf( p "%scols: %d\n" indent inst~>columns)
+                    fprintf( p "%srows: %d\n" indent inst~>rows)
+                    fprintf( p "%ssp_cols: %f\n" indent inst~>uX)
+                    fprintf( p "%ssp_rows: %f\n" indent inst~>uY)
+                    fprintf( p "%srotation: %s\n" indent car(inst~>tileArray))
+                else
+                    fprintf( p "%srotation: %s\n" indent inst~>orient)
+                );if
+            )
+            when(inst_cnt == 0
+                fprintf( p "  {}\n" )
+            )
+        )
+
+        ; close resources
+        close(p)
+        dbClose(cv)
+    )
+)
+
+; write the list of cells contained in the specified library to a file
+procedure( get_cell_list(lib_name file_name "tt")
+    ; Write the cell names of library lib_name to file_name as a single
+    ; bracketed, comma-separated list (note: every entry, including the
+    ; last, is followed by ", ").
+    ; NOTE(review): unlike the other dd* helpers, the library object is not
+    ; released here, and ddGetObj failure (nil) is unchecked -- confirm.
+    let( (lib cellname p)
+        lib=ddGetObj(lib_name)
+        p = outfile( file_name "w" )
+        fprintf( p "%s: [" lib_name)
+        foreach( cellname lib~>cells~>name
+            fprintf( p "%s, " cellname)
+        );foreach
+        fprintf( p "] \n" )
+        ; close resources
+        close(p)
+    );let
+)
+
+; if library with lib_name does not exists, create a new
+; library with that name. Otherwise, if erase is true,
+; remove all cells in that library. Returns the library
+; database object.
+procedure( create_or_erase_library(lib_name tech_lib lib_path erase "tttg")
+    ; Ensure library lib_name exists. If it already exists and erase is
+    ; non-nil, delete all of its cells; otherwise create it under lib_path
+    ; (unless lib_path is nil or ".") and attach tech_lib. Returns 't.
+    let( (lib_obj)
+        if( lib_obj = ddGetObj(lib_name nil nil nil nil "r") then
+            when( erase
+                ; delete all cells in the library
+                foreach( cell lib_obj~>cells
+                    unless( ddDeleteObj(cell)
+                        error("cannot delete cell %s in library %s\n" cell~>name lib_name)
+                    )
+                )
+            )
+            ddReleaseObj(lib_obj)
+            't
+        else
+            ; create library if not exist
+            when( and(lib_path (lib_path != "."))
+                lib_path = strcat(lib_path "/" lib_name)
+            )
+            lib_obj = ddCreateLib(lib_name lib_path)
+            ; attach technology file
+            techBindTechFile(lib_obj tech_lib)
+            ; close library
+            ddReleaseObj(lib_obj)
+            't
+        )
+    )
+)
+
+; copy all template cells to the given library.
+; template list is a list of three-element lists with the format
+; '("master_lib_name" "master_cell_name" "target_cell_name")
+; any existing cellviews will be overwritten.
+procedure( copy_templates_to_library(lib_name template_list "tl")
+    ; Copy template cells into library lib_name, overwriting existing views.
+    ; template_list entries are ("master_lib" "master_cell" "target_cell").
+    ; Copies are batched so each ccpCopy call sees each source cell at most
+    ; once (ccpCopy cannot fan one source out to several targets).
+    ; Returns 't.
+    let( (current remaining src_gdm targ_gdm table master_lib master_cell target_cell key cnt
+          empty_spec targ_lib_obj test_cv)
+
+        current = template_list
+        remaining = '()
+        empty_spec = gdmCreateSpecList()
+        targ_lib_obj = ddGetObj(lib_name nil nil nil nil "r")
+
+        ; ccpCopy cannot copy the same cell to multiple different cells.
+        ; because of this, we need to copy a set of unique cells at a time,
+        ; hence the while loop.
+        while( current
+            ; Create GDMSpecList used to copy all cells
+            src_gdm = gdmCreateSpecList()
+            targ_gdm = gdmCreateSpecList()
+            ; table to keep track of seen cells.
+            table = makeTable("mytable" 0)
+            ; Populate GDMSpecList
+            foreach( template_info current
+                master_lib = car(template_info)
+                master_cell = cadr(template_info)
+                target_cell = caddr(template_info)
+
+                ; check if we copied this cell on this iteration yet
+                key = list(master_lib master_cell)
+                if( table[key] == 1 then
+                    ; wait for the next iteration
+                    remaining = cons(template_info remaining)
+                else
+                    ; purge target cellview if exist, so the copy fully replaces it
+                    when( targ_lib_obj
+                        test_cv = dbFindOpenCellView(targ_lib_obj target_cell "schematic")
+                        when( test_cv
+                            dbPurge(test_cv)
+                        )
+                        test_cv = dbFindOpenCellView(targ_lib_obj target_cell "symbol")
+                        when( test_cv
+                            dbPurge(test_cv)
+                        )
+                        ; hard remove adexl state if it exists
+                        test_cv = ddGetObj(lib_name target_cell "adexl")
+                        when( test_cv
+                            ddDeleteObj(test_cv)
+                        )
+                    )
+                    gdmAddSpecToSpecList(gdmCreateSpec(master_lib master_cell nil nil "CDBA") src_gdm)
+                    gdmAddSpecToSpecList(gdmCreateSpec(lib_name target_cell nil nil "CDBA") targ_gdm)
+                    table[key] = 1
+                )
+            )
+            ; Perform copy
+            ccpCopy(src_gdm targ_gdm 't 'CCP_EXPAND_COMANAGED nil nil "" "" 'CCP_UPDATE_FROM_LIBLIST empty_spec)
+
+            ; set current and remaining
+            current = remaining
+            remaining = '()
+
+            ; debug printing
+            ; printstruct(table)
+        )
+    )
+    't
+)
+
+; returns a unique terminal name in the given cellview.
+; name_base is the suffix of the returned terminal name.
+procedure( get_unique_term_name( cvid name_base "gt")
+    ; Generate a terminal name of the form "temp<N>_<name_base>" that does
+    ; not collide with any terminal already present in cellview cvid.
+    let( (idx candidate)
+        idx = 1
+        candidate = sprintf(nil "temp%d_%s" idx name_base)
+        ; bump the index until the generated name is unused
+        while( dbFindTermByName(cvid candidate)
+            idx++
+            candidate = sprintf(nil "temp%d_%s" idx name_base)
+        )
+        candidate
+    )
+)
+
+; helper method to open pin master
+procedure( open_pin_master(cvid pin_cv_info)
+    ; Open the pin master cellview described by pin_cv_info, a list of
+    ; (lib_name cell_name view_name). On failure, close cvid and raise an
+    ; error. Returns the opened (read-only) pin master cellview.
+    let( (master)
+        master = dbOpenCellViewByType(car(pin_cv_info) cadr(pin_cv_info) caddr(pin_cv_info) nil "r")
+        unless( master
+            dbClose(cvid)
+            error( "Cannot find pin master cellview: %s__%s (%s)" car(pin_cv_info) cadr(pin_cv_info) caddr(pin_cv_info))
+        )
+        master
+    )
+)
+
+; update pins of a schematic
+; cvid is the opened cellview id of the schematic. It must be in append mode.
+; pin_map is a list of two-element lists of old pin names and new pin names, respectively.
+; ipin, opin, and iopin are lists of three strings for input/output/inout pins, respectively.
+; first element is the pin master library, second element is the pin mater cell, and third element
+; is the pin master cellview.
+procedure( update_schematic_pin(cvid pin_map new_pins ipin opin iopin "glllll")
+    ; Rename, remove, and add pins on an opened schematic cellview.
+    ; cvid: schematic cellview opened in append mode.
+    ; pin_map: list of (old_name new_name); an empty new_name removes the pin.
+    ; new_pins: list of (name direction) pins to create below existing pins.
+    ; ipin/opin/iopin: (lib cell view) of the input/output/inout pin masters.
+    ; Fix: new_term was missing from the let bindings and leaked to a global.
+    let( (snap_dist cur_term_name new_term_name term pin pin_orient pin_location pin_direction
+          temp_new_term_name pin_master ipin_master opin_master iopin_master new_term
+          pin_xy_info npin_xl npin_yl npin_xr npin_yr npin_name npin_type)
+
+        snap_dist = schGetEnv("schSnapSpacing")
+
+        ; open pin masters
+        ipin_master = open_pin_master(cvid ipin)
+        opin_master = open_pin_master(cvid opin)
+        iopin_master = open_pin_master(cvid iopin)
+        pin_master = nil
+
+        ; get new pin locations before any pin addition/subtraction.
+        pin_xy_info = get_new_pin_locations(cvid snap_dist)
+
+        ; rename or remove pins
+        foreach( p pin_map
+            cur_term_name = car(p)
+            new_term_name = cadr(p)
+            ; printf("%s %s\n" cur_term_name new_term_name)
+            when(cur_term_name != new_term_name
+                unless( term = dbFindTermByName(cvid cur_term_name)
+                    ; release all open cellviews before raising
+                    dbClose(cvid)
+                    dbClose(ipin_master)
+                    dbClose(opin_master)
+                    dbClose(iopin_master)
+                    error( "Terminal %s not found." cur_term_name )
+                )
+                when( term~>pinCount != 1
+                    dbClose(cvid)
+                    dbClose(ipin_master)
+                    dbClose(opin_master)
+                    dbClose(iopin_master)
+                    error( "Terminal %s does not have exactly one pin." cur_term_name)
+                )
+                pin = car(term~>pins)
+
+                if( strlen(new_term_name) != 0 then
+                    ; rename pin
+                    pin_orient = pin~>fig~>orient
+                    pin_location = pin~>fig~>xy
+                    pin_direction = term~>direction
+
+                    ; create new pin figure
+                    cond( ( pin_direction == "input" pin_master = ipin_master)
+                          ( pin_direction == "output" pin_master = opin_master)
+                          ( 't pin_master = iopin_master)
+                    )
+
+                    ; delete pin
+                    unless( dbDeleteObject(pin~>fig)
+                        dbClose(cvid)
+                        dbClose(ipin_master)
+                        dbClose(opin_master)
+                        dbClose(iopin_master)
+                        error( "Cannot delete pin for terminal %s" cur_term_name )
+                    )
+
+                    ; create a temporary terminal with a unique name so we can change the number of bits without getting an error
+                    temp_new_term_name = get_unique_term_name(cvid new_term_name)
+                    schCreatePin(cvid pin_master temp_new_term_name pin_direction nil pin_location "R0" )
+
+                    ; now rename the new terminal
+                    new_term = dbFindTermByName(cvid temp_new_term_name )
+                    new_term~>name = new_term_name
+                else
+                    ; remove pin
+                    dbDeleteObject(pin~>fig)
+                )
+            )
+        )
+
+        ; add new pins
+        when( new_pins
+            ; get location for new pins; inputs/inouts stack below the
+            ; left-most pin, outputs below the right-most pin
+            npin_xl = xCoord(car(pin_xy_info))
+            npin_yl = yCoord(car(pin_xy_info)) - 2 * snap_dist
+            npin_xr = xCoord(cadr(pin_xy_info))
+            npin_yr = yCoord(cadr(pin_xy_info)) - 2 * snap_dist
+            foreach( npin_info new_pins
+                npin_name = car(npin_info)
+                npin_type = cadr(npin_info)
+
+                ; verify that this pin does not exist yet
+                when(dbFindTermByName(cvid npin_name)
+                    dbClose(cvid)
+                    dbClose(ipin_master)
+                    dbClose(opin_master)
+                    dbClose(iopin_master)
+                    error( "Terminal %s already exists" npin_name)
+                )
+
+                ; get pin location based on pin type
+                cond( ( npin_type == "input" pin_master = ipin_master pin_location = npin_xl:npin_yl npin_yl = npin_yl - 2 * snap_dist)
+                      ( npin_type == "output" pin_master = opin_master pin_location = npin_xr:npin_yr npin_yr = npin_yr - 2 * snap_dist)
+                      ( 't pin_master = iopin_master pin_location = npin_xl:npin_yl npin_yl = npin_yl - 2 * snap_dist)
+                )
+                ; create pin
+                schCreatePin(cvid pin_master npin_name npin_type nil pin_location "R0")
+            )
+        )
+
+        dbClose(ipin_master)
+        dbClose(opin_master)
+        dbClose(iopin_master)
+    )
+)
+
+; find X and Y coordinates to insert new symbol pins
+procedure( get_new_pin_locations(cvid snap_dist)
+    ; Return a two-element list of snap-grid-aligned points: the position of
+    ; the left-most pin and of the right-most pin (each at the lowest Y among
+    ; pins sharing that X). Used to place newly added pins below them.
+    ; Raises an error if any terminal has more or fewer than one pin.
+    let( (pin bbox pin_x pin_y xl xr yl yr)
+        ; find the left-most/right-most pin X coordinates, and find the lowest
+        ; Y coordinate of the left-most/right-most pins
+        xl = nil
+        xr = nil
+        yl = nil
+        yr = nil
+        foreach( term cvid->terminals
+            when( term~>pinCount != 1
+                dbClose(cvid)
+                error( "Terminal %s does not have exactly one pin" term~>name)
+            )
+            pin = car(term~>pins)
+            bbox = pin~>fig~>bBox
+            ; pin center in snap-grid units
+            pin_x = round2((xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0 / snap_dist)
+            pin_y = round2((yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0 / snap_dist)
+            if( xl == nil then
+                ; first pin initializes both extremes
+                xl = pin_x
+                xr = pin_x
+                yl = pin_y
+                yr = pin_y
+            else
+                cond( (pin_x < xl xl = pin_x yl = pin_y)
+                      (pin_x == xl yl = min(yl pin_y)))
+                cond( (pin_x > xr xr = pin_x yr = pin_y)
+                      (pin_x == xr yr = min(yr pin_y)))
+            )
+        )
+        when(xl == nil
+            ; default values if schematic has no terminals
+            ; this usually means you have a testbench schematic
+            xl = 0
+            yl = 0
+            xr = 10
+            yr = 0
+        )
+        ; convert back from grid units to coordinates
+        list((xl * snap_dist):(yl * snap_dist) (xr * snap_dist):(yr * snap_dist))
+    )
+)
+
+; update pins of a symbol
+; pin_map is a list of two-element lists, first element is old pin name, second element is new pin name.
+; sympin is a 3-element list of strings. first element is the pin master library,
+; second element is the pin mater cell, and third element is the pin master cellview.
+; simulators is a list of simulator names for which termOrder should be updated.
+; Usually simulators = '("auLvs" "auCdl" "spectre" "hspiceD")
+procedure( update_symbol_pin(lib_name cell_name pin_map new_pins sympin simulators "ttllll")
+ let( (snap_dist cvid pin_master cur_term_name new_term_name term pin bbox pin_x pin_y pin_location pin_direction
+ label_location label_rel_location temp_new_term_name new_term new_port_order cell_obj bc
+ mpin_lib mpin_cell mpin_view pin_xy_info npin_xl npin_yl npin_xr npin_yr npin_name npin_type
+ modified_pins)
+
+ snap_dist = schGetEnv("schSnapSpacing")
+ modified_pins = nil
+ mpin_lib = car(sympin)
+ mpin_cell = cadr(sympin)
+ mpin_view = caddr(sympin)
+ unless( pin_master = dbOpenCellViewByType(mpin_lib mpin_cell mpin_view nil "r")
+ error("Cannot open symbol pin cellview %s__%s (%s)." mpin_lib mpin_cell mpin_view)
+ )
+ unless( cvid = dbOpenCellViewByType(lib_name cell_name "symbol" nil "a")
+ dbClose(pin_master)
+ error("Cannot open cellview %s__%s (symbol)." lib_name cell_name)
+ )
+
+        ; get new pin locations before any pin addition/subtraction.
+ pin_xy_info = get_new_pin_locations(cvid snap_dist)
+
+ ; modify existing pins
+ new_port_order = tconc(nil "")
+ foreach( p pin_map
+ cur_term_name = car(p)
+ new_term_name = cadr(p)
+ new_port_order = tconc(new_port_order new_term_name)
+ when( cur_term_name != new_term_name
+ modified_pins = 't
+ ; printf("%s %s\n" cur_term_name new_term_name)
+ unless( term = dbFindTermByName(cvid cur_term_name)
+ dbClose(pin_master)
+ dbReopen(cvid, "r")
+ dbClose(cvid)
+ error( "Terminal %s not found." cur_term_name )
+ )
+ when( term~>pinCount != 1
+ dbClose(pin_master)
+ dbReopen(cvid, "r")
+ dbClose(cvid)
+ error( "Terminal %s does not have exactly one pin." cur_term_name)
+ )
+ pin = car(term~>pins)
+
+ if( strlen(new_term_name) != 0 then
+ ; rename pin
+ bbox = pin~>fig~>bBox
+ pin_x = (xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0
+ pin_y = (yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0
+ pin_location = round2(pin_x / snap_dist) * snap_dist:round2(pin_y / snap_dist) * snap_dist
+ pin_direction = term~>direction
+
+ ; change label
+ prog( (label_orientation label_font label_font_size label_type label_text)
+ foreach( label pin~>fig~>children
+ when( label~>objType == "label"
+ label_location = label~>xy
+ label_orientation = label~>orient
+ label_rel_location = label~>justify
+ label_font = label~>font
+ label_font_size = label~>height
+ label_type = label~>labelType
+ label_text = label~>theLabel
+ when( label_text == cur_term_name
+ schCreateSymbolLabel(cvid label_location "pin label" new_term_name label_rel_location
+ label_orientation label_font label_font_size label_type)
+ return('t)
+ )
+ )
+ )
+ return(nil)
+ )
+
+ dbDeleteObject(pin~>fig)
+ dbDeleteObject(pin)
+
+ ;create a temporary terminal with a unique name so we can change the number of bits without getting an error
+ temp_new_term_name = get_unique_term_name(cvid new_term_name)
+ schCreateSymbolPin(cvid pin_master temp_new_term_name pin_direction pin_location "R0" )
+
+ new_term = dbFindTermByName(cvid temp_new_term_name )
+ dbDeleteObject(term)
+ new_term~>name = new_term_name
+ else
+ ; remove pin
+ dbDeleteObject(pin~>fig)
+ dbDeleteObject(pin)
+ dbDeleteObject(term)
+ )
+ )
+ )
+
+ ; add new pins
+ when( new_pins
+ modified_pins = 't
+ ; get location for new pins
+ npin_xl = xCoord(car(pin_xy_info))
+ npin_yl = yCoord(car(pin_xy_info)) - 2 * snap_dist
+ npin_xr = xCoord(cadr(pin_xy_info))
+ npin_yr = yCoord(cadr(pin_xy_info)) - 2 * snap_dist
+ foreach( npin_info new_pins
+ npin_name = car(npin_info)
+ npin_type = cadr(npin_info)
+
+ ; verify that this pin does not exist yet
+ when(dbFindTermByName(cvid npin_name)
+ dbClose(pin_master)
+ dbReopen(cvid, "r")
+ dbClose(cvid)
+ error( "Terminal %s already exists" npin_name)
+ )
+
+ ; update pin order
+ new_port_order = tconc(new_port_order npin_name)
+
+ ; get pin location based on pin type
+ if( equal(npin_type "output") then
+ label_location = npin_xr:npin_yr
+ label_rel_location = "lowerLeft"
+ npin_yr = npin_yr - 2 * snap_dist
+ else
+ label_location = npin_xl:npin_yl
+ label_rel_location = "lowerRight"
+ npin_yl = npin_yl - 2 * snap_dist
+ )
+
+ ; create label and pin
+ schCreateSymbolLabel(cvid label_location "pin label" npin_name label_rel_location
+ "R0" "stick" snap_dist "normalLabel")
+ schCreateSymbolPin(cvid pin_master npin_name npin_type label_location "R0")
+ )
+ )
+
+ dbClose(pin_master)
+
+ when( modified_pins
+ ; update pin order
+ new_port_order = cdar(new_port_order)
+ schEditPinOrder(cvid new_port_order 't)
+ dbSave(cvid)
+
+ ; update termOrder for each simulators
+ cell_obj = ddGetObj(lib_name cell_name nil nil nil "r")
+ unless( bc = cdfGetBaseCellCDF(cell_obj)
+ ddReleaseObj(cell_obj)
+ dbReopen(cvid, "r")
+ dbClose(cvid)
+ error("Cannot find CDF parameters for %s__%s. Delete generated cell and try again" lib_name cell_name)
+ )
+ foreach( simu simulators
+ get(bc->simInfo simu)->termOrder = new_port_order
+ )
+ unless( cdfSaveCDF(bc)
+ ddReleaseObj(cell_obj)
+ dbReopen(cvid, "r")
+ dbClose(cvid)
+ error("Cannot save termOrder CDF for %s__%s." lib_name cell_name)
+ )
+ ddReleaseObj(cell_obj)
+ )
+ ; opening schematic will open all symbols inside that schematic.
+ ; as the result, dbClose may not close this symbol view. To get rid
+ ; of edit lock, we use dbReopen so even if dbClose fails the edit lock
+ ; will be gone.
+ dbReopen(cvid, "r")
+ dbClose(cvid)
+ )
+)
+
; record an association list from pin name to pin location in units of snap distances.
; the pin name is sorted alphabetically so we can use the equal function to test
; for equality.
;
; inst : instance database object; its master's terminals are inspected.
; returns a sorted association list of (terminal_name x:y) entries, where x/y are
; pin centers quantized to integer multiples of the schematic snap spacing.
procedure( get_instance_pin_info(inst "g")
    ; bbox is declared here so it stays local instead of leaking to global scope.
    let( (snap_dist term_name pin_fig bbox xval yval inst_term_xy ans)
        ans = nil
        snap_dist = schGetEnv("schSnapSpacing")
        foreach( term inst->master->terminals
            term_name = term~>name
            ; get terminal coordinate in symbol; assumes one pin per terminal
            ; (callers validate this via get_instance_terminal_wires).
            pin_fig = car(term~>pins)~>fig
            bbox = pin_fig~>bBox
            xval = (xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0
            yval = (yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0
            ; quantize to schematic snap spacing to avoid floating point rounding error.
            inst_term_xy = round2(xval / snap_dist):round2(yval / snap_dist)
            ans = cons(list(term_name inst_term_xy) ans)
        )
        ; sort by terminal name so two results can be compared with equal().
        sortcar(ans nil)
    )
)
+
; get all the wire objects connected to terminals of the given instance.
; we assume each terminal has exactly one pin with 1 wire connected, with a
; single label on the wire. The wire doesn't connect to anything else.
;
; sch  : schematic database object (closed before raising any error).
; inst : instance whose instance terminals are inspected.
; returns list(ans net_map), where ans is an association list from terminal name
; to wire figure object, and net_map is an association list from terminal name
; to connected net name.
procedure( get_instance_terminal_wires(sch inst "gg")
    ; bbox and points are declared here so they stay local instead of leaking
    ; to global scope.
    let( (snap_dist term_name pin_fig bbox points xval yval inst_term_xy net_name ans net_map)
        ans = nil
        net_map = nil
        snap_dist = schGetEnv("schSnapSpacing")
        foreach( inst_term inst~>instTerms
            term_name = inst_term~>name
            when( inst_term~>term~>pinCount != 1
                dbClose(sch)
                error("Terminal %s must have exactly one pin." term_name)
            )
            unless( pin_fig = car(inst_term~>term~>pins)~>fig
                dbClose(sch)
                error("Cannot find pin figure for terminal %s" term_name)
            )
            ; get instance terminal coordinate in schematic
            bbox = dbTransformBBox(pin_fig~>bBox inst~>transform)
            xval = xCoord(car(bbox)) + (xCoord(cadr(bbox)) - xCoord(car(bbox))) / 2.0
            yval = yCoord(car(bbox)) + (yCoord(cadr(bbox)) - yCoord(car(bbox))) / 2.0
            ; quantize to schematic snap spacing to avoid floating point rounding error.
            inst_term_xy = round2(xval / snap_dist) * snap_dist:round2(yval / snap_dist) * snap_dist
            net_name = inst_term~>net~>name
            net_map = cons(list(term_name net_name) net_map)
            foreach( fig inst_term~>net~>figs
                points = fig~>points
                when( member(inst_term_xy points)
                    when( length(points) != 2
                        ; close the schematic before erroring, consistent with
                        ; the other error paths in this function.
                        dbClose(sch)
                        error("pin for terminal %s must be connected to a single wire with label" term_name)
                    )
                    ans = cons(list(term_name fig) ans)
                )
            )
        )
        list(ans net_map)
    )
)
+
+; Modify the instance terminal connections of the given instance.
+; we assume each terminal to modify has at most 1 wire connected,
+; if it exists, the wire connects to nothing else, and it has a label.
+; In this way, this function just have to change the label text.
+;
+; if wire_list is not empty, then that means each terminal has exactly one
+; wire connected. This function will update the label on the wires according
+; to term_mapping.
+;
+; if wire_list is empty, then that means no wires are connected to terminals.
+; this function will attach labels directly to each terminal. The labels are
+; determined first from term_mapping, then from net_map
+;
+; sch is the schematic database object. Must be opened in append/write mode.
+; inst is the instance object to modify.
+; term_mapping is a list of key-value pairs, where keys are old net names,
+; and values are new net names.
procedure( modify_instance_terminal(sch inst wire_list net_map term_mapping "gglll")
    let( (snap_dist key_val old_name new_name fig points mid_point new_wire inst_term inst_pin
          bbox xval yval term_map_final db_term)
        ; get schematic snap distance spacing.
        snap_dist = schGetEnv("schSnapSpacing")
        if( wire_list then
            ; each terminal to rename already has exactly one two-point labeled
            ; wire; re-label by deleting and re-creating the wire.
            foreach( wire_info wire_list
                old_name = car(wire_info)
                when(key_val = assoc(old_name term_mapping)
                    new_name = cadr(key_val)
                    fig = cadr(wire_info)
                    points = fig~>points
                    ; label goes at the midpoint of the (two-point) wire.
                    mid_point = foreach(mapcar (c1 c2) car(points) cadr(points) (c1 + c2) / 2.0)
                    ; delete old wire, then add wire back with new label.
                    schDelete(fig)
                    new_wire = car(schCreateWire(sch "draw" "full" points snap_dist snap_dist 0))
                    schCreateWireLabel(sch new_wire mid_point new_name "lowerCenter" "R0" "stick" 0.0625 nil)
                )
            )
            't
        else
            ; combine net_map and term_mapping
            term_map_final = copy(term_mapping)
            foreach( net_info net_map
                old_name = car(net_info)
                unless( assoc(old_name term_map_final)
                    ; add net mapping only if it's not in term_mapping
                    term_map_final = cons(net_info term_map_final)
                )
            )
            foreach( net_info term_map_final
                old_name = car(net_info)
                new_name = cadr(net_info)

                when(db_term = dbFindTermByName(inst->master old_name)
                    ; only create terminal that's present in the current master
                    inst_term = dbCreateInstTerm(nil inst db_term)
                    inst_pin = car(inst_term~>term~>pins)~>fig
                    ; terminal pin center in schematic coordinates, snapped to grid.
                    bbox = dbTransformBBox(inst_pin~>bBox inst~>transform)
                    xval = (xCoord(car(bbox)) + xCoord(cadr(bbox))) / 2.0
                    yval = (yCoord(car(bbox)) + yCoord(cadr(bbox))) / 2.0
                    xval = round2(xval / snap_dist) * snap_dist
                    yval = round2(yval / snap_dist) * snap_dist

                    ; draw a short stub wire ending at the pin, labeled with the new net name.
                    new_wire = car(schCreateWire(sch "draw" "full" list(xval-snap_dist:yval-snap_dist xval:yval)
                        snap_dist snap_dist 0))
                    schCreateWireLabel(sch new_wire xval:yval new_name "lowerCenter" "R0" "stick" 0.0625 nil)
                )
            )
            't
        )
    )
)
+
; Open the given cellview in append mode, then run check-and-save on it.
; (The previous header comment was a copy-paste from convert_template_cells.)
;
; lib_name / cell_name / view_name : cellview to check and save.
procedure( check_and_save_cell(lib_name cell_name view_name "ttt")
    let( (cv)
        ; attempt to open cellview in append mode
        unless( cv = dbOpenCellViewByType(lib_name cell_name view_name nil "a")
            error("Cannot open %s__%s (%s) in append mode." lib_name cell_name view_name)
        )
        ; check and save; check_and_save_schematic also closes the cellview.
        ; fix: was check_and_save_schematic(sch) with sch undefined in this scope.
        check_and_save_schematic(cv)
    )
)
+
; Perform check-and-save on the given schematic database object, then close it.
; Hierarchy check errors are reported as warnings instead of aborting.
procedure( check_and_save_schematic(sch "g")
    let( (err_list)
        ; configure the checker to save while descending the hierarchy.
        schSetEnv( "checkHierSave" 't)
        schSetEnv( "saveAction" "Save")
        err_list = schCheckHier(sch "schematic symbol" "")
        foreach( err_info err_list
            warn( "%s__%s (%s) has %d errors." car(err_info)~>lib~>name car(err_info)~>cellName car(err_info)~>viewName cadr(err_info))
        )
        ; make sure all edit locks are gone by reopening in read mode
        dbReopen(sch, "r")
        dbClose(sch)
    )
)
+
+
+; modify a schematic cell. Used to convert copied template cells into concrete instantiation.
+;
+; inst_list is an association list of (inst_name, rinst_list) pairs. Where:
+;
+; inst_name : name of the instance in the template cell.
+; rinst_list : a list of rinsts, which are instances to replace the original instance by.
+; If this list is empty, the original instance should be deleted. If the list
+; has more than one element, we should array the original instance.
+;
+; Each rinst is a disembodied property lists, with the properties:
+;
+; rinst->name : the name of this rinst.
+; rinst->lib_name : the instance master library.
+; rinst->cell_name : the instance master cell.
+; rinst->params : an association list of the CDF params of this rinst. The values are always string.
+; rinst->term_mapping : an association list of the modified terminal connections of this rinst.
+; if no connections are changed, this list should be empty.
+;
+; (You can read more about disembodied property lists and association list in the skill
+; language user guide).
+;
+; For each instance, this function does the following:
+; 1. Find the instance with the given name.
+; 2. If rinst_list is nil, delete this instance.
+; 3. If rinst_list has exactly one element:
+; i. rename the instance name to rinst's name.
+; ii. change the instance master of the instance.
+; iii. change the CDF parameters (this should only happen with BAG primitives).
+; iv. change the port connections of this instance.
+; 4. If rinst_list has more than one element, for each additional element,
+; copy the original instance and perform step 3 on that instance.
+;
+; This procedure allows one to delete or array any instances in the schematic template.
; See the block comment above for the full contract of inst_list / rinst_list / rinst.
;
; sch_cv    : schematic database object, opened in append/write mode by the caller.
; inst_list : association list of (inst_name rinst_list) pairs.
procedure( modify_schematic_content(sch_cv inst_list "gl")
    ; removed the undeclared, never-read `last_inst` assignment and the unused
    ; `errmsg` local from the original implementation.
    let( (inst_obj inst_name rinst_list rinst_len cur_inst wire_list net_map par_val xl xr transform
          snap_dist pin_info tmp_result)
        snap_dist = schGetEnv("schSnapSpacing")
        foreach( inst inst_list
            inst_name = car(inst)
            unless( inst_obj = dbFindAnyInstByName(sch_cv inst_name)
                dbClose(sch_cv)
                error( "Cannot find instance %s" inst_name )
            )
            rinst_list = cadr(inst)
            rinst_len = length(rinst_list)
            if( rinst_len == 0 then
                ; no instances to replace by, delete.
                wire_list = car(get_instance_terminal_wires(sch_cv inst_obj))
                ; delete wires connected to instance
                foreach( wire_info wire_list
                    schDelete(cadr(wire_info))
                )
                ; delete instance
                dbDeleteObject(inst_obj)
            else
                cur_inst = nil
                pin_info = nil
                foreach( rinst rinst_list
                    if( !cur_inst then
                        ; first rinst reuses the original instance in place.
                        cur_inst = inst_obj
                        tmp_result = get_instance_terminal_wires(sch_cv cur_inst)
                        net_map = cadr(tmp_result)
                        wire_list = car(tmp_result)
                        pin_info = get_instance_pin_info(cur_inst)
                        ; figure out bounding box for potential future array:
                        ; include both the instance and its connected wires.
                        xl = xCoord(car(cur_inst~>bBox))
                        xr = xCoord(cadr(cur_inst~>bBox))
                        foreach( wire_info wire_list
                            xl = min(xl xCoord(car(cadr(wire_info)~>bBox)))
                            xr = max(xr xCoord(cadr(cadr(wire_info)~>bBox)))
                        )
                        ; shift each array copy right by the snapped overall width.
                        transform = list(round2((xr - xl + snap_dist) / snap_dist) * snap_dist:0 "R0" 1.0)
                    else
                        ; more than 1 rinst, copy cur_inst, do not copy wires
                        wire_list = nil
                        ; copy instance
                        cur_inst = dbCopyFig(cur_inst nil transform)
                    )
                    ; change instance name and master
                    when(cur_inst->name != rinst->name
                        cur_inst->name = rinst->name
                    )
                    schReplaceProperty(list(cur_inst) "master" sprintf(nil "%s %s %s" rinst->lib_name
                                       rinst->cell_name cur_inst->viewName))
                    ; set parameters
                    foreach( cdf_par cdfGetInstCDF(cur_inst)~>parameters
                        par_val = cadr(assoc(cdf_par->name rinst->params))
                        ; change CDF parameter value only if specified in given parameters
                        when( par_val != nil
                            cdf_par->value = par_val
                        )
                    )
                    when( wire_list
                        ; if wire_list is not empty, check that the pins match. If so, keep wires around,
                        ; otherwise, delete wires
                        unless( equal(pin_info get_instance_pin_info(cur_inst))
                            ; delete wires connected to instance
                            foreach( wire_info wire_list
                                schDelete(cadr(wire_info))
                            )
                            wire_list = nil
                        )
                    )
                    ; modify connections, keeping old wires around
                    modify_instance_terminal(sch_cv cur_inst wire_list net_map rinst->term_mapping)
                )
            )
        )
    )
)
+
; given a copied template cell, modify it to a concrete schematic.
; Updates the symbol pins (if a symbol view exists), then the schematic
; instances/connections and schematic pins, and finally check-and-saves.
procedure( convert_template_cells(lib_name cell_name pin_map new_pins inst_list sympin ipin opin iopin simulators)
    let( (sym_cv sch)
        ; update symbol view first.
        if( sym_cv = dbOpenCellViewByType(lib_name cell_name "symbol" nil "r") then
            ; this handle is only a probe for existence; close it right away so
            ; it does not leak (update_symbol_pin opens its own handle).
            dbClose(sym_cv)
            printf("*INFO* Updating %s__%s symbol pins.\n" lib_name cell_name)
            update_symbol_pin(lib_name cell_name pin_map new_pins sympin simulators)
        else
            warn("Did not find symbol for %s__%s. Skipping. Is it testbench?" lib_name cell_name)
        )

        ; attempt to open schematic in append mode
        unless( sch = dbOpenCellViewByType(lib_name cell_name "schematic" nil "a")
            error("Cannot open %s__%s (schematic) in append mode." lib_name cell_name)
        )
        ; update schematic content
        printf("*INFO* Updating %s__%s instances and connections.\n" lib_name cell_name)
        modify_schematic_content(sch inst_list)
        ; update schematic pins
        printf("*INFO* Updating %s__%s schematic pins.\n" lib_name cell_name)
        update_schematic_pin(sch pin_map new_pins ipin opin iopin)
        ; this also closes sch.
        check_and_save_schematic(sch)
    )
)
+
; create concrete schematics
;
; lib_name    : library to create the concrete cells in.
; tech_lib    : technology library attached when (re)creating lib_name.
; lib_path    : directory in which to create lib_name.
; temp_file   : file containing the template cell list.
; change_file : file containing the per-cell change list.
; copy        : if non-nil, (re)create the library and copy templates first.
procedure( create_concrete_schematic( lib_name tech_lib lib_path temp_file change_file
                                      sympin ipin opin iopin simulators copy "tttttlllllg" )
    ; new_pins is declared here so it stays local instead of leaking to global scope.
    let( (template_list change_list cell_name pin_map new_pins inst_list)
        printf("*INFO* Reading template and change list from file\n")
        template_list = parse_data_from_file( temp_file )
        change_list = parse_data_from_file( change_file )
        when( copy
            printf("*INFO* Creating library: %s\n" lib_name)
            create_or_erase_library( lib_name tech_lib lib_path nil )
            printf("*INFO* Copying templates to library: %s\n" lib_name)
            copy_templates_to_library( lib_name template_list )
        )
        foreach( change change_list
            cell_name = change->name
            pin_map = change->pin_map
            new_pins = change->new_pins
            inst_list = change->inst_list
            printf("*INFO* Updating cell %s__%s\n" lib_name cell_name)
            convert_template_cells( lib_name cell_name pin_map new_pins inst_list
                                    sympin ipin opin iopin simulators )
        )
        't
    )
)
+
; create a new layout view then instantiate a single pcell instance.
; this method also copy all the labels in the pcell top level. In this way LVS/PEX will
; work correctly.
; params is a list of (variable_name type_string value) lists.
; pin_mapping is a list of (old_pin new_pin) lists.
procedure( create_layout_with_pcell(lib_name cell_name view_name inst_lib inst_cell params_f pin_mapping_f "ttttttt")
    let( (lay_cv inst_master inst inst_shapes label_location label_orientation label_lpp
          label_just label_font label_height label_type label_text params pin_mapping new_label)
        unless( lay_cv = dbOpenCellViewByType(lib_name cell_name view_name "maskLayout" "w")
            error("Cannot open cellview %s__%s (%s)." lib_name cell_name view_name)
        )
        unless( inst_master = dbOpenCellViewByType(inst_lib inst_cell "layout" "maskLayout" "r")
            dbClose(lay_cv)
            error("Cannot open cellview %s__%s (layout)." inst_lib inst_cell)
        )

        params = parse_data_from_file(params_f)
        pin_mapping = parse_data_from_file(pin_mapping_f)

        inst = dbCreateParamInst(lay_cv inst_master "XTOP" '(0 0) "R0" 1 params)
        inst_shapes = inst~>master~>shapes

        ; copy every top-level label of the pcell into the new view, renaming
        ; through pin_mapping where a mapping exists.
        foreach(shape inst_shapes
            when( shape->objType == "label"
                label_location = shape~>xy
                label_orientation = shape~>orient
                label_lpp = shape~>lpp
                label_just = shape~>justify
                label_font = shape~>font
                label_height = shape~>height
                label_type = shape~>labelType
                label_text = shape~>theLabel
                ; look up the mapping once instead of twice.
                new_label = cadr(assoc(label_text pin_mapping))
                when( new_label
                    label_text = new_label
                )
                dbCreateLabel(lay_cv label_lpp label_location label_text label_just label_orientation label_font label_height )
            )
        )

        dbClose(inst_master)
        dbSave(lay_cv)
        dbClose(lay_cv)
    )
)
+
; helper for creating a path segment
; cv / lay         : target cellview and layer.
; p0, p1           : segment endpoints as (x y) lists.
; width            : drawn width; scaled by sqrt(2) for diagonal segments.
; start_s / end_s  : end styles; "round" is emulated via custom extensions.
procedure( create_path_seg_helper(cv lay p0 p1 width start_s end_s)
    let( (diag_ext begin_ext end_ext ext_info)
        ; a segment is diagonal when both coordinates differ between its endpoints.
        if( and(car(p0) != car(p1) cadr(p0) != cadr(p1)) then
            diag_ext = width / 2
            width = width * sqrt(2)
        else
            diag_ext = width * sqrt(2) / 2
        )

        begin_ext = 0
        end_ext = 0
        ; emulate round ends with half-width custom extensions
        ; (note: uses the possibly sqrt(2)-scaled width).
        when( start_s == "round"
            begin_ext = width / 2
            start_s = "custom"
        )
        when( end_s == "round"
            end_ext = width / 2
            end_s = "custom"
        )
        ext_info = list(begin_ext end_ext list(diag_ext diag_ext width/2 diag_ext diag_ext width/2))
        dbCreatePathSeg(cv lay p0 p1 width start_s end_s ext_info)
    )
)
+
+
; helper for creating a path
; cv   : layout cellview to draw in.
; path : disembodied property list with layer, width, points, end_style and
;        join_style; one segment is drawn per consecutive point pair.
procedure( create_path_helper( cv path )
    let( (seg_layer seg_width pt_list estyle jstyle prev_pt npts seg_idx seg_start seg_end)
        seg_layer = path->layer
        seg_width = path->width
        pt_list = path->points
        estyle = path->end_style
        jstyle = path->join_style
        npts = length(pt_list)
        prev_pt = nil
        seg_idx = 0
        foreach( cur_pt pt_list
            ; skip the very first point; every later point closes a segment.
            when( seg_idx > 0
                ; outermost segment ends get the end style, interior joints the join style.
                seg_start = if( seg_idx == 1 then estyle else jstyle )
                seg_end = if( seg_idx == npts - 1 then estyle else jstyle )
                create_path_seg_helper(cv seg_layer prev_pt cur_pt seg_width seg_start seg_end)
            )
            prev_pt = cur_pt
            seg_idx = seg_idx + 1
        )
    )
)
+
+
; helper for creating a single layout view
;
; cv            : target layout cellview, opened for write by the caller.
; tech_file     : technology file handle, used only to look up via definitions.
; inst_list     : instance property lists (lib/cell/view/name/loc/orient, optional
;                 params/param_order, and mosaic fields num_rows/num_cols/sp_rows/sp_cols).
; rect_list     : rectangle property lists (layer/bbox, optional arr_nx/ny/spx/spy arraying).
; via_list      : via property lists (id/loc/orient/enclosures/cut geometry, optional arraying).
; pin_list      : pin property lists (bbox/layer/label/net_name/pin_name/make_rect).
; path_list     : path property lists (see create_path_helper).
; blockage_list : blockage property lists (btype/layer/points).
; boundary_list : boundary property lists (btype/points).
; polygon_list  : polygon property lists (layer/points).
;
; Objects that fail to create are warned about and skipped, not fatal.
procedure( create_layout_helper( cv tech_file inst_list rect_list via_list pin_list path_list
                   blockage_list boundary_list polygon_list "ggllllllll" )
    let( (inst_cv obj via_def via_enc1 via_enc2 enc1 enc2 off1 off2 via_params make_pin_rect
          pin_bb pin_w pin_h pin_xc pin_yc pin_orient label_h param_order orig_shape arr_dx arr_dy)

        ; create instances
        foreach( inst inst_list
            if( inst_cv = dbOpenCellViewByType( inst->lib inst->cell inst->view nil "r" ) then

                ; single instance vs. simple mosaic, and pcell vs. plain master.
                if( and( inst->num_rows==1 inst->num_cols==1) then
                    if( inst->params != nil then
                        ; create pcell instance
                        obj = dbCreateParamInst(cv inst_cv inst->name inst->loc inst->orient 1 inst->params)
                        ; execute parameter callbacks
                        when( obj
                            ; default callback order is the order the params were given in.
                            if( inst->param_order != nil then
                                param_order = inst->param_order
                            else
                                param_order = mapcar( lambda( (x) car(x) ) inst->params )
                            )
                            CCSinvokeCdfCallbacks(obj ?order param_order)
                        )
                    else
                        obj = dbCreateInst(cv inst_cv inst->name inst->loc inst->orient)
                    )
                else
                    if( inst->params != nil then
                        ; create pcell mosaic
                        obj = dbCreateParamSimpleMosaic(cv inst_cv inst->name inst->loc inst->orient
                                  inst->num_rows inst->num_cols inst->sp_rows inst->sp_cols
                                  inst->params)
                        ; execute parameter callbacks
                        when( obj
                            if( inst->param_order != nil then
                                param_order = inst->param_order
                            else
                                param_order = mapcar( lambda( (x) car(x) ) inst->params )
                            )
                            CCSinvokeCdfCallbacks(obj ?order param_order)
                        )
                    else
                        obj = dbCreateSimpleMosaic(cv inst_cv inst->name inst->loc inst->orient
                                  inst->num_rows inst->num_cols inst->sp_rows inst->sp_cols)
                    )
                )
                unless( obj
                    warn("Error creating instance %s of %s__%s (%s). Skipping." inst->name inst->lib inst->cell inst->view)
                )

            else
                warn("Cannot find instance %s__%s (%s). Skipping." inst->lib inst->cell inst->view)
            )
        )

        ; create rectangles
        foreach( rect rect_list
            orig_shape = dbCreateRect(cv rect->layer rect->bbox)
            if( not(orig_shape) then
                warn("Error creating rectangle of layer %A. Skipping." rect->layer)
            else
                ; optional arraying: copy columns 2..nx over all rows, then the
                ; remaining rows 2..ny of the first column.
                when( rect->arr_nx != nil
                    for(icol 2 rect->arr_nx
                        arr_dx = rect->arr_spx * (icol - 1)
                        for(irow 1 rect->arr_ny
                            arr_dy = rect->arr_spy * (irow - 1)
                            dbCopyFig(orig_shape nil list(arr_dx:arr_dy "R0" 1))
                        )
                    )
                    for(irow 2 rect->arr_ny
                        arr_dy = rect->arr_spy * (irow - 1)
                        dbCopyFig(orig_shape nil list(0:arr_dy "R0" 1))
                    )
                )
            )
        )

        ; create paths
        foreach( path path_list
            create_path_helper(cv path)
        )

        ; create polygons
        foreach( poly polygon_list
            dbCreatePolygon(cv poly->layer poly->points)
        )

        ; create blockages
        foreach( block blockage_list
            ; placement blockages are area blockages; all others are per-layer.
            if( block->btype == "placement" then
                dbCreateAreaBlockage(cv block->points)
            else
                dbCreateLayerBlockage(cv block->layer block->btype block->points)
            )
        )

        ; create boundaries
        foreach( bound boundary_list
            cond( (bound->btype == "PR"
                   dbCreatePRBoundary(cv bound->points))
                  (bound->btype == "snap"
                   dbCreateSnapBoundary(cv bound->points))
                  (bound->btype == "area"
                   dbCreateAreaBoundary(cv bound->points))
                  ('t
                   warn("Unknown boundary type %s. Skipping." bound->btype))
            )
        )

        ; create vias
        foreach( via via_list
            if( via_def = techFindViaDefByName(tech_file via->id) then
                ; compute via parameter list
                ; NOTE(review): enc1/enc2 appear to be per-edge enclosure lists;
                ; the average of opposing edges gives the symmetric enclosure and
                ; half their difference the offset — confirm the edge ordering
                ; against the generator that writes via->enc1/enc2.
                via_enc1 = via->enc1
                via_enc2 = via->enc2
                enc1 = list( (car(via_enc1) + cadr(via_enc1)) / 2.0
                             (caddr(via_enc1) + cadr(cddr(via_enc1))) / 2.0 )
                enc2 = list( (car(via_enc2) + cadr(via_enc2)) / 2.0
                             (caddr(via_enc2) + cadr(cddr(via_enc2))) / 2.0 )
                off1 = list( (cadr(via_enc1) - car(via_enc1)) / 2.0
                             (caddr(via_enc1) - cadr(cddr(via_enc1))) / 2.0 )
                off2 = list( (cadr(via_enc2) - car(via_enc2)) / 2.0
                             (caddr(via_enc2) - cadr(cddr(via_enc2))) / 2.0 )

                via_params = list( list("cutRows" via->num_rows)
                                   list("cutColumns" via->num_cols)
                                   list("cutSpacing" list(via->sp_cols via->sp_rows))
                                   list("layer1Enc" enc1)
                                   list("layer2Enc" enc2)
                                   list("layer1Offset" off1)
                                   list("layer2Offset" off2) )

                ; if via width and height given, add to via_params
                when( via->cut_width != nil
                    via_params = cons( list("cutWidth" via->cut_width) via_params)
                )
                when( via->cut_height != nil
                    via_params = cons( list("cutHeight" via->cut_height) via_params)
                )

                ; create actual via
                orig_shape = dbCreateVia(cv via_def via->loc via->orient via_params)
                if( not(orig_shape) then
                    warn("Error creating via %s. Skipping." via->id)
                else
                    ; optional arraying, same scheme as for rectangles above.
                    when( via->arr_nx != nil
                        for(icol 2 via->arr_nx
                            arr_dx = via->arr_spx * (icol - 1)
                            for(irow 1 via->arr_ny
                                arr_dy = via->arr_spy * (irow - 1)
                                dbCopyFig(orig_shape nil list(arr_dx:arr_dy "R0" 1))
                            )
                        )
                        for(irow 2 via->arr_ny
                            arr_dy = via->arr_spy * (irow - 1)
                            dbCopyFig(orig_shape nil list(0:arr_dy "R0" 1))
                        )
                    )
                )
            else
                warn("Via %s not found. Skipping." via->id)
            )
        )

        ; create pins
        foreach( pin pin_list
            pin_bb = pin->bbox
            pin_w = caadr(pin_bb) - caar(pin_bb)
            pin_h = cadr(cadr(pin_bb)) - cadr(car(pin_bb))
            pin_xc = (caar(pin_bb) + caadr(pin_bb)) / 2.0
            pin_yc = (cadr(car(pin_bb)) + cadr(cadr(pin_bb))) / 2.0

            ; orient the label along the longer side of the pin rectangle.
            if( pin_w >= pin_h then
                pin_orient = "R0"
                label_h = pin_h
            else
                pin_orient = "R90"
                label_h = pin_w
            )

            ; get make_pin_rect, true if both net_name and pin_name are non-empty
            make_pin_rect = pin->net_name != "" && pin->pin_name != ""
            ; explicit make_rect overrides the default above.
            when( pin->make_rect != nil
                make_pin_rect = pin->make_rect
            )
            ; create pin object only if make_pin_rect is True.
            when( make_pin_rect != 0 && make_pin_rect != nil
                dbCreatePin( dbMakeNet(cv pin->net_name) dbCreateRect(cv pin->layer pin_bb) pin->pin_name )
            )
            ; the label is always created, even when no pin object is made.
            dbCreateLabel( cv pin->layer list(pin_xc pin_yc) pin->label "centerCenter" pin_orient "roman" label_h )
        )
    )
)
+
; create a new layout view with the given geometries
; layout_f is a file containing a list of per-cell entries; each entry holds the
; cell name followed by instance/rect/via/pin/path/blockage/boundary/polygon lists.
procedure( create_layout( lib_name view_name via_tech layout_f "tttt" )
    ; fix: the arg-type template was "ttt" for four string parameters, so
    ; layout_f was never type-checked; it is now "tttt".
    let( (tech_file layout_info cell_name inst_list rect_list via_list pin_list
          path_list blockage_list boundary_list polygon_list cv)

        unless( tech_file = techGetTechFile(ddGetObj(via_tech))
            error("Via technology file %s not found." via_tech)
        )

        layout_info = parse_data_from_file(layout_f)
        foreach( info layout_info
            ; unpack the fixed-position entry fields.
            cell_name = nthelem(1 info)
            inst_list = nthelem(2 info)
            rect_list = nthelem(3 info)
            via_list = nthelem(4 info)
            pin_list = nthelem(5 info)
            path_list = nthelem(6 info)
            blockage_list = nthelem(7 info)
            boundary_list = nthelem(8 info)
            polygon_list = nthelem(9 info)

            unless( cv = dbOpenCellViewByType( lib_name cell_name view_name "maskLayout" "w" )
                error("Cannot create new layout cell %s__%s (%s)." lib_name cell_name view_name)
            )

            printf("Creating %s__%s (%s)\n" lib_name cell_name view_name)
            create_layout_helper(cv tech_file inst_list rect_list via_list pin_list path_list
                                 blockage_list boundary_list polygon_list)

            dbSave(cv)
            dbClose(cv)
        )

        t
    )
)
+
+
; close all opened cellviews
; purges every currently open cellview from virtual memory; always returns 't.
procedure( close_all_cellviews()
    mapcar( 'dbPurge dbGetOpenCellViews() )
    't
)
+
+
; release write locks from all the given cellviews
; cell_view_list_f is a file holding a list of (cell view) pairs; each matching
; open cellview in lib_name is reopened read-only and closed.
procedure( release_write_locks( lib_name cell_view_list_f "tt" )
    let( (cv_list lib_dd cv_obj)
        cv_list = parse_data_from_file(cell_view_list_f)
        when( lib_dd = ddGetObj(lib_name nil nil nil nil "r")
            foreach( cv_spec cv_list
                when( cv_obj = dbFindOpenCellView( lib_dd car(cv_spec) cadr(cv_spec) )
                    ; reopening read-only drops the edit lock even if close fails.
                    dbReopen(cv_obj, "r")
                    dbClose(cv_obj)
                )
            )
            ddReleaseObj(lib_dd)
        )
        t
    )
)
+
; refresh all given cell views
; cell_view_list_f is a file holding a list of (cell view) pairs; each matching
; open cellview in lib_name is refreshed from disk.
procedure( refresh_cellviews( lib_name cell_view_list_f "tt" )
    let( (cv_list lib_dd cv_obj)
        cv_list = parse_data_from_file(cell_view_list_f)
        when( lib_dd = ddGetObj(lib_name nil nil nil nil "r")
            foreach( cv_spec cv_list
                when( cv_obj = dbFindOpenCellView( lib_dd car(cv_spec) cadr(cv_spec) )
                    dbRefreshCellView(cv_obj)
                )
            )
            ddReleaseObj(lib_dd)
        )
        t
    )
)
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; Simulation/Testbench related functions ;;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+
; set an entry in an association list
; returns the modified association list.
; NOTE(review): this only replaces the value of an EXISTING key (in place, via
; rplacd, so the input list is mutated); if mykey is not present the list is
; returned unchanged. Confirm callers never rely on insertion of new keys.
procedure( set_assoc_list(mylist mykey myval)
    let( (tmp)
        when( tmp = assoc(mykey mylist)
            ; print("replacing")
            rplacd(tmp list(myval))
        )
    )
    ; returned outside the let on purpose: the (possibly mutated) original list.
    mylist
)
+
; Copy the schematic of a testbench, and replace the DUT instance.
;
; This procedure copies the schematic of a testbench to a new library and cell, then finds all
; instances with the name prefix "XDUT", then change their instance master to dut_lib and dut_cell.
;
; Errors if the target library's schematic cannot be opened, or if no instance
; with the XDUT prefix was found (after saving).
procedure( copy_testbench(master_lib master_cell targ_lib
                          dut_lib dut_cell tech_lib new_lib_path "ttttttt")
    ; src_gdm/targ_gdm are declared here so they stay local instead of leaking
    ; to global scope.
    let( (tlib_obj sch replace_count inst_prefix new_master src_gdm targ_gdm)
        inst_prefix = "XDUT"

        printf("Copying testbench %s__%s to %s__%s\n" master_lib master_cell targ_lib master_cell)

        ; create target library if does not exist
        unless( tlib_obj = ddGetObj(targ_lib nil nil nil nil "r")
            when( and(new_lib_path (new_lib_path != "."))
                ; fix: was strcat(... lib_name) with lib_name undefined in this
                ; scope; the library directory is named after targ_lib.
                new_lib_path = strcat(new_lib_path "/" targ_lib)
            )
            tlib_obj = ddCreateLib(targ_lib new_lib_path)
            ; attach technology file
            techBindTechFile(tlib_obj tech_lib)
        )

        ; copy testbench to new library
        src_gdm = gdmCreateSpecList()
        gdmAddSpecToSpecList(gdmCreateSpec(master_lib master_cell nil nil "CDBA") src_gdm)
        targ_gdm = gdmCreateSpecList()
        gdmAddSpecToSpecList(gdmCreateSpec(targ_lib master_cell nil nil "CDBA") targ_gdm)
        ccpCopy(src_gdm targ_gdm 't 'CCP_EXPAND_COMANAGED)

        ; open copied schematic
        unless( sch = dbOpenCellViewByType(tlib_obj master_cell "schematic" nil "a")
            ddReleaseObj(tlib_obj)
            error("Cannot open testbench schematic %s__%s" targ_lib master_cell)
        )

        ; replace instances
        replace_count = 0
        sprintf(new_master "%s %s symbol" dut_lib dut_cell)
        foreach( inst sch~>instances
            when( strncmp( inst~>name inst_prefix strlen(inst_prefix) ) == 0
                replace_count = replace_count + 1
                schReplaceProperty(list(inst) "master" new_master)
            )
        )

        ; save and close resources
        check_and_save_schematic(sch)
        ddReleaseObj(tlib_obj)

        ; error if nothing is replaced
        when( replace_count == 0
            error("Cannot find any instances in %s__%s with name prefix %s" targ_lib master_cell inst_prefix)
        )
        't
    )
)
+
; opens an adexl session. Returns a list of session name and setup database handle.
; the session is closed again if the setup database cannot be loaded.
procedure( open_adexl_session(tb_lib tb_cell tb_view session_name mode "ttttt")
    let( (axl_session setup_db)
        unless( axl_session = axlCreateSession(session_name)
            error("Cannot create temporary adexl session: %s" session_name)
        )
        unless( setup_db = axlSetMainSetupDBLCV(axl_session tb_lib tb_cell tb_view ?mode mode)
            ; don't leak the session on failure.
            axlCloseSession(axl_session)
            error("Cannot load adexl database from %s__%s (%s)" tb_lib tb_cell tb_view)
        )
        list(axl_session setup_db)
    )
)
+
; Enables only the given corners in the simulation setup database.
; sdb            : adexl setup database handle.
; corner_list    : corner names to enable; every other corner is disabled.
; env_param_list : list of (corner_name var_value_pairs) entries whose variables
;                  are written into the named corner.
procedure( enable_adexl_corners( sdb corner_list env_param_list "gll")
    let( (corner_name var_pairs corner_obj)
        ; enable exactly the listed corners, disable the rest.
        foreach(existing_name cadr(axlGetCorners(sdb))
            axlSetEnabled( axlGetCorner(sdb existing_name) member(existing_name corner_list) )
        )
        ; push per-corner environment variables.
        foreach(env_entry env_param_list
            corner_name = car(env_entry)
            var_pairs = cadr(env_entry)
            corner_obj = axlGetCorner(sdb corner_name)
            foreach(var_pair var_pairs
                axlPutVar(corner_obj car(var_pair) cadr(var_pair))
            )
        )
    )
)
+
; Set testbench parameters
; par_val_list is an association list from variable names to variable values as
; strings, which may be constant values or parametric sweep strings.
procedure( set_adexl_parameters(sdb par_val_list "gl")
    foreach( param_pair par_val_list
        axlPutVar(sdb car(param_pair) cadr(param_pair))
    )
)
+
; Create a new config view for a testbench.
;
; lib_name  : testbench library name.
; cell_name : testbench cell name.
; view_name : name of the config view (a testbench can have multiple config views)
; libs      : a string of global libraries, separated by spaces.
; views     : a string of cellviews to use, separated by spaces.
; stops     : a string of cellviews to stop at, separated by spaces.
procedure( create_config_view(lib_name cell_name view_name libs views stops "tttttt")
    let( (cfg cfg_bag)
        printf("Creating config view %s__%s (%s)\n" lib_name cell_name view_name)

        unless( cfg = hdbOpen(lib_name cell_name view_name "w")
            error("Cannot open config view %s__%s (%s)." lib_name cell_name view_name)
        )
        ; top cell is always the schematic of the testbench itself.
        hdbSetTopCellViewName(cfg lib_name cell_name "schematic")
        hdbSetDefaultLibListString(cfg libs)
        hdbSetDefaultViewListString(cfg views)
        hdbSetDefaultStopListString(cfg stops)
        hdbSaveAs(cfg lib_name cell_name view_name)

        ; close configuration
        cfg_bag = hdbCreateConfigBag()
        hdbAddConfigToBag(cfg_bag cfg)
        hdbCloseConfigsInBag(cfg_bag)
    )
)
+
; edit the config view of a testbench. Use to control whether we're simulating with
; schematic or post-extraction.
;
; lib_name  : testbench library name.
; cell_name : testbench cell name.
; view_name : name of the config view (a testbench can have multiple config views)
; conf_list : a list of (lib, cell, view) configurations, where each entry
;             means that view <view> should be used for cell <cell> in
;             library <lib>. The special view name "netlist" selects the
;             cell's extracted netlist file instead of a cellview.
procedure( edit_config_view(lib_name cell_name view_name conf_list "tttl")
    let( (conf lib cell view conf_bag netlist_list)
        unless( conf = hdbOpen(lib_name cell_name view_name "a")
            error("Cannot open config view %s__%s (%s)." lib_name cell_name view_name)
        )
        netlist_list = '()
        foreach( cell_config conf_list
            lib = car(cell_config)
            cell = cadr(cell_config)
            view = caddr(cell_config)
            if( view == "netlist" then
                ; collect for prop.cfg editing below; hdb has no API for this.
                netlist_list = cons(list(lib cell) netlist_list)
            else
                ; bind the given cellview for this cell via an hdb binding rule.
                hdbSetObjBindRule(conf list(list(lib cell nil nil))
                    list('hdbcBindingRule list(nil nil view)))
            )
        )
        hdbSaveAs(conf lib_name cell_name view_name)

        ; close configuration
        conf_bag = hdbCreateConfigBag()
        hdbAddConfigToBag(conf_bag conf)
        hdbCloseConfigsInBag(conf_bag)

        ; update netlist source files
        edit_config_source_files(lib_name cell_name view_name netlist_list)
    )
)
+
+; HACKERMAN FUNCTION:
+; Cadence provides no SKILL API to set config-view source files; instead,
+; spice/spectre source files are defined in an undocumented ASCII prop.cfg file.
+; This method writes the appropriate prop.cfg file directly.
+;
+; lib_name     : testbench library name.
+; cell_name    : testbench cell name.
+; view_name    : config view name.
+; netlist_list : list of (lib cell) pairs to bind to extracted netlists.
+procedure( edit_config_source_files(lib_name cell_name view_name netlist_list "tttl")
+    ; BUGFIX: lib and cell are now declared locally; previously the loop
+    ; assignments leaked them into the global namespace.
+    let( (p lib_dir cell_lib_dir lib cell)
+        lib_dir = get_lib_directory(lib_name)
+        p = outfile( sprintf(nil "%s/%s/%s/%s" lib_dir cell_name view_name "prop.cfg") "w" )
+        ; common header
+        fprintf( p "file-format-id 1.1;\ndefault\n{\n}\n" )
+        foreach( lib_cell netlist_list
+            lib = car(lib_cell)
+            cell = cadr(lib_cell)
+            cell_lib_dir = get_lib_directory(lib)
+            ; each cell entry points its sourcefile at the extracted netlist
+            fprintf( p "cell %s.%s\n{\n" lib cell )
+            fprintf( p " non-inherited string prop sourcefile = \"%s/%s/netlist/netlist\";\n}\n"
+                cell_lib_dir cell )
+        )
+        close(p)
+    )
+)
+
+; Write testbench information (corners, parameters, outputs) to a YAML file.
+;
+; sdb            : the ADE-XL setup database.
+; result_file    : path of the output file.
+; output_list    : list of output-expression objects (with ->name / ->expression).
+; en_corner_list : list of enabled corner names.
+procedure( write_testbench_info_to_file(sdb result_file output_list en_corner_list)
+    ; BUGFIX: var_list is now declared locally; previously the assignment in the
+    ; if-condition below leaked it into the global namespace.
+    let( (p output_count var_list)
+
+        ; write testbench information to result_file
+        p = outfile(result_file "w")
+
+        fprintf(p "corners:\n")
+        foreach( corn cadr(axlGetCorners(sdb))
+            fprintf(p " - %s\n" corn)
+        )
+        fprintf(p "enabled_corners:\n")
+        foreach( corn en_corner_list
+            fprintf(p " - %s\n" corn)
+        )
+        fprintf(p "parameters:\n")
+        if( var_list = cadr(axlGetVars(sdb)) then
+            foreach( var_name var_list
+                fprintf(p " %s: \"%s\"\n" var_name axlGetVarValue(axlGetVar(sdb var_name)))
+            )
+        else
+            ; emit an empty mapping so the YAML stays parseable with no parameters
+            fprintf(p " {}\n")
+        )
+        fprintf(p "outputs:\n")
+        output_count = 0
+        foreach( out_obj output_list
+            ; output names are emitted as double-quoted YAML keys, so names
+            ; containing quotes cannot be written safely and are skipped
+            if( rexMatchp( "\"" out_obj->name) then
+                warn("Output expression name (%s) have quotes, skipping" out_obj->name)
+            else
+                fprintf(p " \"%s\": !!str %A\n" out_obj->name out_obj->expression)
+                output_count = output_count + 1
+            )
+        )
+        ; emit an empty mapping so the YAML stays parseable with no outputs
+        when( output_count == 0
+            fprintf(p " {}\n")
+        )
+        close(p)
+    )
+)
+
+; Instantiates a testbench.
+;
+; Copy a testbench template to the desired location, replace instances, make config view,
+; and also set up corner settings in ADE-XL. This method also records the list of
+; corners, global variables, and output expressions to result_file.
+;
+; tb_cell        : testbench cell name.
+; targ_lib       : target library name.
+; config_libs    : global library list string for the config view.
+; config_views   : cellview list string for the config view.
+; config_stops   : stop-view list string for the config view.
+; default_corner : name of the single corner enabled by default.
+; corner_file    : corner definition file to load.
+; def_files      : list of model definition files for the ADE session.
+; tech_lib       : technology library name (not used in this body).
+; result_file    : file to which testbench information is written.
+procedure( instantiate_testbench(tb_cell targ_lib
+        config_libs config_views config_stops
+        default_corner corner_file def_files
+        tech_lib result_file
+        "tttttttltt")
+    let( (session_name session_sdb session sdb test_names test_name test tool_args corner_list
+            ade_symbol ade_session output_list tmp_state_name state_obj success)
+
+        tmp_state_name = "orig_state"
+
+        ; check if temporary ADE session state already exists, if so, delete it
+        state_obj = ddGetObj(targ_lib tb_cell tmp_state_name)
+        when( state_obj
+            success = ddDeleteObj(state_obj)
+            unless( success
+                error("Cannot delete orig_state cellview.")
+            )
+        )
+
+        ; create config view
+        create_config_view(targ_lib tb_cell "config" config_libs config_views config_stops)
+
+        ; use a unique session name per call so repeated invocations do not collide
+        session_name = sprintf(nil "modify_adexl_%d" bag_modify_adexl_counter)
+        bag_modify_adexl_counter = bag_modify_adexl_counter + 1
+
+        session_sdb = open_adexl_session(targ_lib tb_cell "adexl" session_name "a")
+        session = car(session_sdb)
+        sdb = cadr(session_sdb)
+
+        ; check that only one test is defined; close everything before erroring
+        test_names = cadr(axlGetTests(sdb))
+        when(length(test_names) != 1
+            axlCommitSetupDB(sdb)
+            axlCloseSetupDB(sdb)
+            axlCloseSession(session)
+            error("ADEXL testbench must have exactly 1 test defined.")
+        )
+
+        ; save current test setup state
+        axlSaveSetupState(session "adexl_default" "All")
+
+        ; change all tests to use config view, and set all test's definition files
+        ; also get a list of defined output expressions
+        ; step 1: get ADE session
+        test_name = car(test_names)
+        ade_symbol = axlGetToolSession(session_name test_name)
+        ade_session = asiGetSession(ade_symbol)
+        ; step 2: save original ADE session
+        asiSaveState(ade_session ?name tmp_state_name ?option 'cellview ?lib targ_lib ?cell tb_cell)
+        ; step 3: change test library
+        test = axlGetTest(sdb test_name)
+        tool_args = axlGetTestToolArgs(test)
+        set_assoc_list(tool_args "view" "config")
+        set_assoc_list(tool_args "lib" targ_lib)
+        set_assoc_list(tool_args "cell" tb_cell)
+        axlSetTestToolArgs(test tool_args)
+        ; step 4: reopen ADE session, then load original ADE state
+        ade_symbol = axlGetToolSession(session_name test_name)
+        ade_session = asiGetSession(ade_symbol)
+        asiLoadState(ade_session ?name tmp_state_name ?option 'cellview)
+        asiSetEnvOptionVal(ade_session 'definitionFiles def_files)
+        output_list = setof(ele asiGetOutputList(ade_session) ele->name)
+        ; step 5: delete temporary ADE session state
+        state_obj = ddGetObj(targ_lib tb_cell tmp_state_name)
+        ddDeleteObj(state_obj)
+
+        axlMainAppSaveSetup(session_name)
+
+        ; load corner; close the session before raising on failure
+        unless(axlLoadCorners(sdb corner_file)
+            axlCommitSetupDB(sdb)
+            axlCloseSetupDB(sdb)
+            axlCloseSession(session)
+            ; BUGFIX: the original message referenced undefined variables
+            ; (lib_name/cell_name/view_name); use the actual parameters instead.
+            error("Error loading corner file %s to %s__%s (adexl)" corner_file targ_lib tb_cell)
+        )
+
+        ; set default corner
+        corner_list = list(default_corner)
+        enable_adexl_corners(sdb corner_list nil)
+
+        ; write testbench information to file
+        write_testbench_info_to_file(sdb result_file output_list corner_list)
+
+        ; save and close
+        axlSaveSetupState(session "adexl_default" "All")
+        axlSaveSetupState(session "ocean_default" "All")
+        axlMainAppSaveSetup(session_name)
+        axlCommitSetupDB(sdb)
+        axlCloseSetupDB(sdb)
+        axlCloseSession(session)
+    )
+)
+
+; Returns parameter and corner information of a testbench.
+;
+; tb_lib      : testbench library name.
+; tb_cell     : testbench cell name.
+; result_file : file to which the testbench information is written.
+procedure( get_testbench_info(tb_lib tb_cell result_file "ttt")
+    let( (session_name session_sdb session sdb test_names test_name ade_symbol asi_sess
+            output_list corner_list en_list success)
+        session_name = "read_adexl"
+        ; open the setup database read-only
+        session_sdb = open_adexl_session(tb_lib tb_cell "adexl" session_name "r")
+        session = car(session_sdb)
+        sdb = cadr(session_sdb)
+
+        ; check that only one test is defined; close everything before erroring
+        test_names = cadr(axlGetTests(sdb))
+        when(length(test_names) != 1
+            axlCommitSetupDB(sdb)
+            axlCloseSetupDB(sdb)
+            axlCloseSession(session)
+            error("ADEXL testbench must have exactly 1 test defined.")
+        )
+
+        ; get output expression names from the test's ADE environment
+        test_name = car(test_names)
+        ade_symbol = axlGetToolSession(session_name test_name)
+        asi_sess = sevEnvironment(ade_symbol)
+        output_list = setof(ele asiGetOutputList(asi_sess) ele->name)
+
+        ; get the names of corners that are currently enabled
+        corner_list = cadr(axlGetCorners(sdb))
+        en_list = setof(corner corner_list axlGetEnabled(axlGetCorner(sdb corner)))
+
+        ; write testbench information to file
+        write_testbench_info_to_file(sdb result_file output_list en_list)
+
+        ; close
+        axlCommitSetupDB(sdb)
+        axlCloseSetupDB(sdb)
+        axlCloseSession(session)
+    )
+)
+
+; Configure run options. Used to set up Monte Carlo parameters.
+; run_params is an association list of run options and their values. The key "mode"
+; corresponds to the run mode; if "mode" is absent, nothing is changed.
+procedure( set_run_options(session sdb run_params "ggl")
+    ; BUGFIX: opt_val is now declared locally; previously the assignment in the
+    ; when-condition below leaked it into the global namespace.
+    let( (run_mode opt_list run_opt opt_val)
+        when( run_mode = cadr(assoc("mode" run_params))
+            ; no options for single run/sweep mode.
+            cond( (run_mode == "Single Run, Sweeps and Corners"
+                    opt_list = nil)
+                (run_mode == "Monte Carlo Sampling"
+                    opt_list = '("mcnumpoints" "mcmethod") )
+                ('t
+                    ; unknown mode: close the session so it is not leaked, then raise
+                    axlCloseSession(session)
+                    error("Unsupported run mode: %s" run_mode) )
+            )
+            ; only copy over options that are actually present in run_params
+            foreach( opt_name opt_list
+                when( opt_val = cadr(assoc(opt_name run_params))
+                    run_opt = axlPutRunOption(sdb run_mode opt_name)
+                    axlSetRunOptionValue(run_opt opt_val)
+                )
+            )
+            axlSetCurrentRunMode(sdb run_mode)
+        )
+    )
+)
+
+; Modify the given testbench.
+; tb_lib and tb_cell describe the library and cell of the testbench to simulate.
+; conf_file contains the config view settings.
+; opt_file contains the association list of run mode options.
+; corner_file contains a list of corners to simulate.
+; param_file contains the association list of parameter values.
+; env_params_file contains per-corner parameter values (passed to enable_adexl_corners).
+procedure( modify_testbench(tb_lib tb_cell conf_file opt_file corner_file param_file env_params_file "ttttttt")
+    let( (tmp_list session sdb conf_list run_params corner_list param_values env_param_values session_name)
+        ; timestamped session name so each invocation is unique
+        sprintf(session_name "bag_sim_adexl_%s" getCurrentTime())
+
+        ; read inputs from file.
+        conf_list = parse_data_from_file(conf_file)
+        run_params = parse_data_from_file(opt_file)
+        corner_list = parse_data_from_file(corner_file)
+        param_values = parse_data_from_file(param_file)
+        env_param_values = parse_data_from_file(env_params_file)
+
+        ; modify config view (skipped entirely when conf_list is empty/nil)
+        when( conf_list
+            edit_config_view(tb_lib tb_cell "config" conf_list)
+        )
+
+        tmp_list = open_adexl_session(tb_lib tb_cell "adexl" session_name "a")
+        session = car(tmp_list)
+        sdb = cadr(tmp_list)
+
+        ; change corners, parameters, and run options
+        enable_adexl_corners( sdb corner_list env_param_values)
+        set_adexl_parameters( sdb param_values )
+        set_run_options( session sdb run_params )
+
+        ; save and close
+        axlSaveSetupState(session "adexl_default" "All")
+        axlSaveSetupState(session "ocean_default" "All")
+        axlMainAppSaveSetup(session_name)
+        axlCommitSetupDB(sdb)
+        axlCloseSetupDB(sdb)
+        axlCloseSession(session)
+    )
+)
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; BAG server related functions ;;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+; IPC stdout handler for the BAG server process.
+; Before the handshake completes, this waits for the server's startup message;
+; afterwards, every chunk received is evaluated as a SKILL expression and the
+; result is written back, preceded by its byte length on its own line.
+procedure( stdoutHandler(ipcId data)
+    let( (result result_str)
+        if( bag_server_started > 0 then
+            printf("*INFO* Evaluate expression from BAG process: %s\n" data)
+            ; errsetstring evaluates data while trapping errors instead of aborting
+            if( result = errsetstring(data 't) then
+                sprintf(result_str "%A\n" car(result))
+            else
+                ; on failure, reply with the error message recorded in errset.errset
+                sprintf(result_str "%s\n" car(nthelem(5 errset.errset)))
+            )
+            printf("*INFO* Sending result to BAG process: %s" result_str)
+            ; protocol: first send the payload length, then the payload itself
+            ipcWriteProcess(ipcId sprintf(nil "%d\n" strlen(result_str)))
+            ipcWriteProcess(ipcId result_str)
+            't
+        else
+            ; handshake: wait for the exact startup message before serving requests
+            if( data == "BAG skill server has started. Yay!\n" then
+                bag_server_started = 1
+                printf("*INFO* BAG skill server started.\n")
+            else
+                printf("*INFO* Waiting for BAG skill server. Message: %s\n" data)
+            )
+        )
+    )
+)
+
+; IPC stderr handler: any stderr output from the BAG server process is treated
+; as fatal, so the server process is killed after logging the message.
+procedure( stderrHandler(ipcId data)
+    warn("BAG server process error: %s\n" data)
+    warn("Shutting down BAG server.")
+    ipcKillProcess(ipcId)
+    't
+)
+
+; IPC exit handler: log the BAG server process exit status.
+procedure( exitHandler(ipcId exitId)
+    printf("*INFO* BAG server process exited with status: %d\n" exitId)
+    't
+)
+
+; Launch the BAG server as a child process via IPC, routing its stdout, stderr,
+; and exit events to the handlers registered below. Resets the handshake flag so
+; stdoutHandler waits for the server's startup message before serving requests.
+; Returns the IPC process handle from ipcBeginProcess.
+procedure( start_bag_server()
+    bag_server_started = 0
+    printf("*INFO* Starting BAG server process.\n")
+    ipcBeginProcess("bash virt_server.sh" "" 'stdoutHandler 'stderrHandler 'exitHandler "")
+)
+
+; Module state: handshake flag set by stdoutHandler, counter used to generate
+; unique ADE-XL session names in instantiate_testbench, and the IPC handle of
+; the BAG server process started at load time.
+bag_server_started = 0
+bag_modify_adexl_counter = 0
+bag_proc = start_bag_server()
diff --git a/run_scripts/test_generators.sh b/run_scripts/test_generators.sh
new file mode 100755
index 0000000..a20724d
--- /dev/null
+++ b/run_scripts/test_generators.sh
@@ -0,0 +1,59 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source .bashrc_pypath
+
+if [[ $# < 1 ]]
+then
+ echo "Usage: ./test_generators.sh [--package ] [options...]"
+ exit 1
+fi
+
+if [ -z ${BAG_PYTHON+x} ]
+then
+ echo "BAG_PYTHON is unset"
+ exit 1
+fi
+
+exec ${BAG_PYTHON} -m pytest BAG_framework/tests_gen --data_root $@
diff --git a/run_scripts/test_primitives.sh b/run_scripts/test_primitives.sh
new file mode 100755
index 0000000..013cca1
--- /dev/null
+++ b/run_scripts/test_primitives.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source .bashrc_pypath
+
+if [ -z ${BAG_PYTEST+x} ]
+then
+ echo "BAG_PYTEST is unset"
+ exit 1
+fi
+
+exec ${BAG_PYTEST} ${BAG_TECH_CONFIG_DIR}/tests $@
diff --git a/run_scripts/verify.py b/run_scripts/verify.py
new file mode 100644
index 0000000..046c5d8
--- /dev/null
+++ b/run_scripts/verify.py
@@ -0,0 +1,81 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import argparse
+
+from bag.core import BagProject
+from bag.util.misc import register_pdb_hook
+
+register_pdb_hook()
+
+
+def parse_options() -> argparse.Namespace:
+ parser = argparse.ArgumentParser(description='Run DRC/LVS/RCX.')
+ parser.add_argument('lib', help='library name.')
+ parser.add_argument('cell', help='cell name.')
+ parser.add_argument('-d', '--drc', dest='run_drc', action='store_true', default=False,
+ help='run DRC.')
+ parser.add_argument('-v', '--lvs', dest='run_lvs', action='store_true', default=False,
+ help='run LVS. Pass --rcx flag to run LVS for extraction.')
+ parser.add_argument('-x', '--rcx', dest='run_rcx', action='store_true', default=False,
+ help='run RCX.')
+
+ args = parser.parse_args()
+ return args
+
+
+def run_main(prj: BagProject, args: argparse.Namespace) -> None:
+ if args.run_drc:
+ print('Running DRC')
+ success, log = prj.run_drc(args.lib, args.cell)
+ if success:
+ print('DRC passed!')
+ else:
+ print('DRC failed...')
+ print(f'log file: {log}')
+ elif args.run_lvs:
+ mode = 'LVS_RCX' if args.run_rcx else 'LVS'
+ print(f'Running {mode}')
+ success, log = prj.run_lvs(args.lib, args.cell, run_rcx=args.run_rcx)
+ if success:
+ print(f'{mode} passed!')
+ else:
+ print(f'{mode} failed...')
+ print(f'log file: {log}')
+ elif args.run_rcx:
+ print('Running RCX')
+ netlist, log = prj.run_rcx(args.lib, args.cell)
+ if netlist:
+ print('RCX passed!')
+ else:
+ print('RCX failed...')
+ print(f'log file: {log}')
+ else:
+ print('No operation specifiied, do nothing.')
+
+
+if __name__ == '__main__':
+    _args = parse_options()
+
+    # Reuse an existing BagProject bound to "bprj" (e.g. when this script is
+    # exec'd inside an interactive session that already created one);
+    # otherwise create a fresh project.
+    local_dict = locals()
+    if 'bprj' not in local_dict:
+        print('creating BAG project')
+        _prj = BagProject()
+    else:
+        print('loading BAG project')
+        _prj = local_dict['bprj']
+
+    run_main(_prj, _args)
diff --git a/run_scripts/virt_server.sh b/run_scripts/virt_server.sh
new file mode 100755
index 0000000..556e3b4
--- /dev/null
+++ b/run_scripts/virt_server.sh
@@ -0,0 +1,53 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+export PYTHONPATH="${BAG_FRAMEWORK}/src"
+
+# Fail fast when BAG_PYTHON is unset, consistent with the other run scripts;
+# otherwise exec would silently run the wrong command.
+if [ -z "${BAG_PYTHON+x}" ]
+then
+    echo "BAG_PYTHON is unset"
+    exit 1
+fi
+
+# server command and port search range; presumably the chosen port is written
+# to ${port_file} for the SKILL side to read back -- confirm against bag.virtuoso.
+export cmd="-m bag.virtuoso run_skill_server"
+export min_port=5000
+export max_port=9999
+export port_file="BAG_server_port.txt"
+export log="skill_server.log"
+
+# ${cmd} is intentionally unquoted so it word-splits into separate arguments
+exec ${BAG_PYTHON} ${cmd} ${min_port} ${max_port} ${port_file} ${log}
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..e2c29a6
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from setuptools import setup, find_packages
+
+
+setup(
+    name='bag',
+    version='3.0',
+    description='BAG AMS Generator',
+    classifiers=[
+        'Development Status :: 3 - Alpha',
+        'Operating System :: POSIX :: Linux',
+        'Programming Language :: Python :: 3.7',
+    ],
+    author='Eric Chang',
+    author_email='erichang@bcanalog.com',
+    python_requires='>=3.7',
+    install_requires=[
+        'setuptools>=18.5',
+        'ruamel.yaml',
+        'Jinja2>=2.9',
+        'numpy>=1.10',
+        'pexpect>=4.0',
+        'pyzmq>=15.2.0',
+        'scipy>=0.17',
+        'matplotlib>=1.5',
+        'h5py',
+        'lark-parser',
+        'sortedcontainers',
+    ],
+    # optional extra: pip install bag[mdao]
+    extras_require={
+        'mdao': ['openmdao']
+    },
+    tests_require=[
+        'openmdao',
+        'pytest',
+        'pytest-xdist',
+    ],
+    # src-layout: packages live under src/, so map the root package dir there
+    packages=find_packages('src'),
+    package_dir={'': 'src'},
+    # ship the Jinja2 template files used by the interface/verification layers
+    package_data={
+        'bag.interface': ['templates/*'],
+        'bag.verification': ['templates/*'],
+    },
+)
diff --git a/src/bag/__init__.py b/src/bag/__init__.py
new file mode 100644
index 0000000..707669d
--- /dev/null
+++ b/src/bag/__init__.py
@@ -0,0 +1,52 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is the bag root package.
+"""
+
+import signal
+
+__all__ = []
+
+# make sure that SIGINT will always be catched by python.
+signal.signal(signal.SIGINT, signal.default_int_handler)
diff --git a/src/bag/concurrent/__init__.py b/src/bag/concurrent/__init__.py
new file mode 100644
index 0000000..a7fb612
--- /dev/null
+++ b/src/bag/concurrent/__init__.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package define helper classes used to perform concurrent operations.
+"""
diff --git a/src/bag/concurrent/core.py b/src/bag/concurrent/core.py
new file mode 100644
index 0000000..eb11665
--- /dev/null
+++ b/src/bag/concurrent/core.py
@@ -0,0 +1,364 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines utility classes for performing concurrent operations.
+"""
+
+from typing import Optional, Sequence, Dict, Union, Tuple, Callable, Any, Awaitable, List, Iterable
+
+import asyncio
+import subprocess
+import collections
+import multiprocessing
+from pathlib import Path
+from asyncio.subprocess import Process
+from concurrent.futures import CancelledError
+
+from .util import gather_err
+
+ProcInfo = Tuple[Union[str, Sequence[str]], str, Optional[Dict[str, str]], Optional[str]]
+FlowInfo = Tuple[Union[str, Sequence[str]], str, Optional[Dict[str, str]], Optional[str],
+ Callable[[Optional[int], str], Any]]
+
+
def batch_async_task(coro_list: Iterable[Awaitable[Any]]) -> List[Any]:
    """Execute a list of coroutines or futures concurrently and block until done.

    This starts a new event loop (via ``asyncio.run``), so it must be called
    from synchronous code.  User may press Ctrl-C to cancel all given tasks.

    Parameters
    ----------
    coro_list : Iterable[Awaitable[Any]]
        a list of coroutines or futures to run concurrently.

    Returns
    -------
    results : List[Any]
        the return values of the given tasks, in the same order they were
        given.  If any task raises, the error propagates out of this call
        instead (see ``gather_err``).
    """
    return asyncio.run(gather_err(coro_list))
+
+
class Semaphore:
    """A modified asyncio Semaphore class that gets the running loop dynamically.

    Unlike older ``asyncio.Semaphore`` implementations, this class does not
    bind to an event loop at construction time; the loop is looked up inside
    ``acquire()``.

    Parameters
    ----------
    value : int
        the initial counter value; must be non-negative.

    Raises
    ------
    ValueError
        if ``value`` is negative.
    """

    def __init__(self, value: int = 1) -> None:
        if value < 0:
            raise ValueError("Semaphore initial value must be >= 0")

        self._value = value
        # futures of coroutines currently blocked inside acquire()
        self._waiters = collections.deque()

    async def __aenter__(self) -> None:
        await self.acquire()
        return None

    async def __aexit__(self, exc_type, exc, tb):
        self.release()

    def _wake_up_next(self):
        """Wake up the first waiter that is not already done, if any."""
        while self._waiters:
            waiter = self._waiters.popleft()
            if not waiter.done():
                waiter.set_result(None)
                return

    def locked(self):
        """Return True if the semaphore cannot be acquired immediately."""
        return self._value == 0

    async def acquire(self):
        """Acquire the semaphore, blocking until the internal counter is positive.

        Returns
        -------
        acquired : bool
            always True.
        """
        loop = asyncio.get_running_loop()
        while self._value <= 0:
            fut = loop.create_future()
            self._waiters.append(fut)
            try:
                await fut
            except BaseException:
                # BUGFIX: must catch BaseException, not Exception.  On
                # Python >= 3.8 asyncio.CancelledError derives from
                # BaseException, so catching Exception would leak the waiter
                # future on cancellation and could drop a wake-up, deadlocking
                # other acquirers.  This mirrors CPython's asyncio.locks.
                fut.cancel()
                if self._value > 0 and not fut.cancelled():
                    self._wake_up_next()
                raise
        self._value -= 1
        return True

    def release(self):
        """Increment the counter and wake up one waiter, if any."""
        self._value += 1
        self._wake_up_next()
+
+
class SubProcessManager:
    """A class that provides methods to run multiple subprocesses in parallel using asyncio.

    Parameters
    ----------
    max_workers : int
        number of maximum allowed subprocesses.  If 0 (the default), the
        number of CPUs on this system is used.
    cancel_timeout : float
        Number of seconds to wait for a process to terminate once SIGTERM or
        SIGKILL is issued.  Defaults to 10 seconds.
    """

    def __init__(self, max_workers: int = 0, cancel_timeout: float = 10.0) -> None:
        if max_workers == 0:
            max_workers = multiprocessing.cpu_count()

        self._cancel_timeout = cancel_timeout
        # limits the number of subprocesses running at any given time
        self._semaphore = Semaphore(max_workers)

    async def _kill_subprocess(self, proc: Optional[Process]) -> None:
        """Helper method; send SIGTERM/SIGKILL to a subprocess.

        This method first sends SIGTERM to the subprocess.  If the process hasn't terminated
        after a given timeout, it sends SIGKILL.

        Parameters
        ----------
        proc : Optional[Process]
            the process to attempt to terminate.  If None, this method does nothing.
        """
        if proc is not None:
            if proc.returncode is None:
                try:
                    proc.terminate()
                    try:
                        # shield the wait so that cancelling this coroutine
                        # does not abort the termination grace period
                        await asyncio.shield(asyncio.wait_for(proc.wait(), self._cancel_timeout))
                    except CancelledError:
                        pass

                    if proc.returncode is None:
                        # SIGTERM did not work within the timeout; escalate to SIGKILL
                        proc.kill()
                        try:
                            await asyncio.shield(
                                asyncio.wait_for(proc.wait(), self._cancel_timeout))
                        except CancelledError:
                            pass
                except ProcessLookupError:
                    # the process already exited; nothing to clean up
                    pass

    async def async_new_subprocess(self,
                                   args: Union[str, Sequence[str]],
                                   log: str,
                                   env: Optional[Dict[str, str]] = None,
                                   cwd: Optional[str] = None) -> Optional[int]:
        """A coroutine which starts a subprocess.

        If this coroutine is cancelled, it will shut down the subprocess gracefully using
        SIGTERM/SIGKILL, then raise CancelledError.

        Parameters
        ----------
        args : Union[str, Sequence[str]]
            command to run, as string or sequence of strings.
        log : str
            the log file name.  Parent directories are created if necessary,
            and stdout/stderr of the subprocess are redirected to this file.
        env : Optional[Dict[str, str]]
            an optional dictionary of environment variables.  None to inherit from parent.
        cwd : Optional[str]
            the working directory.  None to inherit from parent.

        Returns
        -------
        retcode : Optional[int]
            the return code of the subprocess.
        """
        if isinstance(args, str):
            args = [args]

        # get log file name, make directory if necessary
        log_path = Path(log).resolve()
        log_path.parent.mkdir(parents=True, exist_ok=True)

        if cwd is not None:
            # make sure current working directory exists
            Path(cwd).mkdir(parents=True, exist_ok=True)

        async with self._semaphore:
            proc = None
            with open(log_path, 'w') as logf:
                # record the command at the top of the log for debugging
                logf.write(f'command: {" ".join(args)}\n')
                logf.flush()
                try:
                    proc = await asyncio.create_subprocess_exec(*args, stdout=logf,
                                                                stderr=subprocess.STDOUT,
                                                                env=env, cwd=cwd)
                    retcode = await proc.wait()
                    return retcode
                except CancelledError as err:
                    await self._kill_subprocess(proc)
                    raise err

    async def async_new_subprocess_flow(self,
                                        proc_info_list: Sequence[FlowInfo]) -> Any:
        """A coroutine which runs a series of subprocesses.

        If this coroutine is cancelled, it will shut down the current subprocess gracefully using
        SIGTERM/SIGKILL, then raise CancelledError.

        Parameters
        ----------
        proc_info_list : Sequence[FlowInfo]
            a list of processes to execute in series.  Each element is a tuple of:

            args : Union[str, Sequence[str]]
                command to run, as string or list of string arguments.
            log : str
                log file name.
            env : Optional[Dict[str, str]]
                environment variable dictionary.  None to inherit from parent.
            cwd : Optional[str]
                working directory path.  None to inherit from parent.
            vfun : Callable[[Optional[int], str], Any]
                a function to validate if it is ok to execute the next process.  The output of the
                last function is returned.  The first argument is the return code, the second
                argument is the log file name.

        Returns
        -------
        result : Any
            the return value of the last validate function.  None if a validate function
            returns False before the last step.
        """
        num_proc = len(proc_info_list)
        if num_proc == 0:
            return None

        # hold the semaphore for the whole flow so steps of one flow are not
        # interleaved with other flows beyond the worker limit
        async with self._semaphore:
            for idx, (args, log, env, cwd, vfun) in enumerate(proc_info_list):
                if isinstance(args, str):
                    args = [args]

                log_path = Path(log).resolve()
                log_path.parent.mkdir(parents=True, exist_ok=True)

                if cwd is not None:
                    # make sure current working directory exists
                    Path(cwd).mkdir(parents=True, exist_ok=True)

                proc, retcode = None, None
                with open(log_path, 'w') as logf:
                    logf.write(f'command: {" ".join(args)}\n')
                    logf.flush()
                    try:
                        proc = await asyncio.create_subprocess_exec(*args, stdout=logf,
                                                                    stderr=subprocess.STDOUT,
                                                                    env=env, cwd=cwd)
                        retcode = await proc.wait()
                    except CancelledError as err:
                        await self._kill_subprocess(proc)
                        raise err

                fun_output = vfun(retcode, str(log_path))
                if idx == num_proc - 1:
                    # last step: return the validator output unconditionally
                    return fun_output
                elif not fun_output:
                    # validation failed; abort the rest of the flow
                    return None

    def batch_subprocess(self, proc_info_list: Sequence[ProcInfo]
                         ) -> Optional[Sequence[Union[int, Exception]]]:
        """Run all given subprocesses in parallel.

        Parameters
        ----------
        proc_info_list : Sequence[ProcInfo]
            a list of process information.  Each element is a tuple of:

            args : Union[str, Sequence[str]]
                command to run, as string or list of string arguments.
            log : str
                log file name.
            env : Optional[Dict[str, str]]
                environment variable dictionary.  None to inherit from parent.
            cwd : Optional[str]
                working directory path.  None to inherit from parent.

        Returns
        -------
        results : Optional[Sequence[Union[int, Exception]]]
            if user cancelled the subprocesses, None is returned.  Otherwise, a list of
            subprocess return codes or exceptions are returned.
        """
        num_proc = len(proc_info_list)
        if num_proc == 0:
            return []

        coro_list = [self.async_new_subprocess(args, log, env, cwd) for args, log, env, cwd in
                     proc_info_list]

        return batch_async_task(coro_list)

    def batch_subprocess_flow(self, proc_info_list: Sequence[Sequence[FlowInfo]]) -> \
            Optional[Sequence[Any]]:
        """Run all given subprocess flows in parallel.

        Parameters
        ----------
        proc_info_list : Sequence[Sequence[FlowInfo]]
            a list of process flow information.  Each element is a sequence of tuples of:

            args : Union[str, Sequence[str]]
                command to run, as string or list of string arguments.
            log : str
                log file name.
            env : Optional[Dict[str, str]]
                environment variable dictionary.  None to inherit from parent.
            cwd : Optional[str]
                working directory path.  None to inherit from parent.
            vfun : Callable[[Optional[int], str], Any]
                a function to validate if it is ok to execute the next process.  The output of the
                last function is returned.  The first argument is the return code, the second
                argument is the log file name.

        Returns
        -------
        results : Optional[Sequence[Any]]
            if user cancelled the subprocess flows, None is returned.  Otherwise, a list of
            flow return values or exceptions are returned.
        """
        num_proc = len(proc_info_list)
        if num_proc == 0:
            return []

        coro_list = [self.async_new_subprocess_flow(flow_info) for flow_info in proc_info_list]

        return batch_async_task(coro_list)
diff --git a/src/bag/concurrent/util.py b/src/bag/concurrent/util.py
new file mode 100644
index 0000000..e198879
--- /dev/null
+++ b/src/bag/concurrent/util.py
@@ -0,0 +1,61 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Awaitable, Any, List, Iterable
+
+import asyncio
+
+
async def gather_err(coro_list: Iterable[Awaitable[Any]]) -> List[Any]:
    """Run all given awaitables concurrently and return their results in order.

    If any task raises, the remaining tasks are cancelled and the error is
    re-raised (see :class:`GatherHelper`).
    """
    helper = GatherHelper()
    for awaitable in coro_list:
        helper.append(awaitable)

    return await helper.gather_err()
+
+
class GatherHelper:
    """Helper class that schedules awaitables as tasks and gathers their results.

    Typical usage: call ``append`` with each coroutine (from within a running
    event loop), then ``await gather_err()`` to collect all results, or
    ``await run()`` to just wait for completion.
    """

    def __init__(self) -> None:
        # scheduled tasks, in the order they were appended
        self._tasks: List[asyncio.Task] = []

    def __bool__(self) -> bool:
        """Return True if any task has been appended (and not cleared)."""
        return bool(self._tasks)

    def append(self, coro: Awaitable[Any]) -> None:
        """Schedule the given awaitable as a task on the running event loop."""
        self._tasks.append(asyncio.create_task(coro))

    async def gather_err(self) -> List[Any]:
        """Wait for all tasks and return their results in append order.

        If any task raises, all still-pending tasks are cancelled and the
        first error (in append order) is re-raised.
        """
        if not self._tasks:
            # BUGFIX: asyncio.wait() raises ValueError on an empty collection
            return []

        _, pending = await asyncio.wait(self._tasks, return_when=asyncio.FIRST_EXCEPTION)
        # an error (if any) stopped the wait early; cancel whatever is left
        for task in pending:
            task.cancel()
        # re-raise the first failure in append order, if any
        for task in self._tasks:
            if task.done():
                err = task.exception()
                if err is not None:
                    raise err

        # all tasks completed successfully
        return [task.result() for task in self._tasks]

    async def run(self) -> None:
        """Wait for all tasks, discarding results; re-raise the first error."""
        if not self._tasks:
            # BUGFIX: asyncio.wait() raises ValueError on an empty collection
            return

        done, pending = await asyncio.wait(self._tasks, return_when=asyncio.FIRST_EXCEPTION)
        for task in pending:
            task.cancel()
        for task in done:
            # result() re-raises the task's exception, if it failed
            task.result()

    def clear(self) -> None:
        """Forget all scheduled tasks (does not cancel them)."""
        self._tasks.clear()
diff --git a/src/bag/core.py b/src/bag/core.py
new file mode 100644
index 0000000..f87a92a
--- /dev/null
+++ b/src/bag/core.py
@@ -0,0 +1,1269 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is the core bag module.
+"""
+from __future__ import annotations
+
+from typing import (
+ TYPE_CHECKING, Dict, Any, Tuple, Optional, Type, Sequence, Union, Mapping, cast, List
+)
+
+import os
+import shutil
+import pprint
+from pathlib import Path
+
+from pybag.enum import DesignOutput, SupplyWrapMode, LogLevel
+from pybag.core import PySchCellViewInfo
+
+from .io.file import write_yaml, read_yaml
+from .interface import ZMQDealer
+from .interface.lef import LEFInterface
+from .design.netlist import add_mismatch_offsets
+from .design.database import ModuleDB
+from .design.module import Module
+from .layout.routing.grid import RoutingGrid
+from .layout.template import TemplateDB, TemplateBase
+from .layout.tech import TechInfo
+from .concurrent.core import batch_async_task
+from .env import (
+ get_port_number, get_bag_config, get_bag_work_dir, create_routing_grid, get_bag_tmp_dir,
+ get_gds_layer_map, get_gds_object_map
+)
+from .util.importlib import import_class
+from .simulation.data import netlist_info_from_dict
+from .simulation.hdf5 import load_sim_data_hdf5
+from .simulation.core import TestbenchManager
+from .simulation.core import MeasurementManager as MeasurementManagerOld
+from .simulation.measure import MeasurementManager
+from .simulation.cache import SimulationDB, DesignDB
+
+if TYPE_CHECKING:
+ from .simulation.base import SimAccess
+
+
+class BagProject:
+ """The main bag controller class.
+
+ This class mainly stores all the user configurations, and issue
+ high level bag commands.
+
+ Attributes
+ ----------
+ bag_config : Dict[str, Any]
+ the BAG configuration parameters dictionary.
+ """
+
+ def __init__(self) -> None:
+ self.bag_config = get_bag_config()
+
+ bag_tmp_dir = get_bag_tmp_dir()
+ bag_work_dir = get_bag_work_dir()
+
+ # get port files
+ port, msg = get_port_number(bag_config=self.bag_config)
+ if msg:
+ print(f'*WARNING* {msg}. Operating without Virtuoso.')
+
+ # create ZMQDealer object
+ dealer_kwargs = {}
+ dealer_kwargs.update(self.bag_config['socket'])
+ del dealer_kwargs['port_file']
+
+ # create TechInfo instance
+ self._grid = create_routing_grid()
+
+ if port >= 0:
+ # make DbAccess instance.
+ dealer = ZMQDealer(port, **dealer_kwargs)
+ else:
+ dealer = None
+
+ # create database interface object
+ try:
+ lib_defs_file = os.path.join(bag_work_dir, self.bag_config['lib_defs'])
+ except ValueError:
+ lib_defs_file = ''
+ db_cls = cast(Type['DbAccess'], import_class(self.bag_config['database']['class']))
+ self.impl_db = db_cls(dealer, bag_tmp_dir, self.bag_config['database'], lib_defs_file)
+ self._default_lib_path = self.impl_db.default_lib_path
+
+ # make SimAccess instance.
+ sim_cls = cast(Type['SimAccess'], import_class(self.bag_config['simulation']['class']))
+ self._sim = sim_cls(bag_tmp_dir, self.bag_config['simulation'])
+
+ # make LEFInterface instance
+ self._lef: Optional[LEFInterface] = None
+ lef_config = self.bag_config.get('lef', None)
+ if lef_config is not None:
+ lef_cls = cast(Type[LEFInterface], import_class(lef_config['class']))
+ self._lef = lef_cls(lef_config)
+
    @property
    def tech_info(self) -> TechInfo:
        """TechInfo: the technology information object of the global routing grid."""
        return self._grid.tech_info
+
    @property
    def grid(self) -> RoutingGrid:
        """RoutingGrid: the global routing grid object created from the BAG configuration."""
        return self._grid
+
    @property
    def default_lib_path(self) -> str:
        """str: the default library path reported by the database interface."""
        return self._default_lib_path
+
    @property
    def sim_access(self) -> SimAccess:
        """SimAccess: the simulator interface object."""
        return self._sim
+
    def close_bag_server(self) -> None:
        """Close the BAG database server.

        After this call ``impl_db`` is set to None, so any further database
        operations on this project will fail.
        """
        self.impl_db.close()
        self.impl_db = None
+
    def import_sch_cellview(self, lib_name: str, cell_name: str,
                            view_name: str = 'schematic') -> None:
        """Import the given schematic and symbol template into Python.

        This import process is done recursively.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        view_name : str
            view name.  Defaults to 'schematic'.
        """
        self.impl_db.import_sch_cellview(lib_name, cell_name, view_name)
+
+ def import_design_library(self, lib_name, view_name='schematic'):
+ # type: (str, str) -> None
+ """Import all design templates in the given library from CAD database.
+
+ Parameters
+ ----------
+ lib_name : str
+ name of the library.
+ view_name : str
+ the view name to import from the library.
+ """
+ self.impl_db.import_design_library(lib_name, view_name)
+
    def import_gds_file(self, gds_fname: str, lib_name: str) -> None:
        """Import the given GDS file into the CAD database.

        Parameters
        ----------
        gds_fname : str
            the GDS file name.
        lib_name : str
            name of the library to import the layouts into; the library is
            created first.
        """
        lay_map = get_gds_layer_map()
        obj_map = get_gds_object_map()
        self.impl_db.create_library(lib_name)
        self.impl_db.import_gds_file(gds_fname, lib_name, lay_map, obj_map, self.grid)
+
+ def get_cells_in_library(self, lib_name):
+ # type: (str) -> Sequence[str]
+ """Get a list of cells in the given library.
+
+ Returns an empty list if the given library does not exist.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+
+ Returns
+ -------
+ cell_list : Sequence[str]
+ a list of cells in the library
+ """
+ return self.impl_db.get_cells_in_library(lib_name)
+
    def make_template_db(self, impl_lib: str, **kwargs: Any) -> TemplateDB:
        """Create and return a new TemplateDB instance.

        Parameters
        ----------
        impl_lib : str
            the library name to put generated layouts in.
        **kwargs : Any
            optional TemplateDB parameters.

        Returns
        -------
        tdb : TemplateDB
            the new TemplateDB instance.
        """
        return TemplateDB(self.grid, impl_lib, prj=self, **kwargs)
+
    def make_module_db(self, impl_lib: str, **kwargs: Any) -> ModuleDB:
        """Create and return a new ModuleDB instance.

        Parameters
        ----------
        impl_lib : str
            the library name to put generated schematics in.
        **kwargs : Any
            optional ModuleDB parameters.

        Returns
        -------
        mdb : ModuleDB
            the new ModuleDB instance.
        """
        return ModuleDB(self.tech_info, impl_lib, prj=self, **kwargs)
+
    def make_dsn_db(self, root_dir: Path, log_file: str, impl_lib: str,
                    sch_db: Optional[ModuleDB] = None, lay_db: Optional[TemplateDB] = None,
                    **kwargs: Any) -> DesignDB:
        """Create and return a new DesignDB instance.

        Parameters
        ----------
        root_dir : Path
            root directory for the design database.
        log_file : str
            the log file name.
        impl_lib : str
            the library name used when new schematic/layout databases must be
            created.
        sch_db : Optional[ModuleDB]
            the schematic database to use.  If None, a new one is created.
        lay_db : Optional[TemplateDB]
            the layout database to use.  If None, a new one is created.
        **kwargs : Any
            optional DesignDB parameters.

        Returns
        -------
        dsn_db : DesignDB
            the new DesignDB instance.
        """
        if sch_db is None:
            sch_db = self.make_module_db(impl_lib)
        if lay_db is None:
            lay_db = self.make_template_db(impl_lib)

        dsn_db = DesignDB(root_dir, log_file, self.impl_db, self.sim_access.netlist_type,
                          sch_db, lay_db, **kwargs)
        return dsn_db
+
    def make_sim_db(self, dsn_dir: Path, log_file: str, impl_lib: str,
                    dsn_options: Optional[Mapping[str, Any]] = None,
                    **kwargs: Any) -> SimulationDB:
        """Create and return a new SimulationDB instance.

        Parameters
        ----------
        dsn_dir : Path
            root directory for the underlying design database.
        log_file : str
            the log file name.
        impl_lib : str
            the library name used to create the underlying design database.
        dsn_options : Optional[Mapping[str, Any]]
            optional keyword arguments forwarded to ``make_dsn_db``.
        **kwargs : Any
            optional SimulationDB parameters.

        Returns
        -------
        sim_db : SimulationDB
            the new SimulationDB instance.
        """
        if dsn_options is None:
            dsn_options = {}

        dsn_db = self.make_dsn_db(dsn_dir, log_file, impl_lib, **dsn_options)
        sim_db = SimulationDB(log_file, dsn_db, **kwargs)
        return sim_db
+
+ def generate_cell(self, specs: Dict[str, Any],
+ raw: bool = False,
+ gen_lay: bool = True,
+ gen_sch: bool = True,
+ run_drc: bool = False,
+ run_lvs: bool = False,
+ run_rcx: bool = False,
+ lay_db: Optional[TemplateDB] = None,
+ sch_db: Optional[ModuleDB] = None,
+ gen_lef: bool = False,
+ cv_info_out: Optional[List[PySchCellViewInfo]] = None,
+ sim_netlist: bool = False,
+ flat: bool = False,
+ gen_hier: bool = False,
+ gen_model: bool = False,
+ mismatch: bool = False,
+ gen_shell: bool = False,
+ export_lay: bool = False,
+ gen_netlist: bool = False) -> str:
+ """Generate layout/schematic of a given cell from specification file.
+
+ Parameters
+ ----------
+ specs : Dict[str, Any]
+ the specification dictionary. Some non-obvious parameters:
+
+ params : Dict[str, Any]
+ If layout generator is given, this is the layout parameters. Otherwise this
+ is the schematic parameters.
+ netlist_file : str
+ If not empty, we will create a netlist file with this name (even if raw = False).
+ if sim_netlist is True, this will be a simulation netlist.
+ model_file : str
+ the behavioral model filename if gen_model = True.
+ gds_file : str
+ override the default GDS layout file name. Note that specifying this entry does
+ not mean a GDS file will be created, you must set raw = True or gen_gds = True.
+
+ raw : bool
+ True to generate GDS and netlist files instead of OA cellviews.
+ gen_lay : bool
+ True to generate layout.
+ gen_sch : bool
+ True to generate schematics.
+ run_drc : bool
+ True to run DRC.
+ run_lvs : bool
+ True to run LVS.
+ run_rcx : bool
+ True to run RCX.
+ lay_db : Optional[TemplateDB]
+ the layout database.
+ sch_db : Optional[ModuleDB]
+ the schematic database.
+ gen_lef : bool
+ True to create LEF file.
+ cv_info_out : Optional[List[PySchCellViewInfo]] = None
+ If given cellview information objects will be appended to this list.
+ sim_netlist : bool
+ True to return a simulation netlist.
+ flat : bool
+ True to generate flat netlist.
+ gen_hier: bool
+ True to write the system verilog modeling hierarchy in a yaml file.
+ gen_model: bool
+ True to generate behavioral models
+ mismatch : bool
+ True to add mismatch voltages
+ gen_shell: bool
+ True to generate verilog shell file.
+ export_lay: bool
+ True to export layout file even in non-raw mode.
+ gen_netlist : bool
+ True to generate netlist even in non-raw mode.
+ Returns
+ -------
+ rcx_netlist : str
+ the extraction netlist. Empty on error or if extraction is not run.
+ """
+ root_dir: Union[str, Path] = specs.get('root_dir', '')
+ lay_str: Union[str, Type[TemplateBase]] = specs.get('lay_class', '')
+ sch_str: Union[str, Type[Module]] = specs.get('sch_class', '')
+ impl_lib: str = specs['impl_lib']
+ impl_cell: str = specs['impl_cell']
+ params: Optional[Mapping[str, Any]] = specs.get('params', None)
+ netlist_file_override: str = specs.get('netlist_file', '')
+ model_file: str = specs.get('model_file', '')
+ yaml_file: str = specs.get('yaml_file', '')
+ layout_file_override: str = specs.get('layout_file', '')
+ leaves: Optional[Mapping[str, List[str]]] = specs.get('leaf_cells', None)
+ mod_type_str: str = specs.get('model_type', 'SYSVERILOG')
+ default_model_view: str = specs.get('default_model_view', '')
+ hierarchy_file: str = specs.get('hierarchy_file', '')
+ model_params: Mapping[str, Any] = specs.get('model_params', {})
+ sup_wrap_mode: str = specs.get('model_supply_wrap_mode', 'NONE')
+ lef_config: Mapping[str, Any] = specs.get('lef_config', {})
+ name_prefix: str = specs.get('name_prefix', '')
+ name_suffix: str = specs.get('name_suffix', '')
+ exact_cell_names_list: List[str] = specs.get('exact_cell_names', [])
+ square_bracket: bool = specs.get('square_bracket', False)
+ lay_type_specs: Union[str, List[str]] = specs.get('layout_type', 'GDS')
+ mod_type: DesignOutput = DesignOutput[mod_type_str]
+ sup_wrap_type: SupplyWrapMode = SupplyWrapMode[sup_wrap_mode]
+ exact_cell_names = set(exact_cell_names_list)
+
+ if isinstance(lay_type_specs, str):
+ lay_type_list: List[DesignOutput] = [DesignOutput[lay_type_specs]]
+ else:
+ lay_type_list: List[DesignOutput] = [DesignOutput[v] for v in lay_type_specs]
+
+ if isinstance(root_dir, str):
+ root_path = Path(root_dir)
+ else:
+ root_path = root_dir
+
+ if lay_str == '':
+ has_lay = False
+ lay_cls = None
+ else:
+ lay_cls = cast(Type[TemplateBase], import_class(lay_str))
+ has_lay = True
+
+ gen_lay = gen_lay and has_lay
+ gen_model = gen_model and model_params
+ run_drc = run_drc and gen_lay
+
+ verilog_shell_path = root_path / f'{impl_cell}_shell.v' if gen_lef or gen_shell else None
+ sch_cls = None
+ layout_ext = lay_type_list[0].extension
+ layout_file = ''
+ lef_options = {}
+ if layout_file_override and Path(layout_file_override).suffix[1:] != layout_ext:
+ raise ValueError('Conflict between layout file type and layout file name.')
+ if has_lay:
+ if lay_db is None:
+ lay_db = self.make_template_db(impl_lib, name_prefix=name_prefix,
+ name_suffix=name_suffix)
+
+ print('computing layout...')
+ lay_master: TemplateBase = lay_db.new_template(lay_cls, params=params)
+ lay_master.get_lef_options(lef_options, lef_config)
+ # try getting schematic class from instance, if possible
+ sch_cls = lay_master.get_schematic_class_inst()
+ dut_list = [(lay_master, impl_cell)]
+ print('computation done.')
+
+ if gen_lay:
+ print('creating layout...')
+ if not raw:
+ lay_db.batch_layout(dut_list, output=DesignOutput.LAYOUT,
+ exact_cell_names=exact_cell_names)
+ else:
+ layout_file = (layout_file_override or
+ str(root_path / f'{impl_cell}.{layout_ext}'))
+ lay_db.batch_layout(dut_list, output=lay_type_list[0], fname=layout_file,
+ exact_cell_names=exact_cell_names,
+ square_bracket=square_bracket)
+ for out_type in lay_type_list[1:]:
+ cur_file = str(root_path / f'{impl_cell}.{out_type.extension}')
+ lay_db.batch_layout(dut_list, output=out_type, fname=cur_file,
+ exact_cell_names=exact_cell_names,
+ square_bracket=square_bracket)
+
+ print('layout done.')
+
+ sch_params = lay_master.sch_params
+ else:
+ sch_params = params
+
+ if export_lay and not raw:
+ print('exporting layout')
+ layout_file = (layout_file_override or
+ str(root_path / f'{impl_cell}.{layout_ext}'))
+ export_params = dict(square_bracket=square_bracket,
+ output_type=lay_type_list[0])
+ self.impl_db.export_layout(impl_lib, impl_cell, layout_file,
+ params=export_params)
+ for out_type in lay_type_list[1:]:
+ export_params['output_type'] = out_type
+ cur_file = str(root_path / f'{impl_cell}.{out_type.extension}')
+ self.impl_db.export_layout(impl_lib, impl_cell, cur_file,
+ params=export_params)
+
+ if sch_cls is None:
+ if isinstance(sch_str, str):
+ if sch_str:
+ # no schematic class from layout, try get it from string
+ sch_cls = cast(Type[Module], import_class(sch_str))
+ else:
+ sch_cls = sch_str
+ has_sch = sch_cls is not None
+
+ run_lvs = (run_lvs or run_rcx) and gen_lay and has_sch
+ run_rcx = run_rcx and gen_lay and has_sch
+ gen_sch = (gen_sch or gen_hier or gen_model or run_lvs or run_rcx) and has_sch
+ flat = flat or (mismatch and not run_rcx)
+
+ final_netlist = ''
+ final_netlist_type = DesignOutput.CDL
+ lvs_netlist = ''
+ netlist_file = netlist_file_override
+ if (gen_netlist or raw) and not netlist_file:
+ if sim_netlist:
+ ext = self._sim.netlist_type.extension
+ else:
+ ext = DesignOutput.CDL.extension
+ netlist_file = str(root_path / f'{impl_cell}.{ext}')
+
+ if gen_sch:
+ if sch_db is None:
+ sch_db = self.make_module_db(impl_lib, name_prefix=name_prefix,
+ name_suffix=name_suffix)
+
+ print('computing schematic...')
+ sch_master: Module = sch_db.new_master(sch_cls, params=sch_params)
+ sch_master.get_lef_options(lef_options, lef_config)
+ dut_list = [(sch_master, impl_cell)]
+ print('computation done.')
+
+ if not raw:
+ print('creating schematic...')
+ sch_db.batch_schematic(dut_list, exact_cell_names=exact_cell_names)
+ print('schematic done.')
+
+ if yaml_file:
+ sch_db.batch_schematic(dut_list, output=DesignOutput.YAML, fname=yaml_file,
+ exact_cell_names=exact_cell_names)
+
+ if netlist_file:
+ print('creating netlist...')
+ final_netlist = netlist_file
+ if sim_netlist:
+ final_netlist_type = self._sim.netlist_type
+ if run_lvs:
+ lvs_netlist = str(root_path / f'{impl_cell}.{DesignOutput.CDL.extension}')
+ sch_db.batch_schematic(dut_list, output=DesignOutput.CDL,
+ fname=lvs_netlist, cv_info_out=cv_info_out,
+ flat=flat, exact_cell_names=exact_cell_names,
+ square_bracket=square_bracket)
+ sch_db.batch_schematic(dut_list, output=final_netlist_type,
+ fname=netlist_file, cv_info_out=cv_info_out,
+ flat=flat, exact_cell_names=exact_cell_names)
+ else:
+ sch_db.batch_schematic(dut_list, output=final_netlist_type,
+ fname=netlist_file, cv_info_out=cv_info_out,
+ flat=flat, exact_cell_names=exact_cell_names)
+ else:
+ final_netlist_type = DesignOutput.CDL
+ lvs_netlist = netlist_file
+ sch_db.batch_schematic(dut_list, output=final_netlist_type, fname=netlist_file,
+ cv_info_out=cv_info_out, flat=flat,
+ exact_cell_names=exact_cell_names,
+ square_bracket=square_bracket)
+ print('netlisting done.')
+
+ if verilog_shell_path is not None:
+ sch_db.batch_schematic(dut_list, output=DesignOutput.VERILOG, shell=True,
+ fname=str(verilog_shell_path),
+ exact_cell_names=exact_cell_names)
+ print(f'verilog shell file created at {verilog_shell_path}')
+
+ if gen_hier:
+ print('creating hierarchy...')
+ if not hierarchy_file:
+ hierarchy_file = str(root_path / 'hierarchy.yaml')
+ write_yaml(hierarchy_file,
+ sch_master.get_instance_hierarchy(mod_type, leaves, default_model_view))
+ print(f'hierarchy done. File is {hierarchy_file}')
+
+ if gen_model:
+ if not model_file:
+ model_file = str(root_path / f'{impl_cell}.{mod_type.extension}')
+ print('creating behavioral model...')
+ sch_db.batch_model([(sch_master, impl_cell, model_params)],
+ output=mod_type, fname=model_file,
+ supply_wrap_mode=sup_wrap_type,
+ exact_cell_names=exact_cell_names)
+ print(f'behavioral model done. File is {model_file}')
+ elif netlist_file:
+ if sim_netlist:
+ raise ValueError('Cannot generate simulation netlist from custom cellview')
+
+ print('exporting netlist')
+ self.impl_db.export_schematic(impl_lib, impl_cell, netlist_file)
+
+ if impl_cell in exact_cell_names:
+ gen_cell_name = impl_cell
+ else:
+ gen_cell_name = name_prefix + impl_cell + name_suffix
+
+ if run_drc:
+ print('running DRC...')
+ drc_passed, drc_log = self.run_drc(impl_lib, gen_cell_name, layout=layout_file)
+ if drc_passed:
+ print('DRC passed!')
+ else:
+ print(f'DRC failed... log file: {drc_log}')
+
+ lvs_passed = False
+ if run_lvs:
+ print('running LVS...')
+ lvs_passed, lvs_log = self.run_lvs(impl_lib, gen_cell_name, run_rcx=run_rcx,
+ layout=layout_file, netlist=lvs_netlist)
+ if lvs_passed:
+ print('LVS passed!')
+ else:
+ raise ValueError(f'LVS failed... log file: {lvs_log}')
+
+ if lvs_passed and run_rcx:
+ print('running RCX...')
+ final_netlist, rcx_log = self.run_rcx(impl_lib, gen_cell_name)
+ final_netlist_type = DesignOutput.CDL
+ if final_netlist:
+ print('RCX passed!')
+ to_file = str(root_path / Path(final_netlist).name)
+ shutil.copy(final_netlist, to_file)
+ final_netlist = to_file
+ else:
+ raise ValueError(f'RCX failed... log file: {rcx_log}')
+
+ if gen_lef:
+ if not verilog_shell_path.is_file():
+ raise ValueError(f'Missing verilog shell file: {verilog_shell_path}')
+
+ lef_options = lef_config.get('lef_options_override', lef_options)
+ print('generating LEF...')
+ lef_path = root_path / f'{impl_cell}.lef'
+ success = self.generate_lef(impl_lib, impl_cell, verilog_shell_path, lef_path,
+ root_path, lef_options)
+ if success:
+ print(f'LEF generation done, file at {lef_path}')
+ else:
+ raise ValueError('LEF generation failed... '
+ f'check log files in run directory: {root_path}')
+
+ if mismatch:
+ add_mismatch_offsets(final_netlist, final_netlist, final_netlist_type)
+
+ return final_netlist
+
+ def replace_dut_in_wrapper(self, params: Mapping[str, Any], dut_lib: str,
+ dut_cell: str) -> Mapping[str, Any]:
+ # helper function that replaces dut_lib and dut_cell in the wrapper recursively in
+ # dut_params
+ ans = {k: v for k, v in params.items()}
+ dut_params: Optional[Mapping[str, Any]] = params.get('dut_params', None)
+ if dut_params is None:
+ ans['dut_lib'] = dut_lib
+ ans['dut_cell'] = dut_cell
+ else:
+ ans['dut_params'] = self.replace_dut_in_wrapper(dut_params, dut_lib, dut_cell)
+ return ans
+
+ def simulate_cell(self, specs: Dict[str, Any],
+ extract: bool = True,
+ gen_tb: bool = True,
+ simulate: bool = True,
+ mismatch: bool = False,
+ raw: bool = True,
+ lay_db: Optional[TemplateDB] = None,
+ sch_db: Optional[ModuleDB] = None,
+ ) -> str:
+ """Generate and simulate a single design.
+
+ This method only works for simulating a single cell (or a wrapper around a single cell).
+ If you need to simulate multiple designs together, use simulate_config().
+
+ Parameters
+ ----------
+ specs : Dict[str, Any]
+ the specification dictionary. Important entries are:
+
+ use_netlist : str
+ If specified, use this netlist file as the DUT netlist, and
+ only generate the testbench and simulation netlists.
+ If specified but the netlist does not exist, or the PySchCellViewInfo yaml file
+ does not exist in the same directory, we will still generate the DUT,
+ but the resulting netlist/PySchCellViewInfo object will be saved to this location
+
+ extract : bool
+ True to generate extracted netlist.
+ gen_tb : bool
+ True to generate the DUT/testbench/simulation netlists.
+
+ If False, we will simply grab the final simulation netlist and simulate it.
+ This means you can quickly simulate a previously generated netlist with manual
+ modifications.
+ simulate : bool
+ True to run simulation.
+
+ If False, we will only generate the netlists.
+ mismatch: bool
+ If True mismatch voltage sources are added to the netlist and simulation is done with
+ those in place
+ raw: bool
+ True to generate GDS and netlist files instead of OA cellviews.
+ lay_db : Optional[TemplateDB]
+ the layout database.
+ sch_db : Optional[ModuleDB]
+ the schematic database.
+
+ Returns
+ -------
+ sim_result : str
+ simulation result file name.
+ """
+ root_dir: Union[str, Path] = specs['root_dir']
+ impl_lib: str = specs['impl_lib']
+ impl_cell: str = specs['impl_cell']
+ use_netlist: str = specs.get('use_netlist', '')
+ precision: int = specs.get('precision', 6)
+ tb_params: Dict[str, Any] = specs.get('tb_params', {}).copy()
+ wrapper_lib: str = specs.get('wrapper_lib', '')
+ if wrapper_lib:
+ wrapper_cell: str = specs['wrapper_cell']
+ wrapper_params: Mapping[str, Any] = specs['wrapper_params']
+ wrapper_params = self.replace_dut_in_wrapper(wrapper_params, impl_lib, impl_cell)
+ tb_params['dut_params'] = wrapper_params
+ tb_params['dut_lib'] = wrapper_lib
+ tb_params['dut_cell'] = wrapper_cell
+ else:
+ tb_params['dut_lib'] = impl_lib
+ tb_params['dut_cell'] = impl_cell
+
+ if isinstance(root_dir, str):
+ root_path = Path(root_dir).resolve()
+ else:
+ root_path = root_dir
+
+ netlist_type = self._sim.netlist_type
+ tb_netlist_path = root_path / f'tb.{netlist_type.extension}'
+ sim_netlist_path = root_path / f'sim.{netlist_type.extension}'
+
+ root_path.mkdir(parents=True, exist_ok=True)
+ if gen_tb:
+ if not impl_cell:
+ raise ValueError('impl_cell is empty.')
+
+ if sch_db is None:
+ sch_db = self.make_module_db(impl_lib)
+
+ if use_netlist:
+ use_netlist_path = Path(use_netlist)
+ netlist_dir: Path = use_netlist_path.parent
+ cv_info_path = netlist_dir / (use_netlist_path.stem + '.cvinfo.yaml')
+ if use_netlist_path.is_file():
+ if cv_info_path.is_file():
+ # both files exist, load from file
+ cvinfo = PySchCellViewInfo(str(cv_info_path))
+ cv_info_list = [cvinfo]
+ else:
+ # no cv_info, still need to generate
+ cv_info_list = []
+ else:
+ # need to save netlist and cv_info_list
+ cv_info_list = []
+ else:
+ # no need to save
+ use_netlist_path = None
+ cv_info_path = None
+ cv_info_list = []
+ has_netlist = use_netlist_path is not None and use_netlist_path.is_file()
+ extract = extract and not has_netlist
+ if not cv_info_list:
+ gen_netlist = self.generate_cell(specs, raw=raw, gen_lay=extract, gen_sch=True,
+ run_lvs=extract, run_rcx=extract,
+ sim_netlist=True, sch_db=sch_db, lay_db=lay_db,
+ cv_info_out=cv_info_list, mismatch=mismatch)
+ if use_netlist_path is None:
+ use_netlist_path = Path(gen_netlist)
+ else:
+ # save netlist and cvinfo
+ use_netlist_path.parent.mkdir(parents=True, exist_ok=True)
+ if not use_netlist_path.is_file():
+ shutil.copy(gen_netlist, str(use_netlist_path))
+ for cv_info in reversed(cv_info_list):
+ print(cv_info.lib_name, cv_info.cell_name)
+ if cv_info.lib_name == impl_lib and cv_info.cell_name == impl_cell:
+ cv_info.to_file(str(cv_info_path))
+ break
+
+ tbm_str: Union[str, Type[TestbenchManager]] = specs.get('tbm_class', '')
+ if isinstance(tbm_str, str):
+ if tbm_str:
+ tbm_cls = cast(Type[TestbenchManager], import_class(tbm_str))
+ else:
+ tbm_cls = None
+ else:
+ tbm_cls = tbm_str
+
+ if tbm_cls is not None:
+ # setup testbench using TestbenchManager
+ tbm_specs: Dict[str, Any] = specs['tbm_specs']
+ sim_envs: List[str] = tbm_specs['sim_envs']
+
+ tbm = tbm_cls(self._sim, root_path, 'tb_sim', impl_lib,
+ tbm_specs, [], sim_envs, precision=precision)
+ tbm.setup(sch_db, tb_params, cv_info_list, use_netlist_path, gen_sch=not raw)
+ else:
+ # setup testbench using spec file
+ tb_lib: str = specs['tb_lib']
+ tb_cell: str = specs['tb_cell']
+ sim_info_dict: Dict[str, Any] = specs['sim_info']
+ impl_cell_tb = f'{tb_cell.upper()}_{impl_cell}' if impl_cell else tb_cell.upper()
+
+ tb_cls = sch_db.get_schematic_class(tb_lib, tb_cell)
+ # noinspection PyTypeChecker
+ tb_master = sch_db.new_master(tb_cls, params=tb_params)
+ dut_list = [(tb_master, impl_cell_tb)]
+
+ fname = '' if use_netlist_path is None else str(use_netlist_path)
+ sch_db.batch_schematic(dut_list, output=netlist_type, top_subckt=False,
+ fname=str(tb_netlist_path), cv_info_list=cv_info_list,
+ cv_netlist=fname)
+ if not raw:
+ sch_db.batch_schematic(dut_list, output=DesignOutput.SCHEMATIC)
+ sim_info = netlist_info_from_dict(sim_info_dict)
+ self._sim.create_netlist(sim_netlist_path, tb_netlist_path, sim_info, precision)
+ tbm = None
+ else:
+ tbm = None
+
+ sim_result = ''
+ if simulate:
+ if not tb_netlist_path.is_file():
+ raise ValueError(f'Cannot find testbench netlist: {tb_netlist_path}')
+ if not sim_netlist_path.is_file():
+ raise ValueError(f'Cannot find simulation netlist: {sim_netlist_path}')
+
+ sim_tag = 'sim'
+ print(f'simulation netlist: {sim_netlist_path}')
+ self._sim.run_simulation(sim_netlist_path, sim_tag)
+ print(f'Finished simulating {sim_netlist_path}')
+ sim_path = self._sim.get_sim_file(sim_netlist_path.parent, sim_tag)
+ sim_result = str(sim_path)
+ print(f'Simulation result in {sim_result}')
+ if tbm is not None and specs.get('tbm_print', False):
+ tbm.print_results(load_sim_data_hdf5(sim_path))
+
+ return sim_result
+
+ def measure_cell(self, specs: Mapping[str, Any], extract: bool = False,
+ force_sim: bool = False, force_extract: bool = False, gen_sch: bool = False,
+ fake: bool = False, log_level: LogLevel = LogLevel.DEBUG) -> None:
+ meas_str: Union[str, Type[MeasurementManager]] = specs['meas_class']
+ meas_name: str = specs['meas_name']
+ meas_params: Dict[str, Any] = specs['meas_params']
+ precision: int = specs.get('precision', 6)
+
+ gen_specs_file: str = specs.get('gen_specs_file', '')
+ if gen_specs_file:
+ gen_specs: Mapping[str, Any] = read_yaml(gen_specs_file)
+ lay_str: Union[str, Type[TemplateBase]] = gen_specs['lay_class']
+ impl_lib: str = gen_specs['impl_lib']
+ impl_cell: str = gen_specs['impl_cell']
+ dut_params: Mapping[str, Any] = gen_specs['params']
+ root_dir: Union[str, Path] = gen_specs['root_dir']
+ meas_rel_dir: str = specs.get('meas_rel_dir', '')
+ else:
+ lay_str: Union[str, Type[TemplateBase]] = specs['lay_class']
+ impl_lib: str = specs['impl_lib']
+ impl_cell: str = specs['impl_cell']
+ dut_params: Mapping[str, Any] = specs['dut_params']
+ root_dir: Union[str, Path] = specs['root_dir']
+ meas_rel_dir: str = specs.get('meas_rel_dir', '')
+
+ meas_cls = cast(Type[MeasurementManager], import_class(meas_str))
+ lay_cls = cast(Type[TemplateBase], import_class(lay_str))
+ if isinstance(root_dir, str):
+ root_path = Path(root_dir)
+ else:
+ root_path = root_dir
+ if meas_rel_dir:
+ meas_path = root_path / meas_rel_dir
+ else:
+ meas_path = root_path
+
+ dsn_options = dict(
+ extract=extract,
+ force_extract=force_extract,
+ gen_sch=gen_sch,
+ log_level=log_level,
+ )
+ log_file = str(meas_path / 'meas.log')
+ sim_db: SimulationDB = self.make_sim_db(root_path / 'dsn', log_file, impl_lib,
+ dsn_options=dsn_options, force_sim=force_sim,
+ precision=precision, log_level=log_level)
+
+ dut = sim_db.new_design(impl_cell, lay_cls, dut_params, extract=extract)
+ meas_params['fake'] = fake
+ mm = sim_db.make_mm(meas_cls, meas_params)
+ result = sim_db.simulate_mm_obj(meas_name, meas_path / meas_name, dut, mm)
+ pprint.pprint(result.data)
+
+ def measure_cell_old(self, specs: Dict[str, Any],
+ gen_dut: bool = True,
+ load_from_file: bool = False,
+ extract: bool = True,
+ mismatch: bool = False,
+ sch_db: Optional[ModuleDB] = None,
+ cv_info_list: Optional[List[PySchCellViewInfo]] = None,
+ ) -> Dict[str, Any]:
+ """Generate and simulate a single design.
+
+ This method only works for simulating a single cell (or a wrapper around a single cell).
+ If you need to simulate multiple designs together, use simulate_config().
+
+ Parameters
+ ----------
+ specs : Dict[str, Any]
+ the specification dictionary. Important entries are:
+
+ use_netlist : str
+ If specified, use this netlist file as the DUT netlist, and
+ only generate the testbench and simulation netlists.
+
+ gen_dut : bool
+ True to generate DUT.
+ load_from_file : bool
+ True to load from file.
+ extract : bool
+ True to run extracted simulation.
+ mismatch: bool
+ If True mismatch voltage sources are added to the netlist and simulation is done with
+ those in place
+ sch_db : Optional[ModuleDB]
+ the schematic database.
+ cv_info_list: Optional[List[PySchCellViewInfo]]
+ Optional cellview information objects.
+
+ Returns
+ -------
+ meas_result : Dict[str, Any]
+ measurement results dictionary.
+ """
+ root_dir: str = specs['root_dir']
+ impl_lib: str = specs['impl_lib']
+ impl_cell: str = specs['impl_cell']
+ precision: int = specs.get('precision', 6)
+ use_netlist: str = specs.get('use_netlist', None)
+ mm_name: str = specs['meas_name']
+ mm_str: Union[str, Type[MeasurementManagerOld]] = specs['meas_class']
+ mm_specs: Dict[str, Any] = specs['meas_specs']
+
+ gen_dut = (gen_dut or not load_from_file) and not use_netlist
+
+ root_path = Path(root_dir).resolve()
+
+ root_path.mkdir(parents=True, exist_ok=True)
+ wrapper_lookup = {'': impl_cell}
+
+ if cv_info_list is None:
+ cv_info_list = []
+
+ if gen_dut:
+ if sch_db is None:
+ sch_db = self.make_module_db(impl_lib)
+ netlist = Path(self.generate_cell(specs, raw=True, gen_lay=extract, gen_sch=True,
+ run_lvs=extract, run_rcx=extract, sim_netlist=True,
+ sch_db=sch_db, cv_info_out=cv_info_list,
+ mismatch=mismatch))
+ else:
+ netlist = use_netlist
+
+ mm_cls = cast(Type[MeasurementManagerOld], import_class(mm_str))
+ sim_envs: List[str] = mm_specs['sim_envs']
+ mm = mm_cls(self._sim, root_path, mm_name, impl_lib,
+ mm_specs, wrapper_lookup, [], sim_envs, precision)
+
+ result = mm.measure_performance(sch_db, cv_info_list, netlist,
+ load_from_file=load_from_file, gen_sch=False)
+
+ return result
+
+ def create_library(self, lib_name, lib_path=''):
+ # type: (str, str) -> None
+ """Create a new library if one does not exist yet.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ lib_path : str
+ directory to create the library in. If Empty, use default location.
+ """
+ return self.impl_db.create_library(lib_name, lib_path=lib_path)
+
+ def instantiate_schematic(self, lib_name, content_list, lib_path=''):
+ # type: (str, Sequence[Any], str) -> None
+ """Create the given schematic contents in CAD database.
+
+ NOTE: this is BAG's internal method. To create schematics, call batch_schematic() instead.
+
+ Parameters
+ ----------
+ lib_name : str
+ name of the new library to put the schematic instances.
+ content_list : Sequence[Any]
+ list of schematics to create.
+ lib_path : str
+ the path to create the library in. If empty, use default location.
+ """
+ self.impl_db.instantiate_schematic(lib_name, content_list, lib_path=lib_path)
+
+ def instantiate_layout_pcell(self, lib_name, cell_name, inst_lib, inst_cell, params,
+ pin_mapping=None, view_name='layout'):
+ # type: (str, str, str, str, Dict[str, Any], Optional[Dict[str, str]], str) -> None
+ """Create a layout cell with a single pcell instance.
+
+ Parameters
+ ----------
+ lib_name : str
+ layout library name.
+ cell_name : str
+ layout cell name.
+ inst_lib : str
+ pcell library name.
+ inst_cell : str
+ pcell cell name.
+ params : Dict[str, Any]
+ the parameter dictionary.
+ pin_mapping: Optional[Dict[str, str]]
+ the pin renaming dictionary.
+ view_name : str
+ layout view name, default is "layout".
+ """
+ pin_mapping = pin_mapping or {}
+ self.impl_db.instantiate_layout_pcell(lib_name, cell_name, view_name,
+ inst_lib, inst_cell, params, pin_mapping)
+
+ def instantiate_layout(self, lib_name, content_list, lib_path='', view='layout'):
+ # type: (str, Sequence[Any], str, str) -> None
+ """Create a batch of layouts.
+
+ Parameters
+ ----------
+ lib_name : str
+ layout library name.
+ content_list : Sequence[Any]
+ list of layouts to create
+ lib_path : str
+ the path to create the library in. If empty, use default location.
+ view : str
+ layout view name.
+ """
+ self.impl_db.instantiate_layout(lib_name, content_list, lib_path=lib_path, view=view)
+
+ def release_write_locks(self, lib_name, cell_view_list):
+ # type: (str, Sequence[Tuple[str, str]]) -> None
+ """Release write locks from all the given cells.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ cell_view_list : Sequence[Tuple[str, str]]
+ list of cell/view name tuples.
+ """
+ self.impl_db.release_write_locks(lib_name, cell_view_list)
+
+ def refresh_cellviews(self, lib_name, cell_view_list):
+ # type: (str, Sequence[Tuple[str, str]]) -> None
+ """Refresh the given cellviews in the database.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ cell_view_list : Sequence[Tuple[str, str]]
+ list of cell/view name tuples.
+ """
+ self.impl_db.refresh_cellviews(lib_name, cell_view_list)
+
+ def perform_checks_on_cell(self, lib_name, cell_name, view_name):
+ # type: (str, str, str) -> None
+ """Perform checks on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ cell_name : str
+ the cell name.
+ view_name : str
+ the view name.
+ """
+ self.impl_db.perform_checks_on_cell(lib_name, cell_name, view_name)
+
+ def run_drc(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[bool, str]:
+ """Run DRC on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ **kwargs :
+ optional keyword arguments. See DbAccess class for details.
+
+ Returns
+ -------
+ value : bool
+ True if DRC succeeds.
+ log_fname : str
+ name of the DRC log file.
+ """
+ return self.impl_db.run_drc(lib_name, cell_name, **kwargs)
+
+ def run_lvs(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[bool, str]:
+ """Run LVS on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ **kwargs :
+ optional keyword arguments. See DbAccess class for details.
+
+ Returns
+ -------
+ value : bool
+ True if LVS succeeds
+ log_fname : str
+ name of the LVS log file.
+ """
+ return self.impl_db.run_lvs(lib_name, cell_name, **kwargs)
+
+ def run_rcx(self, lib_name: str, cell_name: str,
+ params: Optional[Mapping[str, Any]] = None) -> Tuple[str, str]:
+ """run RC extraction on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ params : Optional[Dict[str, Any]]
+ optional RCX parameter values.
+
+ Returns
+ -------
+ netlist : str
+ The RCX netlist file name. empty if RCX failed.
+ log_fname : str
+ RCX log file name.
+ """
+ return self.impl_db.run_rcx(lib_name, cell_name, params=params)
+
+ def generate_lef(self, impl_lib: str, impl_cell: str, verilog_path: Path,
+ lef_path: Path, run_path: Path, options: Dict[str, Any]) -> bool:
+ if self._lef is None:
+ raise ValueError('LEF generation interface not defined in bag_config.yaml')
+ else:
+ return self._lef.generate_lef(impl_lib, impl_cell, verilog_path, lef_path, run_path,
+ **options)
+
+ def export_layout(self, lib_name: str, cell_name: str, out_file: str, **kwargs: Any) -> str:
+ """export layout.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ out_file : str
+ output file name.
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+
+ Returns
+ -------
+ log_fname : str
+ log file name. Empty if task cancelled.
+ """
+ return self.impl_db.export_layout(lib_name, cell_name, out_file, **kwargs)
+
+ def batch_export_layout(self, info_list):
+ # type: (Sequence[Tuple[Any, ...]]) -> Optional[Sequence[str]]
+ """Export layout of all given cells
+
+ Parameters
+ ----------
+ info_list:
+ list of cell information. Each element is a tuple of:
+
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ out_file : str
+ layout output file name.
+ view_name : str
+ layout view name. Optional.
+ params : Optional[Dict[str, Any]]
+ optional export parameter values.
+
+ Returns
+ -------
+ results : Optional[Sequence[str]]
+ If task is cancelled, return None. Otherwise, this is a
+ list of log file names.
+ """
+ coro_list = [self.impl_db.async_export_layout(*info) for info in info_list]
+ temp_results = batch_async_task(coro_list)
+ if temp_results is None:
+ return None
+ return ['' if isinstance(val, Exception) else val for val in temp_results]
+
+ async def async_run_lvs(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[bool, str]:
+ """A coroutine for running LVS.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+ LVS parameters should be specified as lvs_params.
+
+ Returns
+ -------
+ value : bool
+ True if LVS succeeds
+ log_fname : str
+ name of the LVS log file.
+ """
+ return await self.impl_db.async_run_lvs(lib_name, cell_name, **kwargs)
+
+ async def async_run_rcx(self, lib_name: str, cell_name: str,
+ params: Optional[Dict[str, Any]] = None) -> Tuple[str, str]:
+ """A coroutine for running RCX.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ params : Optional[Dict[str, Any]]
+ optional RCX parameter values.
+
+ Returns
+ -------
+ netlist : str
+ The RCX netlist file name. empty if RCX failed.
+ log_fname : str
+ RCX log file name.
+ """
+ return await self.impl_db.async_run_rcx(lib_name, cell_name, params=params)
+
+ def create_schematic_from_netlist(self, netlist, lib_name, cell_name,
+ sch_view=None, **kwargs):
+ # type: (str, str, str, Optional[str], **Any) -> None
+ """Create a schematic from a netlist.
+
+ This is mainly used to create extracted schematic from an extracted netlist.
+
+ Parameters
+ ----------
+ netlist : str
+ the netlist file name.
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ sch_view : Optional[str]
+            schematic view name. The default value is implementation dependent.
+ **kwargs : Any
+ additional implementation-dependent arguments.
+ """
+ return self.impl_db.create_schematic_from_netlist(netlist, lib_name, cell_name,
+ sch_view=sch_view, **kwargs)
+
+ def create_verilog_view(self, verilog_file, lib_name, cell_name, **kwargs):
+ # type: (str, str, str, **Any) -> None
+        """Create a verilog view for mixed-signal simulation.
+
+ Parameters
+ ----------
+ verilog_file : str
+ the verilog file name.
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ **kwargs : Any
+ additional implementation-dependent arguments.
+ """
+ verilog_file = os.path.abspath(verilog_file)
+ if not os.path.isfile(verilog_file):
+ raise ValueError('%s is not a file.' % verilog_file)
+
+ return self.impl_db.create_verilog_view(verilog_file, lib_name, cell_name, **kwargs)
+
+ def exclude_model(self, lib_name: str, cell_name: str) -> bool:
+ """True to exclude the given schematic generator when generating behavioral models."""
+ return self.impl_db.exclude_model(lib_name, cell_name)
diff --git a/src/bag/data/__init__.py b/src/bag/data/__init__.py
new file mode 100644
index 0000000..23c8734
--- /dev/null
+++ b/src/bag/data/__init__.py
@@ -0,0 +1,52 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package defines methods and classes useful for data post-processing.
+"""
+
+# compatibility import.
+from ..io import load_sim_results, save_sim_results, load_sim_file
+from .core import Waveform
+
+__all__ = ['load_sim_results', 'save_sim_results', 'load_sim_file',
+ 'Waveform', ]
diff --git a/src/bag/data/core.py b/src/bag/data/core.py
new file mode 100644
index 0000000..62ed25a
--- /dev/null
+++ b/src/bag/data/core.py
@@ -0,0 +1,379 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines core data post-processing classes.
+"""
+
+import numpy as np
+import scipy.interpolate as interp
+import scipy.cluster.vq as svq
+import scipy.optimize as sciopt
+
+
+class Waveform(object):
+ """A (usually transient) waveform.
+
+ This class provides interpolation and other convenience functions.
+
+ Parameters
+ ----------
+    xvec : np.ndarray
+ the X vector.
+    yvec : np.ndarray
+ the Y vector.
+ xtol : float
+ the X value tolerance.
+ order : int
+        the spline degree passed to InterpolatedUnivariateSpline: 1 for linear, 2 for quadratic, 3 for cubic.
+ ext : int or str
+ interpolation extension mode. See documentation for InterpolatedUnivariateSpline.
+
+ """
+
+ def __init__(self, xvec, yvec, xtol, order=3, ext=3):
+ self._xvec = xvec
+ self._yvec = yvec
+ self._xtol = xtol
+ self._order = order
+ self._ext = ext
+ self._fun = interp.InterpolatedUnivariateSpline(xvec, yvec, k=order, ext=ext)
+
+ @property
+ def xvec(self):
+ """the X vector"""
+ return self._xvec
+
+ @property
+ def yvec(self):
+ """the Y vector"""
+ return self._yvec
+
+ @property
+ def order(self):
+        """the spline degree: 1 for linear, 2 for quadratic, 3 for cubic."""
+ return self._order
+
+ @property
+ def xtol(self):
+ """the X value tolerance."""
+ return self._xtol
+
+ @property
+ def ext(self):
+ """interpolation extension mode. See documentation for InterpolatedUnivariateSpline."""
+ return self._ext
+
+ def __call__(self, *arg, **kwargs):
+ """Evaluate the waveform at the given points."""
+ return self._fun(*arg, **kwargs)
+
+ def get_xrange(self):
+ """Returns the X vector range.
+
+ Returns
+ -------
+ xmin : float
+ minimum X value.
+ xmax : float
+ maximum X value.
+ """
+ return self.xvec[0], self.xvec[-1]
+
+ def shift_by(self, xshift):
+ """Returns a shifted version of this waveform.
+
+ Parameters
+ ----------
+ xshift : float
+ the amount to shift by.
+
+ Returns
+ -------
+ wvfm : bag.data.core.Waveform
+            a new Waveform, identical to this one but shifted by xshift.
+ """
+ return Waveform(self.xvec + xshift, self.yvec, self.xtol, order=self.order, ext=self.ext)
+
+ def get_all_crossings(self, threshold, start=None, stop=None, edge='both'):
+ """Returns all X values at which this waveform crosses the given threshold.
+
+ Parameters
+ ----------
+ threshold : float
+ the threshold value.
+ start : float or None
+ if given, search for crossings starting at this X value.
+ stop : float or None
+ if given, search only for crossings before this X value.
+ edge : string
+ crossing type. Valid values are 'rising', 'falling', or 'both'.
+
+ Returns
+ -------
+ xval_list : list[float]
+ all X values at which crossing occurs.
+ """
+ # determine start and stop indices
+ sidx = 0 if start is None else np.searchsorted(self.xvec, [start])[0]
+ if stop is None:
+ eidx = len(self.xvec)
+ else:
+ eidx = np.searchsorted(self.xvec, [stop])[0]
+ if eidx < len(self.xvec) and abs(self.xvec[eidx] - stop) < self.xtol:
+ eidx += 1
+
+ # quantize waveform values, then detect edge.
+ bool_vec = self.yvec[sidx:eidx] >= threshold # type: np.ndarray
+ qvec = bool_vec.astype(int)
+ dvec = np.diff(qvec)
+
+ # eliminate unwanted edge types.
+ if edge == 'rising':
+ dvec = np.maximum(dvec, 0)
+ elif edge == 'falling':
+ dvec = np.minimum(dvec, 0)
+
+ # get crossing indices
+ idx_list = dvec.nonzero()[0]
+
+ # convert indices to X value using brentq interpolation.
+ def crossing_fun(x):
+ return self._fun(x) - threshold
+
+ xval_list = []
+ for idx in idx_list:
+ t0, t1 = self.xvec[sidx + idx], self.xvec[sidx + idx + 1]
+ try:
+ tcross = sciopt.brentq(crossing_fun, t0, t1, xtol=self.xtol)
+ except ValueError:
+ # no solution, this happens only if we have numerical error
+ # around the threshold. In this case just pick the endpoint
+ # closest to threshold.
+ va = crossing_fun(t0)
+ vb = crossing_fun(t1)
+ tcross = t0 if abs(va) < abs(vb) else t1
+
+ xval_list.append(tcross)
+
+ return xval_list
+
+ def get_crossing(self, threshold, start=None, stop=None, n=1, edge='both'):
+ """Returns the X value at which this waveform crosses the given threshold.
+
+ Parameters
+ ----------
+ threshold : float
+ the threshold value.
+ start : float or None
+ if given, search for the crossing starting at this X value.'
+ stop : float or None
+ if given, search only for crossings before this X value.
+ n : int
+ returns the nth crossing.
+ edge : str
+ crossing type. Valid values are 'rising', 'falling', or 'both'.
+
+ Returns
+ -------
+ xval : float or None
+ the X value at which the crossing occurs. None if no crossings are detected.
+ """
+ xval_list = self.get_all_crossings(threshold, start=start, stop=stop, edge=edge)
+ if len(xval_list) < n:
+ return None
+ return xval_list[n - 1]
+
+ def to_arrays(self, xmin=None, xmax=None):
+ """Returns the X and Y arrays representing this waveform.
+
+ Parameters
+ ----------
+ xmin : float or None
+ If given, will start from this value.
+ xmax : float or None
+ If given, will end at this value.
+
+ Returns
+ -------
+ xvec : np.multiarray.ndarray
+ the X array
+ yvec : np.multiarray.ndarray
+ the Y array
+ """
+ sidx = 0 if xmin is None else np.searchsorted(self.xvec, [xmin])[0]
+ eidx = len(self.xvec) if xmax is None else np.searchsorted(self.xvec, [xmax])[0]
+
+ if eidx < len(self.xvec) and self.xvec[eidx] == xmax:
+ eidx += 1
+
+ xtemp = self.xvec[sidx:eidx]
+ if xmin is not None and (len(xtemp) == 0 or xtemp[0] != xmin):
+ np.insert(xtemp, 0, [xmin])
+ if xmax is not None and (len(xtemp) == 0 or xtemp[-1] != xmax):
+ np.append(xtemp, [xmax])
+ return xtemp, self(xtemp)
+
    def get_eye_specs(self, tbit, tsample, thres=0.0, nlev=2):
        """Compute the eye diagram spec of this waveform.

        This algorithm uses the following steps.

        1. set t_off to 0
        2. sample the waveform at tbit interval, starting at t0 + t_off.
        3. sort the sampled values, get gap between adjacent values.
        4. record G, the length of the gap covering thres.
        5. increment t_off by tsample, go to step 2 and repeat until
           t_off >= tbit.
        6. find t_off with maximum G. This is the eye center.
        7. at the eye center, compute eye height and eye opening using kmeans
           clustering algorithm.
        8. return result.

        Parameters
        ----------
        tbit : float
            eye period.
        tsample : float
            the resolution to sample the eye. Used to find optimal
            time shift and maximum eye opening.
        thres : float
            the eye vertical threshold.
        nlev : int
            number of expected levels. 2 for NRZ, 4 for PAM4.

        Returns
        -------
        result : dict
            A dictionary from specification to value.
        """

        tstart, tend = self.get_xrange()
        # candidate sampling offsets within one bit period.
        toff_vec = np.arange(0, tbit, tsample)
        best_idx = 0
        best_gap = 0.0
        best_values = None
        mid_lev = nlev // 2
        for idx, t_off in enumerate(toff_vec):
            # noinspection PyTypeChecker
            values = self(np.arange(tstart + t_off, tend, tbit))
            values.sort()

            # gap in the sorted samples that straddles the threshold.
            up_idx = np.searchsorted(values, [thres])[0]
            if up_idx == 0 or up_idx == len(values):
                # all samples fall on one side of the threshold; skip offset.
                continue
            cur_gap = values[up_idx] - values[up_idx - 1]
            if cur_gap > best_gap:
                best_idx = idx
                best_gap = cur_gap
                best_values = values

        if best_values is None:
            raise ValueError("waveform never cross threshold=%.4g" % thres)

        # cluster the (std-normalized) samples into nlev levels with kmeans,
        # seeded with evenly spaced initial centroids.
        vstd = np.std(best_values)
        vtemp = best_values / vstd
        tmp_arr = np.linspace(vtemp[0], vtemp[-1], nlev)  # type: np.ndarray
        clusters = svq.kmeans(vtemp, tmp_arr)[0]
        # clusters = svq.kmeans(vtemp, 4, iter=50)[0]
        clusters *= vstd
        clusters.sort()
        # eye center voltage sits midway between the two middle levels.
        vcenter = (clusters[mid_lev] + clusters[mid_lev - 1]) / 2.0

        # compute eye opening/margin by walking the sorted samples and noting
        # where membership switches from one cluster to the next.
        openings = []
        tr_widths = []
        last_val = best_values[0]
        bot_val = last_val
        cur_cidx = 0
        for cur_val in best_values:
            cur_cluster = clusters[cur_cidx]
            next_cluster = clusters[cur_cidx + 1]
            if abs(cur_val - cur_cluster) > abs(cur_val - next_cluster):
                # cur_val belongs to the next cluster: the jump just crossed an
                # eye opening; record it and the width of the finished trace.
                openings.append(cur_val - last_val)
                tr_widths.append(last_val - bot_val)
                cur_cidx += 1
                if cur_cidx == len(clusters) - 1:
                    tr_widths.append(best_values[-1] - cur_val)
                    break
                bot_val = cur_val
            last_val = cur_val

        return {'center': (float(toff_vec[best_idx]), vcenter),
                'levels': clusters,
                'heights': clusters[1:] - clusters[:-1],
                'openings': np.array(openings),
                'trace_widths': np.array(tr_widths)
                }
+
+ def _add_xy(self, other):
+ if not isinstance(other, Waveform):
+ raise ValueError("Trying to add non-Waveform object.")
+ xnew = np.concatenate((self.xvec, other.xvec))
+ xnew = np.unique(np.around(xnew / self.xtol)) * self.xtol
+ # noinspection PyTypeChecker
+ y1 = self(xnew)
+ y2 = other(xnew)
+ return xnew, y1 + y2
+
+ def __add__(self, other):
+ if np.isscalar(other):
+ return Waveform(np.array(self.xvec), self.yvec + other, self.xtol, order=self.order,
+ ext=self.ext)
+ elif isinstance(other, Waveform):
+ new_order = max(self.order, other.order)
+ xvec, yvec = self._add_xy(other)
+ return Waveform(xvec, yvec, self.xtol, order=new_order, ext=self.ext)
+ else:
+ raise Exception('type %s not supported' % type(other))
+
+ def __neg__(self):
+ return Waveform(np.array(self.xvec), -self.yvec, self.xtol, order=self.order, ext=self.ext)
+
+ def __mul__(self, scale):
+ if not np.isscalar(scale):
+ raise ValueError("Can only multiply by scalar.")
+ return Waveform(np.array(self.xvec), scale * self.yvec, self.xtol, order=self.order,
+ ext=self.ext)
+
+ def __rmul__(self, scale):
+ return self.__mul__(scale)
diff --git a/src/bag/data/dc.py b/src/bag/data/dc.py
new file mode 100644
index 0000000..421588c
--- /dev/null
+++ b/src/bag/data/dc.py
@@ -0,0 +1,263 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines classes for computing DC operating point.
+"""
+
+from typing import Union, Dict
+
+import scipy.sparse
+import scipy.optimize
+import numpy as np
+
+from bag.tech.mos import MosCharDB
+
+
class DCCircuit(object):
    """A class that solves DC operating point of a circuit.

    Nodes are registered lazily by name; node 0 is ground ('gnd'/'vss'/'VSS').
    Transistor currents are evaluated from the characterization databases, and
    the operating point is found by driving the KCL residual at every unknown
    node to zero with a scipy root finder.

    Parameters
    ----------
    ndb : MosCharDB
        nmos characterization database.
    pdb : MosCharDB
        pmos characterization database.
    """

    def __init__(self, ndb, pdb):
        # type: (MosCharDB, MosCharDB) -> None
        # next unused node ID; node 0 is reserved for ground.
        self._n = 1
        self._ndb = ndb
        self._pdb = pdb
        # keyed by (mos_type, intent, lch); values are the per-group matrix
        # construction lists built up by add_transistor().
        self._transistors = {}
        # all common ground aliases map to node 0.
        self._node_id = {'gnd': 0, 'vss': 0, 'VSS': 0}
        self._node_name_lookup = {0: 'gnd'}
        # nodes with known (forced) voltages; ground is fixed at 0 V.
        self._node_voltage = {0: 0}

    def _get_node_id(self, name):
        # type: (str) -> int
        """Return the integer node ID for the given net name, registering it if new."""
        if name not in self._node_id:
            ans = self._n
            self._node_id[name] = ans
            self._node_name_lookup[ans] = name
            self._n += 1
            return ans
        else:
            return self._node_id[name]

    def set_voltage_source(self, node_name, voltage):
        # type: (str, float) -> None
        """
        Specify the voltage at a node.

        Parameters
        ----------
        node_name : str
            the net name.
        voltage : float
            voltage of the given net.
        """
        node_id = self._get_node_id(node_name)
        self._node_voltage[node_id] = voltage

    def add_transistor(self, d_name, g_name, s_name, b_name, mos_type, intent, w, lch, fg=1):
        # type: (str, str, str, str, str, str, Union[float, int], float, int) -> None
        """Adds a small signal transistor model to the circuit.

        Parameters
        ----------
        d_name : str
            drain net name.
        g_name : str
            gate net name.
        s_name : str
            source net name.
        b_name : str
            body net name.
        mos_type : str
            transistor type. Either 'nch' or 'pch'.
        intent : str
            transistor threshold flavor.
        w : Union[float, int]
            transistor width.
        lch : float
            transistor channel length.
        fg : int
            transistor number of fingers.
        """
        node_d = self._get_node_id(d_name)
        node_g = self._get_node_id(g_name)
        node_s = self._get_node_id(s_name)
        node_b = self._get_node_id(b_name)

        # get existing current function data. Initialize if not found.
        ids_key = (mos_type, intent, lch)
        if ids_key in self._transistors:
            arow, acol, bdata, fg_list, ds_list = self._transistors[ids_key]
        else:
            arow, acol, bdata, fg_list, ds_list = [], [], [], [], []
            self._transistors[ids_key] = (arow, acol, bdata, fg_list, ds_list)

        # record Ai and bi data.  Each transistor occupies 4 rows of Ai:
        # row offset+0 is the constant width (stored via bdata), rows
        # offset+1..offset+3 compute vbs, vds, and vgs (node minus source).
        offset = len(fg_list) * 4
        arow.extend([offset + 1, offset + 1, offset + 2, offset + 2, offset + 3, offset + 3])
        acol.extend([node_b, node_s, node_d, node_s, node_g, node_s])
        bdata.append(w)
        fg_list.append(fg)
        ds_list.append((node_d, node_s))

    def solve(self, env, guess_dict, itol=1e-10, inorm=1e-6):
        # type: (str, Dict[str, float], float, float) -> Dict[str, float]
        """Solve DC operating point.

        Parameters
        ----------
        env : str
            the simulation environment.
        guess_dict : Dict[str, float]
            initial guess dictionary.
        itol : float
            current error tolerance.
        inorm : float
            current normalization factor.

        Returns
        -------
        op_dict : Dict[str, float]
            DC operating point dictionary.
        """
        # step 1: get list of nodes to solve
        node_list = [idx for idx in range(self._n) if idx not in self._node_voltage]
        reverse_dict = {nid: idx for idx, nid in enumerate(node_list)}
        ndim = len(node_list)

        # step 2: get Av and bv, which expand the unknown voltage vector into
        # the full node voltage vector: v_all = Av . v_unknown + bv.
        amatv = scipy.sparse.csr_matrix(([1] * ndim, (node_list, np.arange(ndim))), shape=(self._n, ndim))
        bmatv = np.zeros(self._n)
        for nid, val in self._node_voltage.items():
            bmatv[nid] = val

        # step 3: gather current functions, and output matrix entries
        ifun_list = []
        out_data = []
        out_row = []
        out_col = []
        out_col_cnt = 0
        for (mos_type, intent, lch), (arow, acol, bdata, fg_list, ds_list) in self._transistors.items():
            db = self._ndb if mos_type == 'nch' else self._pdb
            ifun = db.get_function('ids', env=env, intent=intent, l=lch)
            # step 3A: compute Ai and bi.  Per transistor, Ai . v_all + bi
            # produces the 4-element argument vector (w, vbs, vds, vgs).
            num_tran = len(fg_list)
            adata = [1, -1] * (3 * num_tran)
            amati = scipy.sparse.csr_matrix((adata, (arow, acol)), shape=(4 * num_tran, self._n))
            bmati = np.zeros(4 * num_tran)
            bmati[0::4] = bdata

            # step 3B: compute A = Ai * Av, b = Ai * bv + bi
            amat = amati.dot(amatv)
            bmat = amati.dot(bmatv) + bmati
            # record scale matrix and function.
            scale_mat = scipy.sparse.diags(fg_list) / inorm
            ifun_list.append((ifun, scale_mat, amat, bmat))
            # each transistor contributes +ids to its source node's KCL row
            # and -ids to its drain node's row (only for unknown nodes).
            for node_d, node_s in ds_list:
                if node_d in reverse_dict:
                    out_row.append(reverse_dict[node_d])
                    out_data.append(-1)
                    out_col.append(out_col_cnt)
                if node_s in reverse_dict:
                    out_row.append(reverse_dict[node_s])
                    out_data.append(1)
                    out_col.append(out_col_cnt)
                out_col_cnt += 1
        # construct output matrix
        out_mat = scipy.sparse.csr_matrix((out_data, (out_row, out_col)), shape=(ndim, out_col_cnt))

        # step 4: define zero function
        def zero_fun(varr):
            # KCL residual at each unknown node, in units of inorm.
            iarr = np.empty(out_col_cnt)
            offset = 0
            for idsf, smat, ai, bi in ifun_list:
                num_out = smat.shape[0]
                # reshape going row first instead of column
                arg = (ai.dot(varr) + bi).reshape(4, -1, order='F').T
                if idsf.ndim == 3:
                    # handle case where transistor source and body are shorted
                    tmpval = idsf(arg[:, [0, 2, 3]])
                else:
                    tmpval = idsf(arg)
                iarr[offset:offset + num_out] = smat.dot(tmpval)
                offset += num_out
            return out_mat.dot(iarr)

        # step 5: define jacobian function
        def jac_fun(varr):
            # Jacobian of zero_fun with respect to the unknown node voltages.
            jarr = np.empty((out_col_cnt, ndim))
            offset = 0
            for idsf, smat, ai, bi in ifun_list:
                num_out = smat.shape[0]
                # reshape going row first instead of column
                arg = (ai.dot(varr) + bi).reshape(4, -1, order='F').T
                if idsf.ndim == 3:
                    # handle case where transistor source and body are shorted
                    tmpval = idsf.jacobian(arg[:, [0, 2, 3]])
                    # noinspection PyTypeChecker
                    tmpval = np.insert(tmpval, 1, 0.0, axis=len(tmpval.shape) - 1)
                else:
                    tmpval = idsf.jacobian(arg)
                jcur = smat.dot(tmpval)
                for idx in range(num_out):
                    # ai is sparse matrix; multiplication is matrix
                    jarr[offset + idx, :] = jcur[idx, :] @ ai[4 * idx:4 * idx + 4, :]
                offset += num_out
            return out_mat.dot(jarr)

        xguess = np.empty(ndim)
        for name, guess_val in guess_dict.items():
            xguess[reverse_dict[self._node_id[name]]] = guess_val

        result = scipy.optimize.root(zero_fun, xguess, jac=jac_fun, tol=itol / inorm, method='hybr')
        if not result.success:
            raise ValueError('solution failed.')

        op_dict = {self._node_name_lookup[nid]: result.x[idx] for idx, nid in enumerate(node_list)}
        return op_dict
diff --git a/src/bag/data/digital.py b/src/bag/data/digital.py
new file mode 100644
index 0000000..bad2ccc
--- /dev/null
+++ b/src/bag/data/digital.py
@@ -0,0 +1,299 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines functions useful for digital verification/postprocessing.
+"""
+
+from typing import Optional, List, Tuple
+
+import numpy as np
+
+from .core import Waveform
+
+
def de_bruijn(n, symbols=None):
    # type: (int, Optional[List[float]]) -> List[float]
    """Returns a De Bruijn sequence with subsequence of length n.

    a De Bruijn sequence with subsequence of length n is a sequence such that
    all possible subsequences of length n appear exactly once somewhere in the
    sequence. This method is useful for simulating the worst case eye diagram
    given finite impulse response.

    Parameters
    ----------
    n : int
        length of the subsequence.
    symbols : Optional[List[float]] or None
        the list of symbols. If None, defaults to [0.0, 1.0].

    Returns
    -------
    seq : List[float]
        the de bruijn sequence.
    """
    alphabet = symbols or [0.0, 1.0]
    k = len(alphabet)

    buf = [0] * (k * n)
    index_seq = []

    # standard recursive construction by concatenating Lyndon words
    # whose lengths divide n (FKM algorithm).
    def _visit(t, p):
        if t > n:
            if n % p == 0:
                index_seq.extend(buf[1:p + 1])
        else:
            buf[t] = buf[t - p]
            _visit(t + 1, p)
            for sym in range(buf[t - p] + 1, k):
                buf[t] = sym
                _visit(t + 1, t)

    _visit(1, 1)
    return [alphabet[i] for i in index_seq]
+
+
def dig_to_pwl(values, tper, trf, td=0):
    # type: (List[float], float, float, float) -> Tuple[List[float], List[float]]
    """Convert a list of digital bits to PWL waveform.

    This function supports negative delay. However, time/value pairs for negative data
    are truncated.

    Parameters
    ----------
    values : List[float]
        list of values for each bit.
    tper : float
        the period in seconds.
    trf : float
        the rise/fall time in seconds.
    td : float
        the delay in seconds.

    Returns
    -------
    tvec : List[float]
        the time vector.
    yvec : List[float]
        the value vector.
    """
    y0 = values[0]
    tcur, ycur = td, y0
    tvec, yvec = [], []
    for v in values:
        if v != ycur:
            # value changed: emit the corner at the start of the transition...
            if tcur >= 0:
                tvec.append(tcur)
                yvec.append(ycur)
            elif tcur < 0 < tcur + trf:
                # transition straddles t=0: emit the linearly interpolated
                # value at t=0 so that time starts at 0.
                tvec.append(0)
                yvec.append(ycur - (v - ycur) / trf * tcur)
            ycur = v
            # ...and the corner at the end of the transition.
            if tcur + trf >= 0:
                tvec.append(tcur + trf)
                yvec.append(ycur)
            elif tcur + trf < 0 < tcur + tper:
                # transition finished before t=0: emit the settled value at 0.
                tvec.append(0)
                yvec.append(ycur)
            tcur += tper
        else:
            if tcur <= 0 < tcur + tper:
                # make sure time starts at 0
                tvec.append(0)
                yvec.append(ycur)
            tcur += tper

    if not tvec:
        # only here if input is constant
        tvec = [0, tper]
        yvec = [y0, y0]
    elif tvec[0] > 0:
        # make time start at 0
        tvec.insert(0, 0)
        yvec.insert(0, y0)

    return tvec, yvec
+
+
def get_crossing_index(yvec, threshold, n=0, rising=True):
    # type: (np.array, float, int, bool) -> int
    """Returns the index at which the given numpy array crosses the threshold.

    Parameters
    ----------
    yvec : np.array
        the numpy array.
    threshold : float
        the crossing threshold.
    n : int
        returns the nth edge index, with n=0 being the first index.
    rising : bool
        True to return rising edge index. False to return falling edge index.

    Returns
    -------
    idx : int
        the crossing edge index.
    """
    # quantize against the threshold and differentiate: +1 marks a rising
    # edge, -1 a falling edge.
    edges = np.diff((yvec >= threshold).astype(int))

    # keep only the requested edge polarity.
    if rising:
        edges = np.maximum(edges, 0)
    else:
        edges = np.minimum(edges, 0)

    return edges.nonzero()[0][n]
+
+
def get_flop_timing(tvec, d, q, clk, ttol, data_thres=0.5,
                    clk_thres=0.5, tstart=0.0, clk_edge='rising', tag=None, invert=False):
    """Calculate flop timing parameters given the associated waveforms.

    This function performs the following steps:

    1. find all valid clock edges. Compute period of the clock (clock waveform
       must be periodic).

    2. For each valid clock edge:

       A. Check if the input changes in the previous cycle. If so, compute tsetup.
          Otherwise, tsetup = tperiod.

       B. Check if input changes in the current cycle. If so, compute thold.
          Otherwise, thold = tperiod.

       C. Check that output transition at most once and that output = input.
          Otherwise, record an error.

       D. record the output data polarity.

    3. For each output data polarity, compute the minimum tsetup and thold and any
       errors. Return summary as a dictionary.


    The output is a dictionary with keys 'setup', 'hold', 'delay', and 'errors'.
    the setup/hold/delay entries contains 2-element tuples describing the worst
    setup/hold/delay time. The first element is the setup/hold/delay time, and
    the second element is the clock edge time at which it occurs. The errors field
    stores all clock edge times at which an error occurs.


    Parameters
    ----------
    tvec : np.ndarray
        the time data.
    d : np.ndarray
        the input data.
    q : np.ndarray
        the output data.
    clk : np.ndarray
        the clock data.
    ttol : float
        time resolution.
    data_thres : float
        the data threshold.
    clk_thres : float
        the clock threshold.
    tstart : float
        ignore data points before tstart.
    clk_edge : str
        the clock edge type. Valid values are "rising", "falling", or "both".
    tag : obj
        an identifier tag to append to results.
    invert : bool
        if True, the flop output is inverted from the data.

    Returns
    -------
    data : dict[str, any]
        A dictionary describing the worst setup/hold/delay and errors, if any.
    """
    # wrap the raw arrays so threshold crossings can be interpolated.
    d_wv = Waveform(tvec, d, ttol)
    clk_wv = Waveform(tvec, clk, ttol)
    q_wv = Waveform(tvec, q, ttol)
    tend = tvec[-1]

    # get all clock sampling times and clock period
    samp_times = clk_wv.get_all_crossings(clk_thres, start=tstart, edge=clk_edge)
    # average spacing between clock edges (clock is assumed periodic).
    tper = (samp_times[-1] - samp_times[0]) / (len(samp_times) - 1)
    # ignore last clock cycle if it's not a full cycle.
    if samp_times[-1] + tper > tend:
        samp_times = samp_times[:-1]

    # compute setup/hold/error for each clock period
    data = {'setup': (tper, -1), 'hold': (tper, -1), 'delay': (0.0, -1), 'errors': []}
    for t in samp_times:
        # data edges in the previous cycle; data/output edges in this cycle.
        d_prev = d_wv.get_all_crossings(data_thres, start=t - tper, stop=t, edge='both')
        d_cur = d_wv.get_all_crossings(data_thres, start=t, stop=t + tper, edge='both')
        q_cur = q_wv.get_all_crossings(data_thres, start=t, stop=t + tper, edge='both')
        d_val = d_wv(t) > data_thres
        q_val = q_wv(t + tper) > data_thres

        # calculate setup/hold/delay
        tsetup = t - d_prev[-1] if d_prev else tper
        thold = d_cur[0] - t if d_cur else tper
        tdelay = q_cur[0] - t if q_cur else 0.0

        # check if flop has error: output must match the (optionally inverted)
        # input, and must transition at most once per cycle.
        error = (invert != (q_val != d_val)) or (len(q_cur) > 1)

        # record results (keep the worst case over all clock edges).
        if tsetup < data['setup'][0]:
            data['setup'] = (tsetup, t)
        if thold < data['hold'][0]:
            data['hold'] = (thold, t)
        if tdelay > data['delay'][0]:
            data['delay'] = (tdelay, t)
        if error:
            data['errors'].append(t)

    if tag is not None:
        # append the identifier tag to each result tuple.
        data['setup'] += (tag, )
        data['hold'] += (tag, )
        data['delay'] += (tag, )
        data['errors'] = [(t, tag) for t in data['errors']]

    return data
diff --git a/src/bag/data/lark.py b/src/bag/data/lark.py
new file mode 100644
index 0000000..5a21946
--- /dev/null
+++ b/src/bag/data/lark.py
@@ -0,0 +1,269 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Optional, Union, List, Any, Dict
+from pathlib import Path
+from dataclasses import dataclass
+
+from lark.lark import Lark
+from lark.visitors import Transformer
+from lark.tree import pydot__tree_to_png
+
+from ..util.search import BinaryIterator
+from ..io.file import open_file
+from ..io.string import wrap_string
+from ..design.netlist import read_spectre_cdl_unwrap
+
# Lark grammar for CDL/SPICE-style netlists (.cdl/.sp/.spf): header lines
# followed by one or more .SUBCKT/.ENDS sub-circuit definitions.
grammar_cdl = """
    start: headers subckts+

    headers: HEADER*
    subckts.2: ".SUBCKT" CELL PORTS+ NEWLINE instances* ".ENDS"

    instances: DEV PORTS* "/"* PORTS PARAMS* NEWLINE

    HEADER: ("*." | ".") ("_" | LETTER | NUMBER)+ (("=" | " = " | WS)? ("_" | LETTER | NUMBER)+)?
    CELL: ("_" | LETTER | NUMBER)+
    PORTS: ("_" | LETTER | NUMBER)+
    DEV: ("_" | LETTER | NUMBER | "@" | "/")+
    PARAMS: ("_" | LETTER | NUMBER)+ "=" PAR_VAL

    SC_NUM: (NUMBER | ".")+ "e-" NUMBER
    PAR_VAL: (("_" | LETTER | NUMBER)+ | SC_NUM | " * " | "*")+

    %import common.ESCAPED_STRING -> STRING
    %import common.LETTER
    %import common.SIGNED_NUMBER -> NUMBER
    %import common.WS
    %import common.NEWLINE
    %ignore WS
"""
+
# Lark grammar for Spectre netlists (.scs/.net): header lines followed by
# one or more subckt/ends definitions.
grammar_scs = r"""
    start: headers subckts+

    headers: HEADER*
    subckts: "subckt" CELL PORTS+ NEWLINE instances* "ends" CELL

    instances: DEV PORTS* PARAMS* NEWLINE

    HEADER: "include " PATH | "simulator lang=spectre"
    CELL: ("_" | LETTER | NUMBER)+
    PORTS: ("_" | LETTER | NUMBER)+
    DEV: ("_" | LETTER | NUMBER | "@" | "/")+
    PARAMS: ("_" | LETTER | NUMBER)+ "=" PAR_VAL

    SC_NUM: (NUMBER | ".")+ "e-" NUMBER
    PAR_VAL: (("_" | LETTER | NUMBER)+ | SC_NUM | " * " | "*")+
    PATH: /"[\w\.\/]+"/

    %import common.ESCAPED_STRING -> STRING
    %import common.LETTER
    %import common.SIGNED_NUMBER -> NUMBER
    %import common.WS
    %import common.NEWLINE
    %ignore WS
"""
+
+
@dataclass
class Instance:
    """One parsed netlist instance; fields are filled from lark tokens in __init__."""
    # instance name (DEV token).
    inst_name: str
    # port net names, excluding the primitive/model name.
    ports: List[str]
    # raw "name=value" parameter strings.
    params: List[str]
    # primitive/model name; set directly for BAG transistors, otherwise
    # recovered as the last "port" token.
    prim: Optional[str] = None
    is_transistor: bool = False
    is_BAG_prim: bool = False
    # cached result of the last netlist() call.
    netlist_str: str = ''

    def __init__(self, items: List[Any]):
        """Build an Instance from the token list of one `instances` rule."""
        self.params = []
        self.ports = []

        for item in items:
            if item.type == 'DEV':
                self.inst_name = item.value
            elif item.type == 'PARAMS':
                self.params.append(item.value)
            elif item.type == 'PORTS':
                # BAG primitive transistors are recognized by model name prefix.
                if item.value.startswith('nmos4') or item.value.startswith('pmos4'):
                    self.prim = item.value
                    self.is_transistor = True
                    self.is_BAG_prim = True
                # TODO: add conditions to check for transistor in extracted netlist
                else:
                    self.ports.append(item.value)
        # the grammar lumps the model name in with the ports; when it was not
        # recognized as a transistor above, the last "port" is the primitive.
        if self.prim is None:
            self.prim = self.ports.pop()

    def netlist(self, used_names: List[str], offset_map: Dict[str, str], scs: bool, last: bool
                ) -> str:
        """Return the netlist string for this instance.

        When ``last`` is True and the instance is a transistor, the gate is
        rerouted through a series DC voltage source (one per transistor base
        name) so a mismatch offset can be applied; ``used_names`` and
        ``offset_map`` are updated in place to keep the source names unique.
        """
        if self.is_transistor:
            # port order differs between BAG primitives and raw devices.
            if self.is_BAG_prim:
                body, drain, gate, source = self.ports
            else:
                drain, gate, source, body = self.ports

            # 1. modify gate connection of device
            new_gate = f'new___{gate}_{self.inst_name.replace("/", "_").replace("@", "_")}' if \
                last else gate
            if self.is_BAG_prim:
                new_ports = [body, drain, new_gate, source]
            else:
                new_ports = [drain, new_gate, source, body]
            self.netlist_str = wrap_string([self.inst_name] + new_ports + [self.prim] + self.params)

            if last:
                # 2. add voltage source
                base_name, sep, index = self.inst_name.partition('@')
                if base_name in offset_map.keys():  # different finger of same transistor
                    offset_v = offset_map[base_name]
                else:  # create unique name
                    offset_v = f'v__{base_name.replace("/", "_")}'
                    if offset_v in used_names:  # not unique; find unique by Binary Iteration
                        bin_iter = BinaryIterator(1, None)
                        while bin_iter.has_next():
                            new_offset_v = f'{offset_v}_{bin_iter.get_next()}'
                            if new_offset_v in used_names:
                                bin_iter.up()
                            else:
                                bin_iter.save_info(new_offset_v)
                                bin_iter.down()

                        offset_v = f'{offset_v}_{bin_iter.get_last_save_info()}'
                    used_names.append(offset_v)
                    offset_map[base_name] = offset_v

                vdc_name = f'V{offset_v}{sep}{index}'
                if scs:
                    str_list = [vdc_name, new_gate, gate, 'vsource', 'type=dc', f'dc={offset_v}']
                else:
                    str_list = [vdc_name, new_gate, gate, offset_v]
                self.netlist_str += wrap_string(str_list)
        else:
            # non-transistor: emit the instance unchanged.
            tmp_list = [self.inst_name]
            tmp_list.extend(self.ports)
            tmp_list.append(self.prim)
            tmp_list.extend(self.params)
            self.netlist_str = wrap_string(tmp_list)

        return self.netlist_str
+
+
@dataclass
class SubCKT:
    """One parsed sub-circuit; fields are filled from lark tokens in __init__."""
    subckt_name: str
    ports: List[str]
    instances: List[Instance]
    # cached result of the last netlist() call.
    netlist_str: str = ''
    # True for the last sub-circuit in the file; its transistors get the
    # gate-offset voltage sources (see Instance.netlist).
    last: bool = False

    def __init__(self, items: List[Any]):
        """Build a SubCKT from the token/object list of one `subckts` rule."""
        self.ports = []
        self.instances = []

        for item in items:
            if isinstance(item, Instance):
                self.instances.append(item)
            elif item.type == 'CELL':
                self.subckt_name = item.value
            elif item.type == 'PORTS':
                self.ports.append(item.value)

    def netlist(self, used_names: List[str], offset_map: Dict[str, str], scs: bool) -> str:
        """Return the netlist string (spectre syntax if scs, else CDL)."""
        # Construct sub-circuit netlist
        # 1. begin sub-circuit
        str_list = ['subckt' if scs else '.SUBCKT', self.subckt_name] + self.ports
        self.netlist_str = wrap_string(str_list)

        # 2. write instances
        for inst in self.instances:
            net = inst.netlist(used_names=used_names, offset_map=offset_map, scs=scs,
                               last=self.last)
            self.netlist_str += net

        # 3. end
        str_list = ['ends', self.subckt_name] if scs else ['.ENDS']
        self.netlist_str += wrap_string(str_list)
        return self.netlist_str + '\n'
+
+
@dataclass
class Header:
    """Netlist header section: the header token values joined as lines."""
    # concatenated header lines, each newline-terminated.
    netlist_str: str

    def __init__(self, items: List[Any]):
        lines = [f'{entry.value}\n' for entry in items]
        self.netlist_str = ''.join(lines)

    # noinspection PyUnusedLocal
    def netlist(self, used_names: List[str], offset_map: Dict[str, str], scs: bool) -> str:
        """Return the header text followed by a blank separator line."""
        return self.netlist_str + '\n'
+
+
class CktTransformer(Transformer):
    """Lark transformer converting parse-tree rules into Header/SubCKT/Instance objects."""

    @classmethod
    def instances(cls, items):
        # one `instances` rule -> one Instance object.
        return Instance(items)

    @classmethod
    def subckts(cls, items):
        # one `subckts` rule -> one SubCKT object.
        return SubCKT(items)

    @classmethod
    def headers(cls, items):
        # the `headers` rule -> a single Header object.
        return Header(items)
+
+
def add_mismatch_offsets(netlist_in: Union[Path, str],
                         netlist_out: Optional[Union[Path, str]] = None, debug: bool = False,
                         ) -> None:
    """Parse a netlist and write a copy with mismatch-offset voltage sources.

    The input is parsed with a suffix-dependent lark grammar, and in the last
    sub-circuit every transistor gate is rerouted through a series DC voltage
    source (see ``Instance.netlist``).  The created offset source names are
    printed to stdout with a 0.0 default value.

    Parameters
    ----------
    netlist_in : Union[Path, str]
        the input netlist file.
    netlist_out : Optional[Union[Path, str]]
        the output netlist file.  Defaults to the input name with 'out' appended.
    debug : bool
        True to also dump the parse tree to "test0.png".
    """
    if isinstance(netlist_in, str):
        netlist_in = Path(netlist_in)

    # pick the grammar based on the netlist flavor.
    if netlist_in.suffix in ['.cdl', '.sp', '.spf']:
        parser = Lark(grammar_cdl, parser='lalr')
        scs = False
    elif netlist_in.suffix in ['.scs', '.net']:
        parser = Lark(grammar_scs, parser='lalr')
        scs = True
    else:
        raise ValueError(f'Unknown netlist suffix={netlist_in.suffix}. Use ".cdl" or ".scs".')

    lines = read_spectre_cdl_unwrap(netlist_in)

    # ensure the final line is newline-terminated before parsing.
    lines[-1] += '\n'
    tree = parser.parse('\n'.join(lines))

    if debug:
        pydot__tree_to_png(tree, "test0.png")
    obj_list = CktTransformer().transform(tree).children
    # only the last sub-circuit (the top cell) receives the offset sources.
    obj_list[-1].last = True

    if netlist_out is None:
        # NOTE(review): this drops the original suffix ("foo.scs" -> "fooout");
        # confirm that the suffix-less default output name is intended.
        netlist_out: Path = netlist_in.with_name(netlist_in.stem + 'out')
    if isinstance(netlist_out, str):
        netlist_out: Path = Path(netlist_out)
    full_netlist = ''
    used_names = []
    offset_map = {}
    for obj in obj_list:
        full_netlist += obj.netlist(used_names, offset_map, scs)
    # report each created offset source with a zero default value.
    for key, val in offset_map.items():
        print(f'{val}: 0.0')

    with open_file(netlist_out, 'w') as f:
        f.write(full_netlist)
diff --git a/src/bag/data/lti.py b/src/bag/data/lti.py
new file mode 100644
index 0000000..5cd17be
--- /dev/null
+++ b/src/bag/data/lti.py
@@ -0,0 +1,880 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines functions/classes for characterizing linear time-invariant circuits.
+"""
+
+from typing import Dict, List, Tuple, Union, Optional
+
import numpy as np
import scipy.linalg
import scipy.signal
import scipy.sparse
import scipy.sparse.linalg
# noinspection PyProtectedMember
from scipy.signal.ltisys import StateSpaceContinuous, TransferFunctionContinuous
+
+
class LTICircuit(object):
    """A class that models a linear-time-invariant circuit.

    This class computes AC transfer functions for linear-time-invariant circuits.

    Note: Since this class works with AC transfer functions, 'gnd' in this circuit is AC ground.

    Parameters
    ----------
    udot_tol : float
        tolerance to determine if dependency on input derivatives is zero.
    """

    # machine epsilon; the relative scale for all rank/zero tests below.
    _float_min = np.finfo(np.float64).eps

    def __init__(self, udot_tol: float = 1.0e-12) -> None:
        self._num_n = 0
        # sparse MNA stamps: (row, col) -> conductance / capacitance value
        self._gmat_data = {}  # type: Dict[Tuple[int, int], float]
        self._cmat_data = {}  # type: Dict[Tuple[int, int], float]
        self._vcvs_list = []  # type: List[Tuple[int, int, int, int, float]]
        self._ind_data = {}  # type: Dict[Tuple[int, int], float]
        # node name -> integer index; AC ground is fixed at -1 and never stamped
        self._node_id = {'gnd': -1}
        self._udot_tol = udot_tol

    def _get_node_id(self, name: str) -> int:
        """Return the node index for the given name, registering it if new."""
        if name not in self._node_id:
            ans = self._num_n
            self._node_id[name] = ans
            self._num_n += 1
            return ans
        else:
            return self._node_id[name]

    @staticmethod
    def _add(mat: Dict[Tuple[int, int], float], key: Tuple[int, int], val: float) -> None:
        """Accumulate val into the sparse matrix entry at key."""
        if key in mat:
            mat[key] += val
        else:
            mat[key] = val

    def add_res(self, res: float, p_name: str, n_name: str) -> None:
        """Adds a resistor to the circuit.

        Parameters
        ----------
        res : float
            the resistance value, in Ohms.
        p_name : str
            the positive terminal net name.
        n_name : str
            the negative terminal net name.
        """
        # avoid 0 resistance.
        res_sgn = 1 if res >= 0 else -1
        g = res_sgn / max(abs(res), self._float_min)
        self.add_conductance(g, p_name, n_name)

    def add_conductance(self, g: float, p_name: str, n_name: str) -> None:
        """Adds a resistor to the circuit given conductance value.

        Parameters
        ----------
        g : float
            the conductance value, in inverse Ohms.
        p_name : str
            the positive terminal net name.
        n_name : str
            the negative terminal net name.
        """
        node_p = self._get_node_id(p_name)
        node_n = self._get_node_id(n_name)

        if node_p == node_n:
            return
        if node_p < node_n:
            # canonical order node_p > node_n so node_p is never ground (-1)
            node_p, node_n = node_n, node_p

        self._add(self._gmat_data, (node_p, node_p), g)
        if node_n >= 0:
            self._add(self._gmat_data, (node_p, node_n), -g)
            self._add(self._gmat_data, (node_n, node_p), -g)
            self._add(self._gmat_data, (node_n, node_n), g)

    def add_vccs(self, gm: float, p_name: str, n_name: str, cp_name: str, cn_name: str = 'gnd'
                 ) -> None:
        """Adds a voltage controlled current source to the circuit.

        Parameters
        ----------
        gm : float
            the gain of the voltage controlled current source, in Siemens.
        p_name : str
            the terminal that the current flows out of.
        n_name : str
            the terminal that the current flows in to.
        cp_name : str
            the positive voltage control terminal.
        cn_name : str
            the negative voltage control terminal.  Defaults to 'gnd'.
        """
        node_p = self._get_node_id(p_name)
        node_n = self._get_node_id(n_name)
        node_cp = self._get_node_id(cp_name)
        node_cn = self._get_node_id(cn_name)

        if node_p == node_n or node_cp == node_cn:
            return

        # ground rows/columns (index -1) are never stamped
        if node_cp >= 0:
            if node_p >= 0:
                self._add(self._gmat_data, (node_p, node_cp), gm)
            if node_n >= 0:
                self._add(self._gmat_data, (node_n, node_cp), -gm)
        if node_cn >= 0:
            if node_p >= 0:
                self._add(self._gmat_data, (node_p, node_cn), -gm)
            if node_n >= 0:
                self._add(self._gmat_data, (node_n, node_cn), gm)

    def add_vcvs(self, gain: float, p_name: str, n_name: str, cp_name: str, cn_name: str = 'gnd'
                 ) -> None:
        """Adds a voltage controlled voltage source to the circuit.

        Parameters
        ----------
        gain : float
            the gain of the voltage controlled voltage source.
        p_name : str
            the positive terminal of the output voltage source.
        n_name : str
            the negative terminal of the output voltage source.
        cp_name : str
            the positive voltage control terminal.
        cn_name : str
            the negative voltage control terminal.  Defaults to 'gnd'.
        """
        node_p = self._get_node_id(p_name)
        node_n = self._get_node_id(n_name)
        node_cp = self._get_node_id(cp_name)
        node_cn = self._get_node_id(cn_name)

        if node_p == node_n:
            raise ValueError('positive and negative terminal of a vcvs cannot be the same.')
        if node_cp == node_cn:
            raise ValueError('positive and negative control terminal of a vcvs cannot be the same.')
        if node_p < node_n:
            # flip nodes so we always have node_p > node_n, to guarantee node_p >= 0
            node_p, node_n, node_cp, node_cn = node_n, node_p, node_cn, node_cp

        # vcvs stamps are deferred to _build_mna_matrices (each adds one state)
        self._vcvs_list.append((node_p, node_n, node_cp, node_cn, gain))

    def add_cap(self, cap: float, p_name: str, n_name: str) -> None:
        """Adds a capacitor to the circuit.

        Parameters
        ----------
        cap : float
            the capacitance value, in Farads.
        p_name : str
            the positive terminal net name.
        n_name : str
            the negative terminal net name.
        """
        node_p = self._get_node_id(p_name)
        node_n = self._get_node_id(n_name)

        if node_p == node_n:
            return
        if node_p < node_n:
            node_p, node_n = node_n, node_p

        self._add(self._cmat_data, (node_p, node_p), cap)
        if node_n >= 0:
            self._add(self._cmat_data, (node_p, node_n), -cap)
            self._add(self._cmat_data, (node_n, node_p), -cap)
            self._add(self._cmat_data, (node_n, node_n), cap)

    def add_ind(self, ind: float, p_name: str, n_name: str) -> None:
        """Adds an inductor to the circuit.

        Parameters
        ----------
        ind : float
            the inductance value, in Henries.
        p_name : str
            the positive terminal net name.
        n_name : str
            the negative terminal net name.
        """
        node_p = self._get_node_id(p_name)
        node_n = self._get_node_id(n_name)

        if node_p == node_n:
            return
        if node_p < node_n:
            key = node_n, node_p
        else:
            key = node_p, node_n

        if key not in self._ind_data:
            self._ind_data[key] = ind
        else:
            # inductors between the same two nodes combine in parallel
            self._ind_data[key] = 1.0 / (1.0 / ind + 1.0 / self._ind_data[key])

    def add_transistor(self, tran_info: Dict[str, float], d_name: str, g_name: str, s_name: str,
                       b_name: str = 'gnd', fg: Union[float, int] = 1, neg_cap: bool = True
                       ) -> None:
        """Adds a small signal transistor model to the circuit.

        Parameters
        ----------
        tran_info : Dict[str, float]
            a dictionary of 1-finger transistor small signal parameters.  Should contain gm, gds,
            gb, cgd, cgs, cgb, cds, cdb, and csb.
        d_name : str
            drain net name.
        g_name : str
            gate net name.
        s_name : str
            source net name.
        b_name : str
            body net name.  Defaults to 'gnd'.
        fg : Union[float, int]
            number of transistor fingers.
        neg_cap : bool
            True to allow negative capacitance (which is there due to model fitting).
        """
        gm = tran_info['gm'] * fg
        gds = tran_info['gds'] * fg
        cgd = tran_info['cgd'] * fg
        cgs = tran_info['cgs'] * fg
        cds = tran_info['cds'] * fg
        # body capacitances are optional in the model dictionary
        cgb = tran_info.get('cgb', 0) * fg
        cdb = tran_info.get('cdb', 0) * fg
        csb = tran_info.get('csb', 0) * fg

        if not neg_cap:
            cgd = max(cgd, 0)
            cgs = max(cgs, 0)
            cds = max(cds, 0)
            cgb = max(cgb, 0)
            cdb = max(cdb, 0)
            csb = max(csb, 0)

        self.add_vccs(gm, d_name, s_name, g_name, s_name)
        self.add_conductance(gds, d_name, s_name)
        self.add_cap(cgd, g_name, d_name)
        self.add_cap(cgs, g_name, s_name)
        self.add_cap(cds, d_name, s_name)
        self.add_cap(cgb, g_name, b_name)
        self.add_cap(cdb, d_name, b_name)
        self.add_cap(csb, s_name, b_name)

        if 'gb' in tran_info:
            # only add these if source is not shorted to body.
            gb = tran_info['gb'] * fg
            self.add_vccs(gb, d_name, s_name, b_name, s_name)

    @classmethod
    def _count_rank(cls, diag: np.ndarray) -> int:
        """Count diagonal entries that are significant relative to the largest one."""
        if diag.size == 0:
            # guard: an empty diagonal (fully reduced system) has rank 0;
            # without this, diag_abs[0] below raises IndexError.
            return 0
        diag_abs = np.abs(diag)
        float_min = cls._float_min
        rank_tol = diag_abs[0] * diag.size * float_min
        rank_cnt = diag_abs > rank_tol
        return np.count_nonzero(rank_cnt)

    @classmethod
    def _solve_gx_bw(cls, g: np.ndarray, b: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
        """Solve the equation G*x + B*[w, w', ...].T = 0 for x.

        Finds matrix Ka, Kw such that x = Ka * a + Kw * [w, w', ...].T solves
        the given equation for any value of a.

        Parameters
        ----------
        g : np.ndarray
            the G matrix, with shape (M, N) and M < N.
        b : np.ndarray
            the B matrix.

        Returns
        -------
        ka : np.ndarray
            the Ka matrix.
        kw : np.ndarray
            the Kw matrix.
        """
        # G = U*S*Vh
        u, s, vh = scipy.linalg.svd(g, full_matrices=True, overwrite_a=True)
        # let B=Uh*B, so now S*Vh*x + B*w = 0
        b = u.T.dot(b)
        # let y = Vh*x, or x = V*y, so now S*y + U*B*w = 0
        v = vh.T
        # truncate the bottom 0 part of S, now S_top*y_top + B_top*w = 0
        rank = cls._count_rank(s)
        # check bottom part of B. If not 0, there's no solution
        b_abs = np.abs(b)
        zero_tol = np.amax(b_abs) * cls._float_min
        if np.count_nonzero(b_abs[rank:, :] > zero_tol) > 0:
            raise ValueError('B matrix bottom is not zero. This circuit has no solution.')
        b_top = b[:rank, :]
        s_top_inv = 1 / s[:rank]  # type: np.ndarray
        s_top_inv = np.diag(s_top_inv)
        # solving, we get y_top = -S_top^-1*B_top*w = Ku*w
        kw = s_top_inv.dot(-b_top)
        # now x = V*y = Vl*y_top + Vr*y_bot = Vr*y_bot + Vl*Kw*w = Ky*y_bot = Kw*w
        vl = v[:, :rank]
        vr = v[:, rank:]
        kw = vl.dot(kw)
        return vr, kw

    @classmethod
    def _transform_c_qr(cls, g, c, b, d):
        """Reveal redundant variables by transforming C matrix using QR decomposition"""
        q, r, p = scipy.linalg.qr(c, pivoting=True)
        rank = cls._count_rank(np.diag(r))
        qh = q.T
        # note: columns of g/d are permuted by the pivot p; c becomes upper triangular r
        return rank, qh.dot(g[:, p]), r, qh.dot(b), d[:, p]

    # @classmethod
    # def _transform_c_svd(cls, g, c, b, d):
    #     """Reveal redundant variables by transforming C matrix using SVD decomposition"""
    #     u, s, vh = scipy.linalg.svd(c, full_matrices=True, overwrite_a=True)
    #     uh = u.T
    #     v = vh.T
    #     rank = cls._count_rank(s)
    #     return rank, uh.dot(g).dot(v), np.diag(s), uh.dot(b), d.dot(v)

    @classmethod
    def _reduce_state_space(cls, g, c, b, d, e, ndim_w):
        """Reduce state space variables.

        Given the state equation G*x + C*x' + B*[w, w', w'', ...].T = 0, and
        y = D*x + E*[w, w', w'', ...].T, check if C is full rank.  If not,
        we compute new G, C, and B matrices with reduced dimensions.
        """
        # step 0: transform C and obtain rank
        rank, g, c, b, d = cls._transform_c_qr(g, c, b, d)
        # rank, g, c, b, d = cls._transform_c_svd(g, c, b, d)
        while rank < c.shape[0]:
            # step 1: eliminate x' term by looking at bottom part of matrices
            ctop = c[:rank, :]
            gtop = g[:rank, :]
            gbot = g[rank:, :]
            btop = b[:rank, :]
            bbot = b[rank:, :]
            # step 2: find ka and kw from bottom
            ka, kw = cls._solve_gx_bw(gbot, bbot)
            # step 3: substitute x = ka * a + kw * [w, w', w'', ...].T
            # each substitution shifts B/E right by ndim_w (one more input derivative)
            g = gtop.dot(ka)
            c = ctop.dot(ka)
            b = np.zeros((btop.shape[0], btop.shape[1] + ndim_w))
            b[:, :btop.shape[1]] = btop + gtop.dot(kw)
            b[:, ndim_w:] += ctop.dot(kw)
            enew = np.zeros((e.shape[0], e.shape[1] + ndim_w))
            enew[:, :-ndim_w] = e + d.dot(kw)
            e = enew
            d = d.dot(ka)
            # step 4: transform C to prepare for next iteration
            rank, g, c, b, d = cls._transform_c_qr(g, c, b, d)
            # rank, g, c, b, d = cls._transform_c_svd(g, c, b, d)

        g, c, b, d, e = cls._simplify(g, c, b, d, e, ndim_w)
        return g, c, b, d, e

    @classmethod
    def _simplify(cls, g, c, b, d, e, ndim_w):
        """Eliminate input derivatives by re-defining state variables.
        """
        # c is upper triangular here (output of _transform_c_qr)
        while b.shape[1] > ndim_w:
            kw = scipy.linalg.solve_triangular(c, b[:, ndim_w:])
            bnew = np.dot(g, -kw)
            bnew[:, :ndim_w] += b[:, :ndim_w]
            b = bnew
            e[:, :kw.shape[1]] -= d.dot(kw)
        return g, c, b, d, e

    def _build_mna_matrices(self, inputs: Union[str, List[str]], outputs: Union[str, List[str]],
                            in_type: str = 'v') -> Tuple[np.ndarray, ...]:
        """Create and return MNA matrices representing this circuit.

        Parameters
        ----------
        inputs : Union[str, List[str]]
            the input voltage/current node name(s).
        outputs : Union[str, List[str]]
            the output voltage node name(s).
        in_type : str
            set to 'v' for input voltage sources.  Otherwise, current sources.

        Returns
        -------
        g : np.ndarray
            the conductance matrix
        c : np.ndarray
            the capacitance/inductance matrix.
        b : np.ndarray
            the input-to-state matrix.
        d : np.ndarray
            the state-to-output matrix.
        e : np.ndarray
            the input-to-output matrix.
        """
        if isinstance(inputs, list) or isinstance(inputs, tuple):
            node_ins = [self._node_id[name] for name in inputs]
        else:
            node_ins = [self._node_id[inputs]]
        if isinstance(outputs, list) or isinstance(outputs, tuple):
            node_outs = [self._node_id[name] for name in outputs]
        else:
            node_outs = [self._node_id[outputs]]

        is_voltage = (in_type == 'v')

        # step 1: construct matrices
        gdata, grows, gcols = [], [], []
        cdata, crows, ccols = [], [], []
        # step 1A: gather conductors/vccs
        for (ridx, cidx), gval in self._gmat_data.items():
            gdata.append(gval)
            grows.append(ridx)
            gcols.append(cidx)
        # step 1B: gather capacitors
        for (ridx, cidx), cval in self._cmat_data.items():
            cdata.append(cval)
            crows.append(ridx)
            ccols.append(cidx)
        # step 1C: gather inductors (each adds one branch-current state)
        num_states = self._num_n
        for (node_p, node_n), lval in self._ind_data.items():
            gdata.append(1)
            grows.append(node_p)
            gcols.append(num_states)
            gdata.append(1)
            grows.append(num_states)
            gcols.append(node_p)
            if node_n >= 0:
                gdata.append(-1)
                grows.append(node_n)
                gcols.append(num_states)
                gdata.append(-1)
                grows.append(num_states)
                gcols.append(node_n)
            cdata.append(-lval)
            crows.append(num_states)
            ccols.append(num_states)
            num_states += 1
        # step 1D: add currents from vcvs
        for node_p, node_n, node_cp, node_cn, gain in self._vcvs_list:
            # vcvs means vp - vn - A*vcp + A*vcn = 0, and current flows from p to n
            # current flowing out of p
            gdata.append(1)
            grows.append(node_p)
            gcols.append(num_states)
            # voltage of p
            gdata.append(1)
            grows.append(num_states)
            gcols.append(node_p)
            if node_n >= 0:
                # current flowing into n
                gdata.append(-1)
                grows.append(node_n)
                gcols.append(num_states)
                # voltage of n
                gdata.append(-1)
                grows.append(num_states)
                gcols.append(node_n)
            if node_cp >= 0:
                # voltage of cp
                gdata.append(-gain)
                grows.append(num_states)
                gcols.append(node_cp)
            if node_cn >= 0:
                # voltage of cn
                gdata.append(gain)
                grows.append(num_states)
                gcols.append(node_cn)
            num_states += 1

        ndim_in = len(node_ins)
        if is_voltage:
            # step 1E: add current/voltage from input voltage source
            b = np.zeros((num_states + ndim_in, ndim_in))
            for in_idx, node_in in enumerate(node_ins):
                gdata.append(1)
                grows.append(node_in)
                gcols.append(num_states)
                gdata.append(-1)
                grows.append(num_states)
                gcols.append(node_in)
                b[num_states + in_idx, in_idx] = 1
            num_states += ndim_in
        else:
            # inject current to node_in
            b = np.zeros((num_states, ndim_in))
            for in_idx, node_in in enumerate(node_ins):
                b[node_in, in_idx] = -1

        # step 2: create matrices
        # .toarray() avoids the deprecated np.matrix path of .todense().A
        shape = (num_states, num_states)
        g = scipy.sparse.csc_matrix((gdata, (grows, gcols)), shape=shape).toarray()
        c = scipy.sparse.csc_matrix((cdata, (crows, ccols)), shape=shape).toarray()
        ndim_out = len(node_outs)
        d = scipy.sparse.csc_matrix((np.ones(ndim_out), (np.arange(ndim_out), node_outs)),
                                    shape=(ndim_out, num_states)).toarray()
        e = np.zeros((ndim_out, ndim_in))

        return g, c, b, d, e

    def get_state_space(self, inputs: Union[str, List[str]], outputs: Union[str, List[str]],
                        in_type: str = 'v') -> StateSpaceContinuous:
        """Compute the state space model from the given inputs to outputs.

        Parameters
        ----------
        inputs : Union[str, List[str]]
            the input voltage/current node name(s).
        outputs : Union[str, List[str]]
            the output voltage node name(s).
        in_type : str
            set to 'v' for input voltage sources.  Otherwise, current sources.

        Returns
        -------
        system : StateSpaceContinuous
            the scipy state space object.  See scipy.signal package on how to use this object.
        """
        g0, c0, b0, d0, e0 = self._build_mna_matrices(inputs, outputs, in_type)
        ndim_in = e0.shape[1]
        g, c, b, d, e = self._reduce_state_space(g0, c0, b0, d0, e0, ndim_in)
        # c is upper triangular after reduction, so a triangular solve suffices
        amat = scipy.linalg.solve_triangular(c, -g)
        bmat = scipy.linalg.solve_triangular(c, -b)
        cmat = d
        e_abs = np.abs(e)
        tol = np.amax(e_abs) * self._udot_tol
        if np.count_nonzero(e_abs[:, ndim_in:] > tol) > 0:
            print('WARNING: output depends on input derivatives. Ignored.')
            print('D matrix: ')
            print(e)
        dmat = e[:, :ndim_in]

        return StateSpaceContinuous(amat, bmat, cmat, dmat)

    def get_num_den(self, in_name: str, out_name: str, in_type: str = 'v', atol: float = 0.0
                    ) -> Tuple[np.ndarray, np.ndarray]:
        """Compute the transfer function between the two given nodes.

        Parameters
        ----------
        in_name : str
            the input voltage/current node name.
        out_name : str
            the output voltage node name.
        in_type : str
            set to 'v' for input voltage sources.  Otherwise, current sources.
        atol : float
            absolute tolerance for checking zeros in the numerator.  Used to filter out scipy
            warnings.

        Returns
        -------
        num : np.ndarray
            the numerator polynomial.
        den : np.ndarray
            the denominator polynomial.
        """
        state_space = self.get_state_space(in_name, out_name, in_type=in_type)
        num, den = scipy.signal.ss2tf(state_space.A, state_space.B, state_space.C, state_space.D)
        num = num[0, :]
        # check if numerator has leading zeros.
        # this makes it so the user have full control over numerical precision, and
        # avoid scipy bad conditioning warnings.
        # keep at least one coefficient so an all-zero numerator cannot raise IndexError.
        while num.size > 1 and abs(num[0]) <= atol:
            num = num[1:]

        return num, den

    def get_transfer_function(self, in_name: str, out_name: str, in_type: str = 'v',
                              atol: float = 0.0) -> TransferFunctionContinuous:
        """Compute the transfer function between the two given nodes.

        Parameters
        ----------
        in_name : str
            the input voltage/current node name.
        out_name : str
            the output voltage node name.
        in_type : str
            set to 'v' for input voltage sources.  Otherwise, current sources.
        atol : float
            absolute tolerance for checking zeros in the numerator.  Used to filter out scipy
            warnings.

        Returns
        -------
        system : TransferFunctionContinuous
            the scipy transfer function object.  See scipy.signal package on how to use this object.
        """
        num, den = self.get_num_den(in_name, out_name, in_type=in_type, atol=atol)
        return TransferFunctionContinuous(num, den)

    def get_impedance(self, node_name: str, freq: float, atol: float = 0.0) -> complex:
        """Computes the impedance looking into the given node.

        Parameters
        ----------
        node_name : str
            the node to compute impedance for.  We will inject a current into this node and
            measure the voltage on this node.
        freq : float
            the frequency to compute the impedance at, in Hertz.
        atol : float
            absolute tolerance for checking zeros in the numerator.  Used to filter out scipy
            warnings.

        Returns
        -------
        impedance : complex
            the impedance value, in Ohms.
        """
        # Zin = V(node)/I(node): drive the node with a current source, read its voltage
        sys = self.get_transfer_function(node_name, node_name, in_type='i', atol=atol)
        w_test = 2 * np.pi * freq
        _, zin_vec = sys.freqresp(w=[w_test])
        return zin_vec[0]
+
+
def get_w_crossings(num: np.ndarray, den: np.ndarray, atol: float = 1.0e-8,
                    ) -> Tuple[Optional[float], Optional[float]]:
    """Compute the gain margin/phase margin frequencies of the transfer function.

    Substituting :math:`s = jw`, the transfer function is written as:

    .. math::

        \\frac{A(w) + jB(w)}{C(w) + jD(w)}

    where :math:`A(w)`, :math:`B(w)`, :math:`C(w)`, and :math:`D(w)` are real polynomials.  The
    gain margin frequency is the frequency at which:

    .. math::

        \\frac{B(w)}{A(w)} = \\frac{D(w)}{C(w)} \\implies A(w)D(w) - B(w)C(w) = 0


    The phase margin frequency is the frequency at which:

    .. math::

        \\frac{A^2(w) + B^2(w)}{C^2(w) + D^2(w)} = 1 \\implies
        A^2(w) + B^2(w) - C^2(w) - D^2(w) = 0

    This function solves these two equations and returns the smallest real and positive roots.

    Parameters
    ----------
    num : np.ndarray
        the numerator polynomial coefficients array.  index 0 is coefficient for highest term.
    den : np.ndarray
        the denominator polynomial coefficients array.  index 0 is coefficient for highest term.
    atol : float
        absolute tolerance used to check if the imaginary part of a root is 0, or if a root is
        greater than 0.

    Returns
    -------
    w_phase : Optional[float]
        lowest positive frequency in rad/s at which the gain becomes unity.  None if no such
        frequency exists.
    w_gain : Optional[float]
        lowest positive frequency in rad/s at which the phase becomes 180 degrees.  None if no
        such frequency exists.
    """
    # The coefficient of s^k picks up a factor j^k when s = jw.  Split num/den
    # into real and imaginary polynomials in w using the period-4 sign pattern:
    # Re(j^k): +1, 0, -1, 0, ...   Im(j^k): 0, +1, 0, -1, ...
    ncoef = num[::-1]
    dcoef = den[::-1]
    kn = np.arange(ncoef.size) % 4
    kd = np.arange(dcoef.size) % 4
    re_n = np.where(kn == 0, 1.0, 0.0) - np.where(kn == 2, 1.0, 0.0)
    im_n = np.where(kn == 1, 1.0, 0.0) - np.where(kn == 3, 1.0, 0.0)
    re_d = np.where(kd == 0, 1.0, 0.0) - np.where(kd == 2, 1.0, 0.0)
    im_d = np.where(kd == 1, 1.0, 0.0) - np.where(kd == 3, 1.0, 0.0)

    apoly = np.poly1d((ncoef * re_n)[::-1])
    bpoly = np.poly1d((ncoef * im_n)[::-1])
    cpoly = np.poly1d((dcoef * re_d)[::-1])
    dpoly = np.poly1d((dcoef * im_d)[::-1])

    # unity-gain equation first, then the 180-degree phase equation
    eqn_list = [apoly * apoly + bpoly * bpoly - cpoly * cpoly - dpoly * dpoly,
                apoly * dpoly - bpoly * cpoly]
    ans = []  # type: List[Optional[float]]
    for eqn in eqn_list:
        w_min = None  # type: Optional[float]
        for root in eqn.roots:
            w_cand = float(root.real)
            # keep real roots strictly above atol; track the smallest one
            if abs(root.imag) < atol < w_cand and (w_min is None or w_cand < w_min):
                w_min = w_cand
        ans.append(w_min)

    return ans[0], ans[1]
+
+
def get_w_3db(num: np.ndarray, den: np.ndarray, atol: float = 1.0e-8
              ) -> Optional[float]:
    """Given the numerator and denominator of the transfer function, compute the 3dB frequency.

    To determine the 3dB frequency, we first normalize the transfer function so that its
    DC gain is one, then we write the transfer function (with :math:`s = jw`) as:

    .. math::

        \\frac{A(w) + jB(w)}{C(w) + jD(w)}

    where :math:`A(w)`, :math:`B(w)`, :math:`C(w)`, and :math:`D(w)` are real polynomials.  The
    3dB frequency is the frequency at which:

    .. math::

        \\frac{A^2(w) + B^2(w)}{C^2(w) + D^2(w)} = 0.5 \\implies
        A^2(w) + B^2(w) - 0.5\\left(C^2(w) + D^2(w)\\right) = 0

    This function solves this equation and returns the smallest real and positive root.

    Parameters
    ----------
    num : np.ndarray
        the numerator polynomial coefficients array.  index 0 is coefficient for highest term.
    den : np.ndarray
        the denominator polynomial coefficients array.  index 0 is coefficient for highest term.
    atol : float
        absolute tolerance used to check if the imaginary part of a root is 0, or if a root is
        greater than 0.

    Returns
    -------
    w_3db : Optional[float]
        the 3dB frequency in rad/s.  None if no such frequency exists.
    """
    # normalize to unity DC gain, then split into real/imaginary polynomials
    # along s = jw using the period-4 sign pattern of j^k.
    ncoef = num[::-1] / num[-1]
    dcoef = den[::-1] / den[-1]
    kn = np.arange(ncoef.size) % 4
    kd = np.arange(dcoef.size) % 4
    re_n = np.where(kn == 0, 1.0, 0.0) - np.where(kn == 2, 1.0, 0.0)
    im_n = np.where(kn == 1, 1.0, 0.0) - np.where(kn == 3, 1.0, 0.0)
    re_d = np.where(kd == 0, 1.0, 0.0) - np.where(kd == 2, 1.0, 0.0)
    im_d = np.where(kd == 1, 1.0, 0.0) - np.where(kd == 3, 1.0, 0.0)

    apoly = np.poly1d((ncoef * re_n)[::-1])
    bpoly = np.poly1d((ncoef * im_n)[::-1])
    cpoly = np.poly1d((dcoef * re_d)[::-1])
    dpoly = np.poly1d((dcoef * im_d)[::-1])

    # |H(jw)|^2 = 1/2  <=>  A^2 + B^2 - (C^2 + D^2)/2 = 0
    eqn = apoly * apoly + bpoly * bpoly - (cpoly * cpoly + dpoly * dpoly) / 2
    w_ans = None  # type: Optional[float]
    for root in eqn.roots:
        w_cand = float(root.real)
        # keep real roots strictly above atol; track the smallest one
        if abs(root.imag) < atol < w_cand and (w_ans is None or w_cand < w_ans):
            w_ans = w_cand

    return w_ans
+
+
def get_stability_margins(num: np.ndarray, den: np.ndarray, rtol: float = 1.0e-8,
                          atol: float = 1.0e-8) -> Tuple[float, float]:
    """Given the numerator and denominator of the transfer function, compute phase and gain margins.

    Parameters
    ----------
    num : np.ndarray
        the numerator polynomial coefficients array. index 0 is coefficient for highest term.
    den : np.ndarray
        the denominator polynomial coefficients array. index 0 is coefficient for highest term.
    rtol : float
        relative tolerance. Used to check if two frequencies are equal.
    atol : float
        absolute tolerance. Used to check a number is equal to 0.

    Returns
    -------
    phase_margin : float
        the phase margin in degrees. If the system is unstable, a negative number is returned.
    gain_margin : float
        the gain margin, as a plain magnitude ratio (not in dB).
    """
    poly_n = np.poly1d(num)
    poly_d = np.poly1d(den)

    # compute gain margin.
    # w_phase: unity-gain frequency; w_gain: 180-degree phase frequency.
    w_phase, w_gain = get_w_crossings(num, den, atol=atol)
    if w_gain is None:
        # phase never reaches 180 degrees: gain margin is unbounded
        gain_margin = float('inf')
    else:
        # gain margin = 1 / |H(jw_gain)| = |den/num| at the phase crossover
        gain_margin = abs(poly_d(1j * w_gain) / poly_n(1j * w_gain))

    # compute phase margin
    if w_phase is None:
        # gain never equal to 1; that means gain is always greater than 1 or less than 1.
        dc_gain = poly_n(0) / poly_d(0)
        if dc_gain < 1 - max(rtol, atol):
            # gain is always less than 1, infinite phase margin
            phase_margin = float('inf')
        else:
            # gain is always greater than 1, unstable
            # (-1 is the sentinel value for an unstable system)
            phase_margin = -1
    elif w_gain is not None and w_phase > w_gain + max(w_gain * rtol, atol):
        # unity gain frequency > 180 degree frequency, we're unstable
        phase_margin = -1
    else:
        # distance of the phase at the unity-gain frequency from -180 degrees
        phase_margin = np.angle(poly_n(1j * w_phase) / poly_d(1j * w_phase), deg=True) + 180

    return phase_margin, gain_margin
diff --git a/src/bag/data/ltv.py b/src/bag/data/ltv.py
new file mode 100644
index 0000000..038d11d
--- /dev/null
+++ b/src/bag/data/ltv.py
@@ -0,0 +1,495 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines functions and classes for linear time-varying circuits data post-processing.
+"""
+
+import numpy as np
+import scipy.interpolate as interp
+import scipy.sparse as sparse
+
+
+def _even_quotient(a, b, tol=1e-6):
+ """Returns a / b if it is an integer, -1 if it is not.."""
+ num = int(round(a / b))
+ if abs(a - b * num) < abs(b * tol):
+ return num
+ return -1
+
+
+class LTVImpulseFinite(object):
+ r"""A class that computes finite impulse response of a linear time-varying circuit.
+
+ This class computes the time-varying impulse response based on PSS/PAC simulation
+ data, and provides several useful query methods. Your simulation should be set up
+ as follows:
+
+ #. Setup PSS as usual. We will denote system period as tper and fc = 1/tper.
+
+ #. In PAC, set the maxmimum sidebands to m.
+
+ #. In PAC, set the input frequency sweep to be absolute, and sweep from 0 to
+ n * fstep in steps of fstep, where fstep = fc / k for some integer k.
+
+ k should be chosen so that the output settles back to 0 after time k * tper. k
+ should also be chosen such that fstep is a nice round frequency. Otherwise,
+ numerical errors may introduce strange results.
+
+ n should be chosen so that n * fstep is sufficiently large compared to system
+ bandwidth.
+
+ #. In PAC options, set the freqaxis option to be "in".
+
+ #. After simulation, PAC should save the output frequency response as a function of
+ output harmonic number and input frequency. Post-process this into a complex 2D
+ matrix hmat with shape (2 * m + 1, n + 1), and pass it to this class's constructor.
+
+ Parameters
+ ----------
+ hmat : np.ndarray
+ the PAC simulation data matrix with shape (2 * m + 1, n + 1).
+ hmat[a + m, b] is the complex AC gain from input frequency b * fc / k
+ to output frequency a * fc + b * fc / k.
+ m : int
+ number of output sidebands.
+ n : int
+ number of input frequencies.
+ tper : float
+ the system period, in seconds.
+ k : int
+ the ratio between period of the input impulse train and the system period.
+ Must be an integer.
+ out0 : :class:`numpy.ndarray`
+ steady-state output transient waveform with 0 input over 1 period. This should
+ be a two-column array, where the first column is time vector and second column
+ is the output. Used to compute transient response.
+
+ Notes
+ -----
+ This class uses the algorithm described in [1]_ to compute impulse response from PSS/PAC
+ simulation data. The impulse response :math:`h(t, \tau)` satisfies the following equation:
+
+ .. math:: y(t) = \int_{-\infty}^{\infty} h(t, \tau) \cdot x(\tau)\ d\tau
+
+ Intuitively, :math:`h(t, \tau)` represents the output at time :math:`t` subject to an impulse at
+ time :math:`\tau`. As described in the paper, If :math:`w_c` is the system frequency, and
+ :math:`H_m(jw)` is the frequency response of the system at :math:`mw_c + w` due to an input
+ sinusoid with frequency :math:`w`, then the impulse response can be calculated as:
+
+ .. math::
+
+ h(t, \tau) = \frac{1}{kT}\sum_{n=-\infty}^{\infty}\sum_{m=-\infty}^{\infty}
+ H_m\left (j\dfrac{nw_c}{k}\right) \exp \left[ jmw_ct + j\dfrac{nw_c}{k} (t - \tau)\right]
+
+ where :math:`0 \le \tau < T` and :math:`\tau \le t \le \tau + kT`.
+
+ References
+ ----------
+ .. [1] J. Kim, B. S. Leibowitz and M. Jeeradit, "Impulse sensitivity function analysis of
+ periodic circuits," 2008 IEEE/ACM International Conference on Computer-Aided Design,
+ San Jose, CA, 2008, pp. 386-391.
+
+ .. automethod:: __call__
+ """
+ def __init__(self, hmat, m, n, tper, k, out0):
+ hmat = np.asarray(hmat)
+ if hmat.shape != (2 * m + 1, n + 1):
+ raise ValueError('hmat shape = %s not compatible with M=%d, N=%d' %
+ (hmat.shape, m, n))
+
+ # use symmetry to fill in negative input frequency data.
+ fullh = np.empty((2 * m + 1, 2 * n + 1), dtype=complex)
+ fullh[:, n:] = hmat / (k * tper)
+ fullh[:, :n] = np.fliplr(np.flipud(fullh[:, n + 1:])).conj()
+
+ self.hmat = fullh
+ wc = 2.0 * np.pi / tper
+ self.m_col = np.arange(-m, m + 1) * (1.0j * wc)
+ self.n_col = np.arange(-n, n + 1) * (1.0j * wc / k)
+ self.m_col = self.m_col.reshape((-1, 1))
+ self.n_col = self.n_col.reshape((-1, 1))
+ self.tper = tper
+ self.k = k
+ self.outfun = interp.interp1d(out0[:, 0], out0[:, 1], bounds_error=True,
+ assume_sorted=True)
+
+ @staticmethod
+ def _print_debug_msg(result):
+ res_imag = np.imag(result).flatten()
+ res_real = np.real(result).flatten()
+ res_ratio = np.abs(res_imag / (res_real + 1e-18))
+ idx = np.argmax(res_ratio)
+ print('max imag/real ratio: %.4g, imag = %.4g, real = %.4g' %
+ (res_ratio[idx], res_imag[idx], res_real[idx]))
+
+ def __call__(self, t, tau, debug=False):
+ """Calculate h(t, tau).
+
+ Compute h(t, tau), which is the output at t subject to an impulse
+ at time tau. standard numpy broadcasting rules apply.
+
+ Parameters
+ ----------
+ t : array-like
+ the output time.
+ tau : array-like
+ the input impulse time.
+ debug : bool
+ True to print debug messages.
+
+ Returns
+ -------
+ val : :class:`numpy.ndarray`
+ the time-varying impulse response evaluated at the given coordinates.
+ """
+ # broadcast arguments to same shape
+ t, tau = np.broadcast_arrays(t, tau)
+
+ # compute impulse using efficient matrix multiply and numpy broadcasting.
+ dt = t - tau
+ zero_indices = (dt < 0) | (dt > self.k * self.tper)
+ t_row = t.reshape((1, -1))
+ dt_row = dt.reshape((1, -1))
+ tmp = np.dot(self.hmat, np.exp(np.dot(self.n_col, dt_row))) * np.exp(np.dot(self.m_col,
+ t_row))
+ result = np.sum(tmp, axis=0).reshape(dt.shape)
+
+ # zero element such that dt < 0 or dt > k * T.
+ result[zero_indices] = 0.0
+
+ if debug:
+ self._print_debug_msg(result)
+
+ # discard imaginary part
+ return np.real(result)
+
+ def _get_core(self, num_points, debug=False):
+ """Returns h(dt, tau) matrix and output waveform over 1 period. Used by lsim.
+
+ Compute h(dt, tau) for 0 <= tau < T and 0 <= dt < kT, where dt = t - tau.
+ """
+ dt_vec = np.linspace(0.0, self.k * self.tper, self.k * num_points,
+ endpoint=False) # type: np.ndarray
+ tvec_per = dt_vec[:num_points]
+ tau_col = tvec_per.reshape((-1, 1))
+ dt_row = dt_vec.reshape((1, -1))
+ # use matrix multiply to sum across n
+ tmp = np.dot(self.hmat, np.exp(np.dot(self.n_col, dt_row)))
+ # use broadcast multiply for exp(-jwm*(t-tau)) term
+ tmp = tmp * np.exp(np.dot(self.m_col, dt_row))
+ # use matrix multiply to sum across m
+ result = np.dot(np.exp(np.dot(tau_col, self.m_col.T)), tmp).T
+
+ if debug:
+ self._print_debug_msg(result)
+
+ # discard imaginary part
+ result = np.real(result)
+ # compute output waveform
+ wvfm = self.outfun(tvec_per)
+ return result, wvfm
+
    def visualize(self, fig_idx, num_points, num_period=None,
                  plot_color=True, plot_3d=False, show=True):
        """Visualize the time-varying impulse response.

        Parameters
        ----------
        fig_idx : int
            starting figure index.
        num_points : int
            number of sample points in a period.
        num_period : int
            number of output period.  Defaults to k (the full response span);
            must not exceed k.
        plot_color : bool
            True to create a plot of the time-varying impulse response as 2D color plot.
        plot_3d : bool
            True to create a 3D plot of the impulse response.
        show : bool
            True to show the plots immediately. Set to False if you want to create some
            other plots.

        Raises
        ------
        ValueError
            if num_period is larger than k.
        """
        if not plot_color and not plot_3d:
            # do nothing.
            return
        if num_period is None:
            num_period = self.k
        elif num_period > self.k:
            raise ValueError(f'num_period = {num_period} > {self.k} = k')

        # sample grid: tau over one period, dt over num_period periods;
        # the plotted output time is t = tau + dt.
        tot_points = num_period * num_points
        tau_vec = np.linspace(0, self.tper, num_points, endpoint=False)
        dt_vec = np.linspace(0, num_period * self.tper, tot_points, endpoint=False)
        dt, tau = np.meshgrid(dt_vec, tau_vec, indexing='ij', copy=False)
        t = tau + dt

        # rows of result are indexed by dt; truncate to the requested span.
        result, _ = self._get_core(num_points)
        result = result[:num_period * num_points, :]

        import matplotlib.pyplot as plt

        if plot_color:
            # plot 2D color
            fig = plt.figure(fig_idx)
            fig_idx += 1
            ax = fig.gca()
            cp = ax.pcolor(t, tau, result, cmap=plt.get_cmap('cubehelix'))
            plt.colorbar(cp)
            ax.set_title('Impulse response contours')
            ax.set_ylabel('impulse time')
            ax.set_xlabel('output time')

        if plot_3d:
            # plot 3D impulse response
            # noinspection PyUnresolvedReferences
            from mpl_toolkits.mplot3d import Axes3D

            fig = plt.figure(fig_idx)
            ax = fig.add_subplot(111, projection='3d')
            ax.plot_surface(t, tau, result, rstride=1, cstride=1, linewidth=0,
                            cmap=plt.get_cmap('cubehelix'))
            ax.set_title('Impulse response')
            ax.set_ylabel('impulse time')
            ax.set_xlabel('output time')

        if show:
            plt.show()
+
+ def lsim(self, u, tstep, tstart=0.0, ac_only=False, periodic=False, debug=False):
+ r"""Compute the output waveform given input waveform.
+
+ This method assumes zero initial state. The output waveform will be the
+ same length as the input waveform, so pad zeros if necessary.
+
+ Parameters
+ ----------
+ u : array-like
+ the input waveform.
+ tstep : float
+ the input/output time step, in seconds. Must evenly divide system period.
+ tstart : float
+ the time corresponding to u[0]. Assume u = 0 for all time before tstart.
+ Defaults to 0.
+ ac_only : bool
+ Return output waveform due to AC input only and without steady-state
+ transient.
+ periodic : bool
+ True if the input is periodic. If so, returns steady state output.
+ debug : bool
+ True to print debug messages.
+
+ Returns
+ -------
+ y : :class:`numpy.ndarray`
+ the output waveform.
+
+ Notes
+ -----
+ This method computes the integral:
+
+ .. math:: y(t) = \int_{-\infty}^{\infty} h(t, \tau) \cdot x(\tau)\ d\tau
+
+ using the following algorithm:
+
+ #. set :math:`d\tau = \texttt{tstep}`.
+ #. Compute :math:`h(\tau + dt, \tau)` for :math:`0 \le dt < kT` and
+ :math:`0 \le \tau < T`, then express as a kN-by-N matrix. This matrix
+ completely describes the time-varying impulse response.
+ #. tile the impulse response matrix horizontally until its number of columns
+ matches input signal length, then multiply column i by u[i].
+ #. Compute y as the sum of all anti-diagonals of the matrix computed in
+ previous step, multiplied by :math:`d\tau`. Truncate if necessary.
+ """
+ u = np.asarray(u)
+ nstep = _even_quotient(self.tper, tstep)
+ ndelay = _even_quotient(tstart, tstep)
+
+ # error checking
+ if len(u.shape) != 1:
+ raise ValueError('u must be a 1D array.')
+ if nstep < 0:
+ raise ValueError('Time step = %.4g does not evenly divide'
+ 'System period = %.4g' % (tstep, self.tper))
+ if ndelay < 0:
+ raise ValueError('Time step = %.4g does not evenly divide'
+ 'Startimg time = %.4g' % (tstep, tstart))
+ if periodic and nstep != u.size:
+ raise ValueError('Periodic waveform must have same period as system period.')
+
+ # calculate and tile hcore
+ ntot = u.size
+ hcore, outwv = self._get_core(nstep, debug=debug)
+ hcore = np.roll(hcore, -ndelay, axis=1)
+ outwv = np.roll(outwv, -ndelay)
+
+ if periodic:
+ # input periodic; more efficient math.
+ hcore *= u
+ hcore = np.tile(hcore, (1, self.k + 1))
+ y = np.bincount(np.sum(np.indices(hcore.shape), axis=0).flat, hcore.flat)
+ y = y[self.k * nstep:(self.k + 1) * nstep] * tstep
+ else:
+ ntile = int(np.ceil(ntot * 1.0 / nstep))
+ hcore = np.tile(hcore, (1, ntile))
+ outwv = np.tile(outwv, (ntile,))
+ hcore = hcore[:, :ntot]
+ outwv = outwv[:ntot]
+
+ # broadcast multiply
+ hcore *= u
+ # magic code from stackoverflow
+ # returns an array of the sums of all anti-diagonals.
+ y = np.bincount(np.sum(np.indices(hcore.shape), axis=0).flat, hcore.flat)[:ntot] * tstep
+
+ if not ac_only:
+ # add output steady state transient
+ y += outwv
+ return y
+
    def lsim_digital(self, tsym, tstep, data, pulse, tstart=0.0, nchain=1, tdelta=0.0, **kwargs):
        """Compute output waveform given input pulse shape and data.

        This method is similar to :func:`~bag.data.ltv.LTVImpulseFinite.lsim`, but
        assumes the input is superposition of shifted and scaled copies of a given
        pulse waveform. This assumption speeds up the computation and is useful
        for high speed link design.

        Parameters
        ----------
        tsym : float
            the symbol period, in seconds. Must evenly divide system period.
        tstep : float
            the output time step, in seconds. Must evenly divide symbol period.
        data : list[float]
            list of symbol values.
        pulse : np.ndarray
            the pulse waveform as a two-column array. The first column is time,
            second column is pulse waveform value. Linear interpolation will be used
            if necessary. Time must start at 0.0 and be increasing.
        tstart : float
            time of the first data symbol. Defaults to 0.0
        nchain : int
            number of blocks in a chain. Defaults to 1. This argument is useful if
            you have multiple blocks cascaded together in a chain, and you wish to find
            the output waveform at the end of the chain.
        tdelta : float
            time difference between adjacent elements in a chain. Defaults to 0. This
            argument is useful for simulating a chain of latches, where blocks operate
            on alternate phases of the clock.
        kwargs : dict[str, any]
            additional keyword arguments for :func:`~bag.data.ltv.LTVImpulseFinite.lsim`.

        Returns
        -------
        output : :class:`numpy.ndarray`
            the output waveform over N symbol period, where N is the given data length.

        Raises
        ------
        ValueError
            if the period-divisibility requirements on tsym, tstep, or tstart
            are not met.
        """
        # check tsym evenly divides system period
        nsym = _even_quotient(self.tper, tsym)
        if nsym < 0:
            raise ValueError('Symbol period %.4g does not evenly divide '
                             'system period %.4g' % (tsym, self.tper))

        # check tstep evenly divides tsym
        nstep = _even_quotient(tsym, tstep)
        if nstep < 0:
            raise ValueError('Time step %.4g does not evenly divide '
                             'symbol period %.4g' % (tstep, tsym))

        # check tstep evenly divides tstart
        ndelay = _even_quotient(tstart, tstep)
        if ndelay < 0:
            raise ValueError('Time step %.4g does not evenly divide '
                             'starting time %.4g' % (tstep, tstart))

        # number of time steps per system period.
        nper = nstep * nsym

        pulse = np.asarray(pulse)
        tvec = pulse[:, 0]
        pvec = pulse[:, 1]

        # find input length
        # nlast: one past the last nonzero pulse sample (clamped to the array).
        # noinspection PyUnresolvedReferences
        nlast = min(np.nonzero(pvec)[0][-1] + 1, tvec.size - 1)
        tlast = tvec[nlast]
        # total samples: pulse support plus the response settling span of the
        # whole chain plus room for the remaining symbol phases.
        ntot = int(np.ceil(tlast / tstep)) + nchain * self.k * nper + nstep * (nsym - 1)

        # interpolate input
        pfun = interp.interp1d(tvec, pvec, kind='linear', copy=False, bounds_error=False,
                               fill_value=0.0, assume_sorted=True)
        tin = np.linspace(0.0, ntot * tstep, ntot, endpoint=False)
        pin = pfun(tin)

        # super-impose pulse responses
        # the system is periodic with nsym symbols per period, so only nsym
        # distinct pulse responses exist; loop once per symbol phase.
        num_out = len(data) * nstep
        output = np.zeros(num_out)
        for idx in range(nsym):
            # get output pulse response
            pout = pin
            for j in range(nchain):
                pout = self.lsim(pout, tstep, tstart=tstart + j * tdelta, periodic=False,
                                 ac_only=True, **kwargs)

            # construct superposition matrix
            # each diagonal shifts the pulse response by one system period and
            # scales it by the corresponding symbol value.
            cur_data = data[idx::nsym]
            offsets = np.arange(0, len(cur_data) * nper, nper) * -1
            diags = np.tile(cur_data, (ntot, 1)).T
            dia_mat = sparse.dia_matrix((diags, offsets), shape=(num_out, ntot))

            # superimpose
            output += dia_mat.dot(pout)
            # shift input pulse.
            pin = np.roll(pin, nstep)

        # compute output steady state waveform
        out_pss = self.outfun(np.linspace(0.0, self.tper, nper, endpoint=False))
        out_pss = np.roll(out_pss, -ndelay)
        for j in range(1, nchain):
            out_pss = self.lsim(out_pss, tstep, tstart=tstart + j * tdelta, periodic=True,
                                ac_only=False, **kwargs)

        ntile = int(np.ceil(num_out * 1.0 / nper))
        out_pss = np.tile(out_pss, (ntile,))
        output += out_pss[:num_out]

        return output
diff --git a/src/bag/data/plot.py b/src/bag/data/plot.py
new file mode 100644
index 0000000..2801194
--- /dev/null
+++ b/src/bag/data/plot.py
@@ -0,0 +1,656 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains utilities to improve waveform plotting in python.
+"""
+
+import numpy as np
+import scipy.interpolate as interp
+
+from matplotlib.lines import Line2D
+from matplotlib.figure import Figure
+from matplotlib.text import Annotation
+import matplotlib.pyplot as plt
+
+from ..math import float_to_si_string
+
# Vega category10 palette
# Default color rotation used by WaveformPlotter and the eye-diagram helpers.
color_cycle = ['#1f77b4', '#ff7f0e',
               '#2ca02c', '#d62728',
               '#9467bd', '#8c564b',
               '#e377c2', '#7f7f7f',
               '#bcbd22', '#17becf',
               ]
+
+
def figure(fig_id, picker=5.0):
    """Create a WaveformPlotter for interactive plotting.

    Parameters
    ----------
    fig_id : int
        the figure ID.
    picker : float
        picker event pixel tolerance.

    Returns
    -------
    plotter : bag.data.plot.WaveformPlotter
        a plotter that helps you make interactive matplotlib figures.
    """
    plotter = WaveformPlotter(fig_id, picker=picker)
    return plotter
+
+
def plot_waveforms(xvec, panel_list, fig=1):
    """Plot waveforms in vertical panels with shared X axis.

    Parameters
    ----------
    xvec : :class:`numpy.ndarray`
        the X data.
    panel_list : list[list[(str, :class:`numpy.ndarray`)]]
        list of lists of Y data. Each sub-list is one panel. Each element of the sub-list
        is a tuple of signal name and signal data.
    fig : int
        the figure ID.
    """
    nrow = len(panel_list)

    if nrow > 0:
        myfig = plt.figure(fig, FigureClass=MarkerFigure)  # type: MarkerFigure
        ax0 = None
        for idx, panel in enumerate(panel_list):
            # every panel after the first shares the first panel's X axis.
            if ax0 is None:
                ax = plt.subplot(nrow, 1, idx + 1)
                ax0 = ax
            else:
                ax = plt.subplot(nrow, 1, idx + 1, sharex=ax0)

            for name, sig in panel:
                # picker enables MarkerFigure's interactive data cursors.
                ax.plot(xvec, sig, label=name, picker=5.0)

            # shrink the axes to make room for the legend on the right.
            box = ax.get_position()
            ax.set_position([box.x0, box.y0, box.width * 0.9, box.height])

            # Put a legend to the right of the current axis
            ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))

        myfig.setup_callbacks()
        plt.show(block=False)
+
+
+def _fpart(x):
+ return x - int(x)
+
+
def _rfpart(x):
    """Return the complement (1 minus) of the fractional part of x."""
    return 1 - _fpart(x)
+
+
def draw_line(x0, y0, x1, y1, xmax, grid):
    """Draws an anti-aliased line in img from p1 to p2 with the given color.

    Xiaolin-Wu-style anti-aliasing: each cell the line passes through receives
    a weight proportional to its coverage, accumulated into ``grid``.  The
    first grid axis wraps around modulo ``xmax`` (used to fold eye-diagram
    traces back into one period).
    NOTE(review): assumes dx != 0 after the steep swap (a zero-length or
    perfectly perpendicular segment would divide by zero) -- confirm callers
    never pass degenerate segments.
    """

    if x0 > x1:
        # x1 is wrapped around
        x1 += xmax

    dx, dy = x1 - x0, y1 - y0
    steep = dx < abs(dy)
    if steep:
        # swap the roles of x and y so the loop always walks the major axis.
        x0, y0, x1, y1, dx, dy = y0, x0, y1, x1, dy, dx

    gradient = dy * 1.0 / dx
    # handle first endpoint
    xpxl1 = int(x0 + 0.5)
    yend = y0 + gradient * (xpxl1 - x0)
    xgap = _rfpart(x0 + 0.5)
    ypxl1 = int(yend)
    if steep:
        grid[ypxl1 % xmax, xpxl1] += _rfpart(yend) * xgap
        grid[(ypxl1 + 1) % xmax, xpxl1] += _fpart(yend) * xgap
    else:
        grid[xpxl1 % xmax, ypxl1] += _rfpart(yend) * xgap
        grid[xpxl1 % xmax, ypxl1 + 1] += _fpart(yend) * xgap

    intery = yend + gradient  # first y-intersection for the main loop

    # do not color second endpoint to avoid double coloring.
    xpxl2 = int(x1 + 0.5)
    # main loop
    if steep:
        for x in range(xpxl1 + 1, xpxl2):
            xval = int(intery)
            grid[xval % xmax, x] += _rfpart(intery)
            grid[(xval + 1) % xmax, x] += _fpart(intery)
            intery += gradient
    else:
        for x in range(xpxl1 + 1, xpxl2):
            xval = x % xmax
            grid[xval, int(intery)] += _rfpart(intery)
            grid[xval, int(intery) + 1] += _fpart(intery)
            intery += gradient
+
+
def plot_eye_heatmap(fig, tvec, yvec, tper, tstart=None, tend=None, toff=None,
                     tstep=None, vstep=None,
                     cmap=None, vmargin=0.05, interpolation='gaussian',
                     repeat=False):
    """Plot eye diagram heat map.

    Parameters
    ----------
    fig : int
        the figure ID.
    tvec : np.ndarray
        the time data.
    yvec : np.ndarray
        waveform data.
    tper : float
        the eye period.
    tstart : float
        starting time. Defaults to first point.
    tend : float
        ending time. Defaults to last point.
    toff : float
        eye offset. Defaults to 0.
    tstep : float or None
        horizontal bin size. Defaults to using 200 bins.
    vstep : float or None
        vertical bin size. Defaults to using 200 bins.
    cmap :
        the colormap used for coloring the heat map. If None, defaults to cubehelix_r
    vmargin : float
        vertical margin in percentage of maximum/minimum waveform values. Defaults
        to 5 percent. This is used so that there is some room between top/bottom of
        eye and the plot.
    interpolation : str
        interpolation method. Defaults to 'gaussian'. Use 'none' for no interpolation.
    repeat : bool
        True to repeat the eye diagram once to the right. This is useful if you
        want to look at edge transitions.

    Returns
    -------
    grid : np.ndarray
        the 2D histogram of accumulated trace counts that was displayed.
    """
    if not toff:
        toff = 0.0
    if tstart is None:
        tstart = tvec[0]
    if tend is None:
        tend = tvec[-1]

    if tstep is None:
        num_h = 200
    else:
        num_h = int(np.ceil(tper / tstep))

    # select samples in [tstart, tend) and fold time into [0, num_h) bin units.
    arr_idx = (tstart <= tvec) & (tvec < tend)
    tplot = np.mod((tvec[arr_idx] - toff), tper) / tper * num_h  # type: np.ndarray
    yplot = yvec[arr_idx]

    # get vertical range
    ymin, ymax = np.amin(yplot), np.amax(yplot)
    yrang = (ymax - ymin) * (1 + vmargin)
    ymid = (ymin + ymax) / 2.0
    ymin = ymid - yrang / 2.0
    ymax = ymin + yrang

    if vstep is None:
        num_v = 200
    else:
        num_v = int(np.ceil(yrang / vstep))

    # rescale Y axis
    yplot = (yplot - ymin) / yrang * num_v

    # accumulate anti-aliased line segments into the 2D histogram.
    grid = np.zeros((num_h, num_v), dtype=float)
    for idx in range(yplot.size - 1):
        draw_line(tplot[idx], yplot[idx], tplot[idx + 1], yplot[idx + 1], num_h, grid)

    if cmap is None:
        from matplotlib import cm
        # noinspection PyUnresolvedReferences
        cmap = cm.cubehelix_r

    plt.figure(fig)
    # transpose/flip so time runs left-to-right and amplitude bottom-to-top.
    grid = grid.T[::-1, :]
    if repeat:
        grid = np.tile(grid, (1, 2))
        tper *= 2.0
    plt.imshow(grid, extent=[0, tper, ymin, ymax], cmap=cmap,
               interpolation=interpolation, aspect='auto')
    cb = plt.colorbar()
    cb.set_label('counts')
    return grid
+
+
def plot_eye(fig, tvec, yvec_list, tper, tstart=None, tend=None,
             toff_list=None, name_list=None, alpha=1.0):
    """Plot eye diagram.

    Parameters
    ----------
    fig : int
        the figure ID.
    tvec : np.ndarray
        the time data.
    yvec_list : list[np.ndarray]
        list of waveforms to plot in eye diagram.
    tper : float
        the period.
    tstart : float
        starting time. Defaults to first point.
    tend : float
        ending time. Defaults to last point.
    toff_list : list[float]
        offset to apply to each waveform. Defaults to zeros.
    name_list : list[str] or None
        the name of each waveform. Defaults to numbers.
    alpha : float
        the transparency of each trace. Can be used to mimic heatmap.
    """
    if not yvec_list:
        return

    if not name_list:
        name_list = [str(num) for num in range(len(yvec_list))]
    if not toff_list:
        toff_list = [0.0] * len(yvec_list)
    if tstart is None:
        tstart = tvec[0]
    if tend is None:
        tend = tvec[-1]

    # get new tstep that evenly divides tper and new x vector
    tstep_given = (tvec[-1] - tvec[0]) / (tvec.size - 1)
    num_samp = int(round(tper / tstep_given))
    t_plot = np.linspace(0.0, tper, num_samp, endpoint=False)

    # find tstart and tend in number of tper.
    nstart = int(np.floor(tstart / tper))
    nend = int(np.ceil(tend / tper))
    ncycle = nend - nstart
    # one row of absolute sample times per eye cycle.
    teye = np.linspace(nstart * tper, nend * tper, num_samp * ncycle, endpoint=False)  # type: np.ndarray
    teye = teye.reshape((ncycle, num_samp))

    myfig = plt.figure(fig, FigureClass=MarkerFigure)  # type: MarkerFigure
    ax = plt.subplot()
    legend_lines = []
    for idx, yvec in enumerate(yvec_list):
        color = color_cycle[idx % len(color_cycle)]
        toff = toff_list[idx]
        # get eye traces
        # out-of-range samples become NaN so partial cycles simply do not draw.
        yfun = interp.interp1d(tvec - toff, yvec, kind='linear', copy=False, bounds_error=False,
                               fill_value=np.nan, assume_sorted=True)
        plot_list = []
        for cycle_idx in range(ncycle):
            plot_list.append(t_plot)
            plot_list.append(yfun(teye[cycle_idx, :]))

        # draw all cycles of this waveform in one call, one shared color.
        lines = ax.plot(*plot_list, alpha=alpha, color=color, picker=4.0, linewidth=2)
        legend_lines.append(lines[0])

    # Put a legend to the right of the current axis
    box = ax.get_position()
    ax.set_position([box.x0, box.y0, box.width * 0.9, box.height])
    ax.legend(legend_lines, name_list, loc='center left', bbox_to_anchor=(1, 0.5))

    myfig.setup_callbacks()
    plt.show(block=False)
+
+
+def _find_closest_point(x, y, xvec, yvec, xnorm, ynorm):
+ """Find point on PWL waveform described by xvec, yvec closest to (x, y)"""
+ xnvec = xvec / xnorm
+ ynvec = yvec / ynorm
+ xn = x / xnorm
+ yn = y / ynorm
+
+ dx = np.diff(xnvec)
+ dy = np.diff(ynvec)
+ px = (xn - xnvec[:-1])
+ py = (yn - ynvec[:-1])
+
+ that = (px * dx + py * dy) / (dx ** 2 + dy ** 2)
+ t = np.minimum(np.maximum(that, 0), 1)
+
+ minx = xnvec[:-1] + t * dx
+ miny = ynvec[:-1] + t * dy
+
+ dist = (minx - xn) ** 2 + (miny - yn) ** 2
+ idx = np.argmin(dist)
+ return minx[idx] * xnorm, miny[idx] * ynorm
+
+
+class WaveformPlotter(object):
+ """A custom matplotlib interactive plotting class.
+
+ This class adds many useful features, such as ability to add/remove markers,
+ ability to toggle waveforms on and off, and so on.
+
+ Parameters
+ ----------
+ fig_idx : int
+ the figure index.
+ picker : float
+ picker event pixel tolerance.
+ normal_width : float
+ normal linewidth.
+ select_width : float
+ selected linewidth.
+ """
+
+ def __init__(self, fig_idx, picker=5.0, normal_width=1.5, select_width=3.0):
+ self.figure = plt.figure(fig_idx, FigureClass=MarkerFigure) # type: MarkerFigure
+ self.picker = picker
+ self.norm_lw = normal_width
+ self.top_lw = select_width
+ self.ax = self.figure.gca()
+ self.ax.set_prop_cycle('color', color_cycle)
+ self.leline_lookup = {}
+ self.letext_lookup = {}
+ self.last_top = None
+ self.legend = None
+ self.resized_legend = False
+
+ def plot(self, *args, **kwargs):
+ if self.figure is None:
+ raise ValueError('figure closed already')
+
+ if 'picker' not in kwargs:
+ kwargs['picker'] = self.picker
+ kwargs['linewidth'] = self.norm_lw
+ if 'lw' in kwargs:
+ del kwargs['lw']
+ return self.ax.plot(*args, **kwargs)
+
+ def setup(self):
+ if self.figure is None:
+ raise ValueError('figure closed already')
+
+ self.figure.tight_layout()
+ # Put a legend to the right of the current axis
+ ax_lines, ax_labels = self.ax.get_legend_handles_labels()
+ self.legend = self.ax.legend(ax_lines, ax_labels, loc='center left',
+ bbox_to_anchor=(1, 0.5), fancybox=True)
+ le_lines = self.legend.get_lines()
+ le_texts = self.legend.get_texts()
+
+ for leline, letext, axline in zip(le_lines, le_texts, ax_lines):
+ self.leline_lookup[leline] = (letext, axline)
+ self.letext_lookup[letext] = (leline, axline)
+ leline.set_picker(self.picker)
+ letext.set_picker(self.picker)
+ letext.set_alpha(0.5)
+
+ le_texts[-1].set_alpha(1.0)
+ ax_lines[-1].set_zorder(2)
+ ax_lines[-1].set_linewidth(self.top_lw)
+ self.last_top = (le_texts[-1], ax_lines[-1])
+
+ self.figure.register_pick_event(self.leline_lookup, self.legend_line_picked)
+ self.figure.register_pick_event(self.letext_lookup, self.legend_text_picked)
+ self.figure.setup_callbacks()
+ self.figure.canvas.mpl_connect('draw_event', self.fix_legend_location)
+ self.figure.canvas.mpl_connect('close_event', self.figure_closed)
+ self.figure.canvas.mpl_connect('resize_event', self.figure_resized)
+
+ # noinspection PyUnusedLocal
+ def figure_closed(self, event):
+ self.figure.close_figure()
+ self.figure = None
+ self.ax = None
+ self.leline_lookup = None
+ self.letext_lookup = None
+ self.last_top = None
+ self.legend = None
+
+ # noinspection PyUnusedLocal
+ def figure_resized(self, event):
+ self.resized_legend = False
+ self.fix_legend_location(None)
+
+ # noinspection PyUnusedLocal
+ def fix_legend_location(self, event):
+ if not self.resized_legend:
+ self.figure.tight_layout()
+ inv_tran = self.figure.transFigure.inverted()
+ leg_box = inv_tran.transform(self.legend.get_window_extent())
+ leg_width = leg_box[1][0] - leg_box[0][0]
+ box = self.ax.get_position()
+ # print box.x0, box.y0, box.width, box.height, leg_width, leg_frame.get_height()
+ self.ax.set_position([box.x0, box.y0, box.width - leg_width, box.height])
+ self.resized_legend = True
+ self.figure.canvas.draw()
+
+    def legend_line_picked(self, artist):
+        """Toggle visibility of the plot line whose legend line was picked.
+
+        When a line becomes visible it is also promoted to be the "top"
+        line via legend_text_picked(draw=False), so the canvas is redrawn
+        exactly once, inside set_line_visibility().
+        """
+        letext, axline = self.leline_lookup[artist]
+        visible = not axline.get_visible()
+        # fade the legend entry while its line is hidden
+        if visible:
+            artist.set_alpha(1.0)
+        else:
+            artist.set_alpha(0.2)
+        if visible and (self.last_top[1] is not axline):
+            # set to be top line
+            self.legend_text_picked(letext, draw=False)
+        self.figure.set_line_visibility(axline, visible)
+
+    def legend_text_picked(self, artist, draw=True):
+        """Promote the line whose legend text was picked to be the top line.
+
+        Demotes the previous top line (normal width, zorder 1, faded legend
+        text) and promotes the picked one.  When draw is True, also makes
+        the line visible if needed and redraws the canvas.
+        """
+        leline, axline = self.letext_lookup[artist]
+        # demote the previous top line and its legend text
+        self.last_top[0].set_alpha(0.5)
+        self.last_top[1].set_zorder(1)
+        self.last_top[1].set_linewidth(self.norm_lw)
+        # promote the picked line and its legend text
+        axline.set_zorder(2)
+        artist.set_alpha(1.0)
+        axline.set_linewidth(self.top_lw)
+        self.last_top = (artist, axline)
+
+        # draw=False means we were called from legend_line_picked(); only
+        # recurse back into it when draw is True, so the two methods can
+        # never call each other indefinitely.
+        if draw:
+            if not axline.get_visible():
+                # set line to be visible if not
+                # draw() will be called in legend_line_picked
+                self.legend_line_picked(leline)
+            else:
+                self.figure.canvas.draw()
+
+
+# noinspection PyAbstractClass
+class MarkerFigure(Figure):
+    """A Figure subclass supporting interactive draggable data-cursor markers.
+
+    Left-clicking a line schedules creation of a marker (a point plus an
+    annotation showing its coordinates, snapped to the nearest data point).
+    Markers can be dragged along their line with the left mouse button and
+    deleted by right-clicking the annotation.  Other artists may register
+    custom pick callbacks via register_pick_event().
+    """
+
+    def __init__(self, **kwargs):
+        Figure.__init__(self, **kwargs)
+        # list of [annotation, point_artist, data_line, background] entries;
+        # background is the blit back-buffer captured when a drag starts.
+        self.markers = []
+        # pick tolerance in pixels for selecting an existing marker
+        self.epsilon = 10.0
+        # index into self.markers of the marker being dragged (-1 = none)
+        self.drag_idx = -1
+        # debounce timer for marker creation; created in setup_callbacks()
+        self.timer = None
+        # pending (line, x, y, button, axes) tuple for deferred marker creation
+        self.marker_line_info = None
+        # parallel lists mapping artist containers to their pick callbacks
+        self.pick_sets = []
+        self.pick_funs = []
+
+    def set_line_visibility(self, axline, visible):
+        """Show or hide the given line, removing all of its markers when hidden."""
+        axline.set_visible(visible)
+        if not visible:
+            # delete all markers on this line
+            del_idx_list = [idx for idx, item in enumerate(self.markers) if item[2] is axline]
+            # iterate in reverse so earlier indices stay valid while deleting
+            for targ_idx in reversed(del_idx_list):
+                an, pt, _, _ = self.markers[targ_idx]
+                del self.markers[targ_idx]
+                # print targ_idx, an
+                an.set_visible(False)
+                pt.set_visible(False)
+
+        self.canvas.draw()
+
+    def register_pick_event(self, artist_set, fun):
+        """Register a callback invoked when an artist in artist_set is picked.
+
+        Parameters
+        ----------
+        artist_set : container
+            container supporting ``in`` tests for the artists of interest.
+        fun : callable
+            single-argument callback receiving the picked artist.
+        """
+        self.pick_sets.append(artist_set)
+        self.pick_funs.append(fun)
+
+    def on_button_release(self, event):
+        """Disable data cursor dragging. """
+        if event.button == 1:
+            self.drag_idx = -1
+
+    def on_motion(self, event):
+        """Move data cursor around. """
+        ax = event.inaxes
+        # only act while dragging a marker with the left button inside an axes
+        if self.drag_idx >= 0 and ax is not None and event.button == 1:
+            xmin, xmax = ax.get_xlim()
+            ymin, ymax = ax.get_ylim()
+            anno, pt, line, bg = self.markers[self.drag_idx]
+            # snap the marker to the closest data point on its line
+            x, y = _find_closest_point(event.xdata, event.ydata,
+                                       line.get_xdata(), line.get_ydata(),
+                                       xmax - xmin, ymax - ymin)
+            pt.set_data([x], [y])
+            xstr, ystr = float_to_si_string(x, 4), float_to_si_string(y, 4)
+            anno.set_text('x: %s\ny: %s' % (xstr, ystr))
+            anno.xy = (x, y)
+            # blit for speed: restore the saved background, then redraw only
+            # the marker artists instead of the whole canvas
+            self.canvas.restore_region(bg)
+            anno.set_visible(True)
+            pt.set_visible(True)
+            ax.draw_artist(anno)
+            ax.draw_artist(pt)
+            self.canvas.blit(ax.bbox)
+
+    def _get_idx_under_point(self, event):
+        """Find selected data cursor."""
+        mx = event.x
+        my = event.y
+        mind = None
+        minidx = None
+        # find closest marker point
+        for idx, (an, pt, _, _) in enumerate(self.markers):
+            xv, yv = pt.get_xdata()[0], pt.get_ydata()[0]
+            # compare in display (pixel) coordinates
+            xp, yp = event.inaxes.transData.transform([xv, yv])
+            # print xv, yv, xp, yp, mx, my
+            d = ((mx - xp) ** 2 + (my - yp) ** 2) ** 0.5
+            if mind is None or d < mind:
+                mind = d
+                minidx = idx
+
+        # accept the closest marker only if it is within the pick tolerance
+        if mind is not None and mind < self.epsilon:
+            return minidx
+        return -1
+
+    def on_pick(self, event):
+        """Dispatch pick events to registered callbacks, markers, or lines."""
+        artist = event.artist
+        if not artist.get_visible():
+            return
+        # registered artist sets (e.g. legend entries) take priority
+        for idx, artist_set in enumerate(self.pick_sets):
+            if artist in artist_set:
+                self.pick_funs[idx](artist)
+                return
+
+        if isinstance(artist, Line2D):
+            mevent = event.mouseevent
+            # figure out if we picked marker or line
+            self.drag_idx = self._get_idx_under_point(mevent)
+
+            if self.drag_idx >= 0:
+                # picked marker.
+                ax = mevent.inaxes
+                an, pt, _, _ = self.markers[self.drag_idx]
+                # hide the marker, then capture the clean background so
+                # on_motion() can blit over it while dragging
+                an.set_visible(False)
+                pt.set_visible(False)
+                self.canvas.draw()
+                self.markers[self.drag_idx][-1] = self.canvas.copy_from_bbox(ax.bbox)
+                an.set_visible(True)
+                pt.set_visible(True)
+                ax.draw_artist(an)
+                ax.draw_artist(pt)
+                self.canvas.blit(ax.bbox)
+
+            else:
+                # save data to plot marker later
+                mxval = mevent.xdata
+                button = mevent.button
+                if mxval is not None and button == 1 and not self.marker_line_info:
+                    self.marker_line_info = (artist, mxval, mevent.ydata,
+                                             button, mevent.inaxes)
+        elif isinstance(artist, Annotation):
+            # delete marker.
+            mevent = event.mouseevent
+            if mevent.button == 3:
+                targ_idx = None
+                for idx, (an, pt, _, _) in enumerate(self.markers):
+                    if an is artist:
+                        targ_idx = idx
+                        break
+                if targ_idx is not None:
+                    an, pt, _, _ = self.markers[targ_idx]
+                    del self.markers[targ_idx]
+                    an.set_visible(False)
+                    pt.set_visible(False)
+                    self.canvas.draw()
+
+    def _create_marker(self):
+        """Timer callback: create the marker queued by on_pick(), if any."""
+        if self.marker_line_info:
+            artist, mxval, myval, button, ax = self.marker_line_info
+            xmin, xmax = ax.get_xlim()
+            ymin, ymax = ax.get_ylim()
+            # snap the new marker to the closest data point on the line
+            mxval, myval = _find_closest_point(mxval, myval,
+                                               artist.get_xdata(), artist.get_ydata(),
+                                               xmax - xmin, ymax - ymin)
+            pt = ax.plot(mxval, myval, 'ko', picker=5.0)[0]
+            xstr, ystr = float_to_si_string(mxval, 4), float_to_si_string(myval, 4)
+            msg = 'x: %s\ny: %s' % (xstr, ystr)
+            anno = ax.annotate(msg, xy=(mxval, myval),
+                               bbox=dict(boxstyle='round', fc='yellow', alpha=0.3),
+                               arrowprops=dict(arrowstyle="->"))
+            anno.draggable()
+            anno.set_picker(True)
+
+            # background (last element) is filled in lazily when a drag starts
+            self.markers.append([anno, pt, artist, None])
+            ax.draw_artist(anno)
+            ax.draw_artist(pt)
+            self.canvas.blit(ax.bbox)
+            self.marker_line_info = None
+
+    def close_figure(self):
+        """Stop the marker-creation timer; call when the figure is closed."""
+        self.timer.stop()
+
+    def setup_callbacks(self):
+        """Connect mouse event handlers and start the marker-creation timer."""
+        self.canvas.mpl_connect('pick_event', self.on_pick)
+        self.canvas.mpl_connect('motion_notify_event', self.on_motion)
+        self.canvas.mpl_connect('button_release_event', self.on_button_release)
+        # use timer to make sure we won't create multiple markers at once when
+        # clicked on overlapping lines.
+        self.timer = self.canvas.new_timer(interval=100)
+        self.timer.add_callback(self._create_marker)
+        self.timer.start()
diff --git a/src/bag/design/__init__.py b/src/bag/design/__init__.py
new file mode 100644
index 0000000..6ce54af
--- /dev/null
+++ b/src/bag/design/__init__.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package defines design template classes.
+"""
diff --git a/src/bag/design/database.py b/src/bag/design/database.py
new file mode 100644
index 0000000..282dee9
--- /dev/null
+++ b/src/bag/design/database.py
@@ -0,0 +1,190 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines the design database class.
+"""
+
+from __future__ import annotations
+from typing import TYPE_CHECKING, TypeVar, Mapping, Optional, Any, Sequence, Type, Tuple
+
+import importlib
+from pathlib import Path
+
+from jinja2 import Template
+
+from pybag.enum import DesignOutput
+
+from ..util.cache import MasterDB, Param
+from ..io.template import new_template_env_fs
+
+from .module import Module
+
+if TYPE_CHECKING:
+ from ..core import BagProject
+ from ..layout.tech import TechInfo
+
+ModuleType = TypeVar('ModuleType', bound=Module)
+
+
+class ModuleDB(MasterDB):
+ """A database of all modules.
+
+ This class is a subclass of MasterDB that defines some extra properties/function
+ aliases to make creating schematics easier.
+
+ Parameters
+ ----------
+ tech_info : TechInfo
+ the TechInfo instance.
+ lib_name : str
+ the cadence library to put all generated templates in.
+ prj : Optional[BagProject]
+ the BagProject instance.
+ name_prefix : str
+ generated schematic name prefix.
+ name_suffix : str
+ generated schematic name suffix.
+ """
+
+ def __init__(self, tech_info: TechInfo, lib_name: str, prj: Optional[BagProject] = None,
+ name_prefix: str = '', name_suffix: str = '') -> None:
+ MasterDB.__init__(self, lib_name, prj=prj, name_prefix=name_prefix, name_suffix=name_suffix)
+
+ self._tech_info = tech_info
+ self._temp_env = new_template_env_fs()
+
+ @classmethod
+ def get_schematic_class(cls, lib_name: str, cell_name: str) -> Type[Module]:
+ """Get the Python class object for the given schematic.
+
+ Parameters
+ ----------
+ lib_name : str
+ schematic library name.
+ cell_name : str
+ schematic cell name.
+
+ Returns
+ -------
+ sch_cls : Type[Module]
+ the schematic class.
+ """
+ module_name = lib_name + '.schematic.' + cell_name
+ try:
+ sch_module = importlib.import_module(module_name)
+ except ImportError:
+ raise ImportError('Cannot find Python module {} for schematic generator {}__{}. '
+ 'Is it on your PYTHONPATH?'.format(module_name, lib_name, cell_name))
+ cls_name = lib_name + '__' + cell_name
+ if not hasattr(sch_module, cls_name):
+ raise ImportError('Cannot find schematic generator class {} '
+ 'in module {}'.format(cls_name, module_name))
+ return getattr(sch_module, cls_name)
+
+ @property
+ def tech_info(self) -> TechInfo:
+ """the :class:`~bag.layout.core.TechInfo` instance."""
+ return self._tech_info
+
+ def get_model_netlist_template(self, fpath: Path) -> Template:
+ return self._temp_env.get_template(str(fpath))
+
+ def instantiate_schematic(self, design: Module, top_cell_name: str = '',
+ output: DesignOutput = DesignOutput.SCHEMATIC,
+ **kwargs: Any) -> None:
+ """Alias for instantiate_master(), with default output type of SCHEMATIC.
+ """
+ self.instantiate_master(output, design, top_cell_name, **kwargs)
+
+ def batch_schematic(self, info_list: Sequence[Tuple[Module, str]],
+ output: DesignOutput = DesignOutput.SCHEMATIC,
+ **kwargs: Any) -> None:
+ """Alias for batch_output(), with default output type of SCHEMATIC.
+ """
+ self.batch_output(output, info_list, **kwargs)
+
+ def new_model(self, master: Module, model_params: Param, **kwargs: Any) -> Module:
+ """Create a new schematic master instance with behavioral model information
+
+ Parameters
+ ----------
+ master : Module
+ the schematic master instance.
+ model_params : Param
+ model parameters.
+ **kwargs : Any
+ optional arguments
+
+ Returns
+ -------
+ master : Module
+ the new master instance.
+ """
+ debug = kwargs.get('debug', False)
+
+ new_params = master.params.copy(append=dict(model_params=model_params))
+ key = master.compute_unique_key(new_params)
+ test = self.find_master(key)
+ if test is not None:
+ if debug:
+ print('model master cached')
+ return test
+
+ if debug:
+ print('generating model master')
+ new_master = master.get_copy_with(new_params)
+ new_master.design_model(key)
+ self.register_master(key, new_master)
+ return new_master
+
+ def instantiate_model(self, design: Module, model_params: Param, top_cell_name: str = '',
+ **kwargs: Any) -> None:
+ self.batch_model([(design, top_cell_name, model_params)], **kwargs)
+
+ def batch_model(self, info_list: Sequence[Tuple[Module, str, Mapping[str, Any]]],
+ output: DesignOutput = DesignOutput.SYSVERILOG,
+ **kwargs: Any) -> Sequence[Tuple[Module, str]]:
+ new_info_list = [(self.new_model(m, Param(m_params)), name)
+ for m, name, m_params in info_list]
+ self.batch_output(output, new_info_list, **kwargs)
+ return new_info_list
diff --git a/src/bag/design/designer.py b/src/bag/design/designer.py
new file mode 100644
index 0000000..6b68eb1
--- /dev/null
+++ b/src/bag/design/designer.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from typing import Any, Optional, cast, Type, Mapping, Dict, Union, List
+
+import abc
+import pkg_resources
+from pathlib import Path
+
+from bag.core import BagProject
+from bag.io.file import read_yaml
+from bag.design.database import ModuleDB
+from bag.layout.template import TemplateDB
+
+
+class DesignerBase(abc.ABC):
+
+ def __init__(self, bprj: BagProject, spec_file: str = '',
+ spec_dict: Optional[Mapping[str, Any]] = None,
+ sch_db: Optional[ModuleDB] = None, lay_db: Optional[TemplateDB] = None,
+ extract: bool = False) -> None:
+ if spec_dict:
+ params = spec_dict
+ else:
+ params = read_yaml(spec_file)
+
+ self.params = cast(Dict[str, Any], params)
+
+ self._root_dir = Path(self.params['root_dir']).resolve()
+
+ self._prj = bprj
+
+ if sch_db is None:
+ self._sch_db = ModuleDB(bprj.tech_info, self.params['impl_lib'], prj=bprj)
+ else:
+ self._sch_db = sch_db
+
+ if lay_db is None:
+ self._lay_db = TemplateDB(bprj.grid, self.params['impl_lib'], prj=bprj)
+ else:
+ self._lay_db = lay_db
+
+ self.extract = extract
+ self.data = {} # a dictionary to access package resources
+ self.designed_params = {} # the parameters after design has been done
+ self.designed_performance = {} # the performance metrics that designed params satisfy
+
+ @classmethod
+ def get_schematic_class(cls):
+ return None
+
+ @classmethod
+ def get_layout_class(cls):
+ return None
+
+ @property
+ def sch_db(self):
+ return self._sch_db
+
+ @property
+ def lay_db(self):
+ return self._lay_db
+
+ def new_designer(self, cls: Type[DesignerBase], params: Mapping[str, Any],
+ extract: bool):
+ return cls(self._prj, spec_dict=params, sch_db=self._sch_db, lay_db=self._lay_db,
+ extract=extract)
+
+ def register_resources(self, resource_names: Union[str, List[str]]) -> None:
+
+ if isinstance(resource_names, str):
+ resource_names = [resource_names]
+ for name in resource_names:
+ module_name = self.__module__.split('.')[-1]
+ fpath = str(Path('data', module_name, f'{name}.yaml'))
+ yaml_file = pkg_resources.resource_filename(self.__module__, fpath)
+ self.data[name] = yaml_file
+
+ @abc.abstractmethod
+ def design(self, *args, **kwargs) -> None:
+ pass
diff --git a/src/bag/design/instance.py b/src/bag/design/instance.py
new file mode 100644
index 0000000..9bc3d42
--- /dev/null
+++ b/src/bag/design/instance.py
@@ -0,0 +1,273 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines classes representing various design instances.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Type, Optional, Any
+
+from ..util.cache import Param
+
+from pybag.core import PySchInstRef
+
+if TYPE_CHECKING:
+ from .database import ModuleDB
+ from .module import Module
+
+
+class SchInstance:
+ """This class represents an instance inside a schematic.
+
+ Parameters
+ ----------
+ db : ModuleDB
+ the design database.
+ inst_ptr : PySchInstRef
+ a reference to the actual schematic instance object.
+ """
+
+ def __init__(self, db: ModuleDB, inst_ptr: PySchInstRef,
+ master: Optional[Module] = None) -> None:
+ self._db: ModuleDB = db
+ self._master: Optional[Module] = master
+ self._ptr: PySchInstRef = inst_ptr
+
+ # get schematic class object from master
+ if master is None:
+ lib_name = self._ptr.lib_name
+ static = self._ptr.is_primitive and lib_name != 'BAG_prim'
+ if static:
+ sch_cls = None
+ else:
+ cell_name = self._ptr.cell_name
+ sch_cls = db.get_schematic_class(lib_name, cell_name)
+ else:
+ sch_cls = master.__class__
+
+ self._sch_cls: Optional[Type[Module]] = sch_cls
+
+ @property
+ def database(self) -> ModuleDB:
+ """ModuleDB: the schematic database."""
+ return self._db
+
+ @property
+ def master(self) -> Optional[Module]:
+ """Optional[Module]: the master object of this instance."""
+ return self._master
+
+ @property
+ def master_class(self) -> Optional[Type[Module]]:
+ """Optional[Type[Module]]: the class object of the master of this instance."""
+ return self._sch_cls
+
+ @property
+ def lib_name(self) -> str:
+ """str: the generator library name."""
+ return self._ptr.lib_name
+
+ @property
+ def cell_name(self) -> str:
+ """str: the generator cell name."""
+ return self._ptr.cell_name
+
+ @property
+ def master_cell_name(self) -> str:
+ """str: the cell name of the master object"""
+ return self.cell_name if self.master is None else self.master.cell_name
+
+ @property
+ def static(self) -> bool:
+ """bool: True if this instance points to a static/fixed schematic."""
+ return self._sch_cls is None
+
+ @property
+ def width(self) -> int:
+ """int: the instance symbol width."""
+ return self._ptr.width
+
+ @property
+ def height(self) -> int:
+ """int: the instance symbol height."""
+ return self._ptr.height
+
+ @property
+ def is_valid(self) -> bool:
+ """bool: True if this instance is valid (i.e. static or has a master."""
+ return self._sch_cls is None or self.master is not None
+
+ @property
+ def is_primitive(self) -> bool:
+ """bool: True if this is a primitive (static or in BAG_prim) schematic instance."""
+ return self._sch_cls is None or self.master.is_primitive()
+
+ @property
+ def should_delete(self) -> bool:
+ """bool: True if this instance should be deleted by the parent."""
+ return self.master is not None and self.master.should_delete_instance()
+
+ @property
+ def master_key(self) -> Optional[Any]:
+ """Optional[Any]: A unique key identifying the master object."""
+ if self.master is None:
+ raise ValueError('Instance {} has no master; cannot get key')
+ return self.master.key
+
+ def design(self, **kwargs: Any) -> None:
+ """Call the design method on master."""
+ if self._sch_cls is None:
+ raise RuntimeError('Cannot call design() method on static instances.')
+
+ self._master = self._db.new_master(self._sch_cls, params=kwargs)
+ if self._master.is_primitive():
+ # update parameters
+ for key, val in self._master.get_schematic_parameters().items():
+ self.set_param(key, val)
+ else:
+ self._ptr.lib_name = self._master.lib_name
+ self._ptr.cell_name = self._master.cell_name
+
+ def design_model(self, model_params: Param) -> None:
+ """Call design_model method on master."""
+ if self._sch_cls is None:
+ # static instance; assume model is defined in include files
+ return
+
+ self._master = self._db.new_model(self._master, model_params)
+ self._ptr.cell_name = self._master.cell_name
+
+ def change_generator(self, gen_lib_name: str, gen_cell_name: str,
+ static: bool = False, keep_connections: bool = False) -> None:
+ """Change the circuit generator responsible for producing this instance.
+
+ Parameter
+ ---------
+ gen_lib_name : str
+ new generator library name.
+ gen_cell_name : str
+ new generator cell name.
+ static : bool
+ True if this is actually a fixed schematic, not a generator.
+ keep_connections : bool
+ True to keep the old connections when the instance master changed.
+ """
+ self._master = None
+ if static:
+ self._sch_cls = None
+ prim = True
+ else:
+ self._sch_cls = self._db.get_schematic_class(gen_lib_name, gen_cell_name)
+ prim = self._sch_cls.is_primitive()
+ self._ptr.update_master(gen_lib_name, gen_cell_name, prim=prim,
+ keep_connections=keep_connections)
+
+ def set_param(self, key: str, val: Any) -> None:
+ """Sets the parameters of this instance.
+
+ Parameters
+ ----------
+ key : str
+ the parameter name.
+ val : Any
+ the parameter value.
+ """
+ self._ptr.set_param(key, val)
+
+ def update_connection(self, inst_name: str, term_name: str, net_name: str) -> None:
+ """Update connections of this schematic instance.
+
+ Parameters
+ ----------
+ inst_name : str
+ The instance name.
+ term_name : str
+ The terminal (in other words, port) of the instance.
+ net_name : str
+ The net to connect the terminal to.
+ """
+ self._ptr.update_connection(inst_name, term_name, net_name)
+
+ def check_connections(self):
+ """Check that the connections of this instance is valid.
+
+ This method is called by the finalize() method, and checks that the user
+ connected every port of this instance.
+ """
+ if self._master is not None:
+ self._ptr.check_connections(self._master.pins.keys())
+
+ def get_connection(self, term_name: str) -> str:
+ """Get the net name connected to the given terminal.
+
+ Parameters
+ ----------
+ term_name : str
+ the terminal name.
+
+ Returns
+ -------
+ net_name : str
+ the resulting net name. Empty string if given terminal is not found.
+ """
+ return self._ptr.get_connection(term_name)
+
+ def get_master_lib_name(self, impl_lib: str) -> str:
+ """Returns the master library name.
+
+ the master library could be different than the implementation library in
+ the case of static schematic.
+
+ Parameters
+ ----------
+ impl_lib : str
+ implementation library name.
+
+ Returns
+ -------
+ master_lib : str
+ the master library name.
+
+ """
+ return self.lib_name if self.is_primitive else impl_lib
diff --git a/src/bag/design/module.py b/src/bag/design/module.py
new file mode 100644
index 0000000..b02659c
--- /dev/null
+++ b/src/bag/design/module.py
@@ -0,0 +1,1174 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines base design module class and primitive design classes.
+"""
+
+from __future__ import annotations
+
+from typing import (
+ TYPE_CHECKING, List, Dict, Optional, Tuple, Any, Union, Iterable, Set, Mapping, Sequence,
+ ItemsView
+)
+
+import abc
+from pathlib import Path
+from itertools import zip_longest
+
+from pybag.core import PySchCellView, get_cv_header
+from pybag.enum import TermType, SigType, DesignOutput, SupplyWrapMode
+
+from ..math import float_to_si_string
+from ..util.cache import DesignMaster, Param, format_cell_name
+from .instance import SchInstance
+from ..layout.tech import TechInfo
+
+if TYPE_CHECKING:
+ from .database import ModuleDB
+
+
+class Module(DesignMaster):
+    """The base class of all schematic generators. This represents a schematic master.
+
+    This class defines all the methods needed to implement a design in the CAD database.
+
+    Parameters
+    ----------
+    yaml_fname : str
+        the netlist information file name.  Empty string for BAG primitives.
+    database : ModuleDB
+        the design database object.
+    params : Param
+        the parameters dictionary.
+    copy_state : Optional[Dict[str, Any]]
+        If not None, set content of this master from this dictionary.
+    **kwargs : Any
+        optional arguments
+    """
+
+    def __init__(self, yaml_fname: str, database: ModuleDB, params: Param, *,
+                 copy_state: Optional[Dict[str, Any]] = None, **kwargs: Any) -> None:
+        self._cv: Optional[PySchCellView] = None
+        if copy_state:
+            # copy-construction: reuse the state produced by get_copy_state_with()
+            self._netlist_dir: Optional[Path] = copy_state['netlist_dir']
+            self._cv = copy_state['cv']
+            self._pins: Dict[str, TermType] = copy_state['pins']
+            self._orig_lib_name = copy_state['orig_lib_name']
+            self._orig_cell_name = copy_state['orig_cell_name']
+            self.instances: Dict[str, SchInstance] = copy_state['instances']
+        else:
+            self._pins: Dict[str, TermType] = {}
+            if yaml_fname:
+                # normal schematic
+                yaml_path = Path(yaml_fname).resolve()
+                self._netlist_dir: Optional[Path] = yaml_path.parent
+                self._cv = PySchCellView(str(yaml_path), 'symbol')
+                self._orig_lib_name = self._cv.lib_name
+                self._orig_cell_name = self._cv.cell_name
+                self.instances: Dict[str, SchInstance] = {name: SchInstance(database, ref)
+                                                          for name, ref in self._cv.inst_refs()}
+                if not self.is_primitive():
+                    # generated (non-primitive) cells are implemented in the design library
+                    self._cv.lib_name = database.lib_name
+            else:
+                # empty yaml file name, this is a BAG primitive
+                self._netlist_dir: Optional[Path] = None
+                # primitive class names follow the '<lib>__<cell>' convention
+                self._orig_lib_name, self._orig_cell_name = self.__class__.__name__.split('__')
+                self.instances: Dict[str, SchInstance] = {}
+
+        # initialize schematic master
+        DesignMaster.__init__(self, database, params, copy_state=copy_state, **kwargs)
+
+    @classmethod
+    def get_hidden_params(cls) -> Dict[str, Any]:
+        """Returns the hidden parameter defaults; adds 'model_params' for model generation."""
+        ans = DesignMaster.get_hidden_params()
+        ans['model_params'] = None
+        return ans
+
+    @classmethod
+    def is_primitive(cls) -> bool:
+        """Returns True if this Module represents a BAG primitive.
+
+        NOTE: This method is only used by BAG and schematic primitives. This method prevents
+        the module from being copied during design implementation. Custom subclasses should
+        not override this method.
+
+        Returns
+        -------
+        is_primitive : bool
+            True if this Module represents a BAG primitive.
+        """
+        # overridden to return True by primitive subclasses (e.g. MosModuleBase below)
+        return False
+
+    @classmethod
+    def is_leaf_model(cls) -> bool:
+        """Returns True if this class is always the leaf model cell.
+
+        Leaf model cells are netlisted from a behavioral model file instead of
+        descending into their instances (see _get_hierarchy_helper()).
+        """
+        return False
+
+    @property
+    def sch_db(self) -> ModuleDB:
+        """The schematic design database (master_db narrowed to ModuleDB)."""
+        # noinspection PyTypeChecker
+        return self.master_db
+
+    def get_master_basename(self) -> str:
+        """Returns the base name used to construct this master's cell name."""
+        return self.orig_cell_name
+
+    def get_copy_state_with(self, new_params: Param) -> Dict[str, Any]:
+        """Returns the copy-constructor state dictionary with new parameters.
+
+        The cellview is copied (via get_copy()) and fresh SchInstance wrappers are
+        built around the copy's instance references, so the copy can be modified
+        independently of this master.
+        """
+        base = DesignMaster.get_copy_state_with(self, new_params)
+        new_cv = self._cv.get_copy()
+        new_inst = {name: SchInstance(self.sch_db, ref, master=self.instances[name].master)
+                    for name, ref in new_cv.inst_refs()}
+
+        base['netlist_dir'] = self._netlist_dir
+        base['cv'] = new_cv
+        base['pins'] = self._pins.copy()
+        base['orig_lib_name'] = self._orig_lib_name
+        base['orig_cell_name'] = self._orig_cell_name
+        base['instances'] = new_inst
+        return base
+
+    @property
+    def tech_info(self) -> TechInfo:
+        """The technology information object."""
+        return self.master_db.tech_info
+
+    @property
+    def sch_scale(self) -> float:
+        """Schematic distance scale factor: layout resolution times layout unit."""
+        tech_info = self.master_db.tech_info
+        return tech_info.resolution * tech_info.layout_unit
+
+    @property
+    def pins(self) -> Dict[str, TermType]:
+        """Mapping from pin name to terminal type; populated by finalize()."""
+        return self._pins
+
+    @abc.abstractmethod
+    def design(self, **kwargs: Any) -> None:
+        """To be overridden by subclasses to design this module.
+
+        NOTE: finalize() invokes this method with all of this master's parameters
+        except 'model_params'.
+
+        To design instances of this module, you can
+        call their :meth:`.design` method or any other ways you coded.
+
+        To modify schematic structure, call:
+
+        :meth:`.rename_pin`
+
+        :meth:`.delete_instance`
+
+        :meth:`.replace_instance_master`
+
+        :meth:`.reconnect_instance_terminal`
+
+        :meth:`.array_instance`
+        """
+        pass
+
+    def design_model(self, key: Any) -> None:
+        """Design the behavioral model hierarchy of this master.
+
+        If 'model_params' contains a 'view_name' entry this cell is a leaf model;
+        otherwise the model parameters are distributed to the child instances.
+
+        Parameters
+        ----------
+        key : Any
+            the new master key; this master's signature is updated with it.
+        """
+        self.update_signature(key)
+        self._cv.cell_name = self.cell_name
+        model_params = self.params['model_params']
+        if 'view_name' not in model_params:
+            # this is a hierarchical model
+            if not self.instances:
+                # found a leaf cell with no behavioral model
+                raise ValueError('Schematic master has no instances and no behavioral model.')
+
+            # children may change, so recompute the children key set from scratch
+            self.clear_children_key()
+
+            master_db = self.master_db
+            for name, inst in self.instances.items():
+                if master_db.exclude_model(inst.lib_name, inst.cell_name):
+                    continue
+                cur_params: Optional[Param] = model_params.get(name, None)
+                if cur_params is None:
+                    raise ValueError('Cannot find model parameters for instance {}'.format(name))
+                inst.design_model(cur_params)
+                if not inst.is_primitive:
+                    self.add_child_key(inst.master_key)
+
+    def set_param(self, key: str, val: Union[int, float, bool, str]) -> None:
+        """Set schematic parameters for this master.
+
+        This method is only used to set parameters for BAG primitives.
+
+        Parameters
+        ----------
+        key : str
+            parameter name.
+        val : Union[int, float, bool, str]
+            parameter value.
+        """
+        # delegate to the underlying cellview
+        self._cv.set_param(key, val)
+
+    def finalize(self) -> None:
+        """Finalize this master instance.
+
+        Runs design(), validates every instance connection, then records the pin
+        list and cell name before calling the base class finalize routine.
+        """
+        # invoke design function, excluding model_params
+        args = dict((k, v) for k, v in self.params.items() if k != 'model_params')
+        self.design(**args)
+
+        # get set of children master keys
+        for name, inst in self.instances.items():
+            if not inst.is_valid:
+                raise ValueError(f'Schematic instance {name} is not valid. '
+                                 'Did you forget to call design()?')
+
+            if not inst.is_primitive:
+                # NOTE: only non-primitive instance can have ports change
+                try:
+                    inst.check_connections()
+                except RuntimeError as err:
+                    raise RuntimeError(f'Error checking connection of instance {name}') from err
+
+                self.add_child_key(inst.master_key)
+
+        if self._cv is not None:
+            # get pins
+            self._pins = {k: TermType(v) for k, v in self._cv.terminals()}
+            # update cell name
+            self._cv.cell_name = self.cell_name
+
+        # call super finalize routine
+        DesignMaster.finalize(self)
+
+    def get_content(self, output_type: DesignOutput, rename_dict: Dict[str, str], name_prefix: str,
+                    name_suffix: str, shell: bool, exact_cell_names: Set[str],
+                    supply_wrap_mode: SupplyWrapMode) -> Tuple[str, Any]:
+        """Returns the netlist content of this master.
+
+        Returns
+        -------
+        cell_name : str
+            the formatted cell name.
+        content : Tuple[Optional[PySchCellView], str]
+            the cellview and (for model outputs) the rendered model netlist.
+            The cellview is None if this cell should not be netlisted.
+        """
+        if not self.finalized:
+            raise ValueError('This module is not finalized yet')
+
+        cell_name = format_cell_name(self.cell_name, rename_dict, name_prefix, name_suffix,
+                                     exact_cell_names, supply_wrap_mode)
+
+        if self.is_primitive():
+            return cell_name, (None, '')
+
+        netlist = ''
+        if not shell and output_type.is_model:
+            # NOTE: only get model netlist if we're doing real netlisting (versus shell netlisting)
+            model_params: Optional[Param] = self.params['model_params']
+            if model_params is None:
+                # model parameters is unset. This happens if a behavioral model view is used
+                # at a top level block, and this cell gets shadowed out.
+                # If this is the case, just return None so this cellview won't be netlisted.
+                return cell_name, (None, '')
+            view_name: Optional[str] = model_params.get('view_name', None)
+            if view_name is not None:
+                fpath = self.get_model_path(output_type, view_name)
+                template = self.sch_db.get_model_netlist_template(fpath)
+                netlist = template.render(_header=get_cv_header(self._cv, cell_name, output_type),
+                                          _sch_params=self.params, _pins=self.pins,
+                                          _cell_name=cell_name, **model_params)
+
+        return cell_name, (self._cv, netlist)
+
+    @property
+    def cell_name(self) -> str:
+        """The master cell name."""
+        # primitives derive their cell name from the design parameters
+        if self.is_primitive():
+            return self.get_cell_name_from_parameters()
+        return super(Module, self).cell_name
+
+    @property
+    def orig_lib_name(self) -> str:
+        """The original schematic template library name (as read from the template)."""
+        return self._orig_lib_name
+
+    @property
+    def orig_cell_name(self) -> str:
+        """The original schematic template cell name (as read from the template)."""
+        return self._orig_cell_name
+
+    def get_model_path(self, output_type: DesignOutput, view_name: str = '') -> Path:
+        """Returns the model file path.
+
+        Parameters
+        ----------
+        output_type : DesignOutput
+            the model output type; its extension determines the file suffix.
+        view_name : str
+            optional view name appended to the cell base name.
+
+        Returns
+        -------
+        path : Path
+            the model file path; may point at the fallback model type's file if
+            the requested one does not exist.
+        """
+        if view_name:
+            basename = f'{self.orig_cell_name}.{view_name}'
+        else:
+            basename = self.orig_cell_name
+
+        file_name = f'{basename}.{output_type.extension}'
+        # models live in a 'models' directory next to the netlist info directory
+        path: Path = self._netlist_dir.parent / 'models' / file_name
+        if not path.is_file():
+            fallback_type = output_type.fallback_model_type
+            if fallback_type is not output_type:
+                # if there is a fallback model type defined, try to return that model file
+                # instead.
+                test_path = path.with_name(f'{basename}.{fallback_type.extension}')
+                if test_path.is_file():
+                    return test_path
+
+        return path
+
+    def should_delete_instance(self) -> bool:
+        """Returns True if this instance should be deleted based on its parameters.
+
+        This method is mainly used to delete 0 finger or 0 width transistors. However,
+        you can override this method if there exists parameter settings which corresponds
+        to an empty schematic.
+
+        Returns
+        -------
+        delete : bool
+            True if parent should delete this instance.
+        """
+        return False
+
+    def get_schematic_parameters(self) -> Dict[str, str]:
+        """Returns the schematic parameter dictionary of this instance.
+
+        NOTE: This method is only used by BAG primitives, as they are
+        implemented with parameterized cells in the CAD database. Custom
+        subclasses should not override this method.
+
+        Returns
+        -------
+        params : Dict[str, str]
+            the schematic parameter dictionary.
+        """
+        # base implementation: non-primitive cells have no CAD parameters
+        return {}
+
+    def get_cell_name_from_parameters(self) -> str:
+        """Returns new cell name based on parameters.
+
+        NOTE: This method is only used by BAG primitives. This method
+        enables a BAG primitive to change the cell master based on
+        design parameters (e.g. change transistor instance based on the
+        intent parameter). Custom subclasses should not override this
+        method.
+
+        Returns
+        -------
+        cell : str
+            the cell name based on parameters.
+        """
+        # base implementation: keep the original template cell name
+        return self.orig_cell_name
+
+    def rename_pin(self, old_pin: str, new_pin: str) -> None:
+        """Renames an input/output pin of this schematic.
+
+        NOTE: Make sure to call :meth:`.reconnect_instance_terminal` so that instances are
+        connected to the new pin.
+
+        Parameters
+        ----------
+        old_pin : str
+            the old pin name.
+        new_pin : str
+            the new pin name.
+        """
+        # only the pin is renamed; instance connections are not updated here
+        self._cv.rename_pin(old_pin, new_pin)
+
+    def add_pin(self, new_pin: str, pin_type: Union[TermType, str],
+                sig_type: SigType = SigType.signal) -> None:
+        """Adds a new pin to this schematic.
+
+        NOTE: Make sure to call :meth:`.reconnect_instance_terminal` so that instances are
+        connected to the new pin.
+
+        Parameters
+        ----------
+        new_pin : str
+            the new pin name.
+        pin_type : Union[TermType, str]
+            the new pin type.
+        sig_type : SigType
+            the signal type of the pin.
+        """
+        # accept string terminal type names for convenience
+        if isinstance(pin_type, str):
+            pin_type = TermType[pin_type]
+
+        self._cv.add_pin(new_pin, pin_type.value, sig_type.value)
+
+    def get_signal_type(self, pin_name: str) -> SigType:
+        """Returns the signal type of the given pin; only valid after finalize()."""
+        if not self.finalized:
+            raise ValueError('This method only works on finalized master.')
+
+        return self._cv.get_signal_type(pin_name)
+
+    def remove_pin(self, remove_pin: str) -> bool:
+        """Removes a pin from this schematic.
+
+        Parameters
+        ----------
+        remove_pin : str
+            the pin to remove.
+
+        Returns
+        -------
+        success : bool
+            True if the pin is successfully found and removed.
+        """
+        return self._cv.remove_pin(remove_pin)
+
+    def set_pin_attribute(self, pin_name: str, key: str, val: str) -> None:
+        """Set an attribute on the given pin.
+
+        Parameters
+        ----------
+        pin_name : str
+            the pin name.
+        key : str
+            the attribute name.
+        val : str
+            the attribute value.
+        """
+        self._cv.set_pin_attribute(pin_name, key, val)
+
+    def rename_instance(self, old_name: str, new_name: str,
+                        conn_list: Optional[Union[Iterable[Tuple[str, str]],
+                                                  ItemsView[str, str]]] = None) -> None:
+        """Renames an instance in this schematic.
+
+        Parameters
+        ----------
+        old_name : str
+            the old instance name.
+        new_name : str
+            the new instance name.
+        conn_list : Optional[Union[Iterable[Tuple[str, str]], ItemsView[str, str]]]
+            an optional connection list, applied to the renamed instance as
+            (terminal, net) pairs.
+        """
+        self._cv.rename_instance(old_name, new_name)
+        # keep the Python-side instance dictionary in sync with the cellview
+        self.instances[new_name] = inst = self.instances.pop(old_name)
+        if conn_list:
+            for term, net in conn_list:
+                inst.update_connection(new_name, term, net)
+
+    def remove_instance(self, inst_name: str) -> bool:
+        """Removes the instance with the given name.
+
+        Parameters
+        ----------
+        inst_name : str
+            the child instance to delete.
+
+        Returns
+        -------
+        success : bool
+            True if the instance is successfully found and removed.
+        """
+        success = self._cv.remove_instance(inst_name)
+        if success:
+            # keep the Python-side instance dictionary in sync with the cellview
+            del self.instances[inst_name]
+        return success
+
+    def delete_instance(self, inst_name: str) -> bool:
+        """Delete the instance with the given name.
+
+        This method is identical to remove_instance(). It's here only for backwards
+        compatibility.
+        """
+        return self.remove_instance(inst_name)
+
+    def replace_instance_master(self, inst_name: str, lib_name: str, cell_name: str,
+                                static: bool = False, keep_connections: bool = False) -> None:
+        """Replace the master of the given instance.
+
+        NOTE: all terminal connections will be reset. Call reconnect_instance_terminal() to modify
+        terminal connections.
+
+        Parameters
+        ----------
+        inst_name : str
+            the child instance to replace.
+        lib_name : str
+            the new library name.
+        cell_name : str
+            the new cell name.
+        static : bool
+            True if we're replacing instance with a static schematic instead of a design module.
+        keep_connections : bool
+            True to keep the old connections when the instance master changed.
+        """
+        if inst_name not in self.instances:
+            raise ValueError('Cannot find instance with name: %s' % inst_name)
+
+        # the SchInstance handles master resolution and connection bookkeeping
+        self.instances[inst_name].change_generator(lib_name, cell_name, static=static,
+                                                   keep_connections=keep_connections)
+
+    def reconnect_instance_terminal(self, inst_name: str, term_name: str, net_name: str) -> None:
+        """Reconnect the instance terminal to a new net.
+
+        Parameters
+        ----------
+        inst_name : str
+            the instance to modify.
+        term_name : str
+            the instance terminal name to reconnect.
+        net_name : str
+            the net to connect the instance terminal to.
+        """
+        inst = self.instances.get(inst_name, None)
+        if inst is None:
+            raise ValueError('Cannot find instance {}'.format(inst_name))
+
+        inst.update_connection(inst_name, term_name, net_name)
+
+    def reconnect_instance(self, inst_name: str,
+                           term_net_iter: Union[Iterable[Tuple[str, str]],
+                                                ItemsView[str, str]]) -> None:
+        """Reconnect all given instance terminals.
+
+        Parameters
+        ----------
+        inst_name : str
+            the instance to modify.
+        term_net_iter : Union[Iterable[Tuple[str, str]], ItemsView[str, str]]
+            an iterable of (term, net) tuples.
+        """
+        inst = self.instances.get(inst_name, None)
+        if inst is None:
+            raise ValueError('Cannot find instance {}'.format(inst_name))
+
+        for term, net in term_net_iter:
+            inst.update_connection(inst_name, term, net)
+
+    def array_instance(self, inst_name: str,
+                       inst_name_list: Optional[List[str]] = None,
+                       term_list: Optional[List[Dict[str, str]]] = None,
+                       inst_term_list: Optional[List[Tuple[str, Iterable[Tuple[str, str]]]]] = None,
+                       dx: int = 0, dy: int = 0) -> None:
+        """Replace the given instance by an array of instances.
+
+        This method will replace self.instances[inst_name] by a list of
+        Modules. The user can then design each of those modules.
+
+        Parameters
+        ----------
+        inst_name : str
+            the instance to array.
+        inst_name_list : Optional[List[str]]
+            a list of the names for each array item.
+        term_list : Optional[List[Dict[str, str]]]
+            a list of modified terminal connections for each array item. The keys are
+            instance terminal names, and the values are the net names to connect
+            them to. Only terminal connections different than the parent instance
+            should be listed here.
+            If None, assume terminal connections are not changed.
+        inst_term_list : Optional[List[Tuple[str, Iterable[Tuple[str, str]]]]]
+            zipped version of inst_name_list and term_list. If given, this is used instead.
+        dx : int
+            the X coordinate shift. If dx = dy = 0, default to shift right.
+        dy : int
+            the Y coordinate shift. If dx = dy = 0, default to shift right.
+        """
+        if inst_term_list is None:
+            if inst_name_list is None:
+                # NOTE: message refers to this argument by its old name 'inst_term_iter'
+                raise ValueError('inst_name_list cannot be None if inst_term_iter is None.')
+            # get instance/terminal list iterator
+            if term_list is None:
+                # no terminal changes: pair every name with an empty connection list
+                inst_term_list = zip_longest(inst_name_list, [], fillvalue=[])
+            elif len(inst_name_list) != len(term_list):
+                raise ValueError('inst_name_list and term_list length mismatch.')
+            else:
+                inst_term_list = zip_longest(inst_name_list, (term.items() for term in term_list))
+        else:
+            inst_name_list = [arg[0] for arg in inst_term_list]
+        # array instance
+        self._cv.array_instance(inst_name, dx, dy, inst_term_list)
+
+        # update instance dictionary
+        orig_inst = self.instances.pop(inst_name)
+        db = orig_inst.database
+        for name in inst_name_list:
+            inst_ptr = self._cv.get_inst_ref(name)
+            self.instances[name] = SchInstance(db, inst_ptr, master=orig_inst.master)
+
+    def design_sources_and_loads(self, params_list: Optional[Sequence[Mapping[str, Any]]] = None,
+                                 default_name: str = 'VDC') -> None:
+        """Convenience function for generating sources and loads.
+
+        Given DC voltage/current bias sources information, array the given voltage/current bias
+        sources and configure the voltage/current.
+
+        Each bias dictionary is a dictionary from bias source name to a 3-element list. The first
+        two elements are the PLUS/MINUS net names, respectively, and the third element is the DC
+        voltage/current value as a string or float. A variable name can be given to define a
+        testbench parameter.
+
+        Parameters
+        ----------
+        params_list : Optional[Sequence[Mapping[str, Any]]]
+            List of dictionaries representing the element to be used.
+            Each dictionary should have the following format:
+            'lib': Optional[str] (default: analogLib) -> lib name of the master
+            'type': str -> type of the master (e.g. 'vdc')
+            'value': Union[T, Dict[str, T]], where T = Union[str, float, int] -> value of the master
+            'conns': Dict[str, str] -> connections of the master
+        default_name : str
+            Default name of the instance in the testbench
+        """
+
+        # no sources/loads requested: remove the template instance entirely
+        if not params_list:
+            self.delete_instance(default_name)
+            return
+
+        # TODO: find better places to put these
+        # instance-name templates per (library, element type)
+        template_names = {
+            'analogLib': {
+                'vdc': 'VDC{}',
+                'idc': 'IDC{}',
+                'cap': 'C{}',
+                'res': 'R{}',
+                'vcvs': 'VCVS{}',
+            }
+        }
+        # CAD parameter name that receives a scalar 'value' (None: dict value required)
+        type_to_value_dict = {
+            'analogLib': {
+                'vdc': 'vdc',
+                'cap': 'c',
+                'res': 'r',
+                'idc': 'idc',
+                'vpulse': None,
+                'vcvs': 'egain',
+            },
+        }
+
+        element_list = []
+        name_list = []
+        for i, params_dict in enumerate(params_list):
+            lib = params_dict.get('lib', 'analogLib')
+            cell_type = params_dict['type']
+            value = params_dict['value']
+            conn_dict = params_dict['conns']
+            if not isinstance(conn_dict, Mapping):
+                raise ValueError('Got a non dictionary for the connections in '
+                                 'design_sources_and_loads')
+
+            if cell_type not in type_to_value_dict[lib]:
+                raise ValueError(f'Got an unsupported type {cell_type} for element type in '
+                                 f'design_sources_and_loads')
+
+            # make sure value is either string or dictionary
+            if isinstance(value, (int, float)):
+                value = float_to_si_string(value)
+
+            # create value_dict
+            if isinstance(value, str):
+                key = type_to_value_dict[lib][cell_type]
+                if key is None:
+                    raise ValueError(f'{cell_type} source must specify value dictionary.')
+                value_dict = {key: value}
+            else:
+                if not isinstance(value, Mapping):
+                    raise ValueError(f'type not supported for value {value} of type {type(value)}')
+
+                # normalize all dictionary values to SI strings
+                value_dict = {}
+                for key, val in value.items():
+                    if isinstance(val, (int, float)):
+                        value_dict[key] = float_to_si_string(val)
+                    elif isinstance(val, str):
+                        value_dict[key] = val
+                    else:
+                        raise ValueError(f'type not supported for key={key}, val={val} '
+                                         f'with type {type(val)}')
+
+            tmp_name = template_names[lib].get(cell_type, 'X{}').format(i)
+            element_list.append((tmp_name, lib, cell_type, value_dict, conn_dict))
+            name_list.append(tmp_name)
+
+        # array the template instance, then specialize each element
+        self.array_instance(default_name, inst_name_list=name_list)
+
+        for name, lib, cell, val_dict, conns in element_list:
+            self.replace_instance_master(name, lib, cell, static=True, keep_connections=True)
+            inst = self.instances[name]
+            for k, v in val_dict.items():
+                inst.set_param(k, v)
+            self.reconnect_instance(name, conns.items())
+
+    def design_dummy_transistors(self, dum_info: List[Tuple[Any]], inst_name: str, vdd_name: str,
+                                 vss_name: str, net_map: Optional[Dict[str, str]] = None) -> None:
+        """Convenience function for generating dummy transistor schematic.
+
+        Given dummy information (computed by AnalogBase) and a BAG transistor instance,
+        this method generates dummy schematics by arraying and modifying the BAG
+        transistor instance.
+
+        Parameters
+        ----------
+        dum_info : List[Tuple[Any]]
+            the dummy information data structure; each entry is
+            ((mos_type, w, lch, th, s_net, d_net), fg).
+        inst_name : str
+            the BAG transistor instance name.
+        vdd_name : str
+            VDD net name. Used for PMOS dummies.
+        vss_name : str
+            VSS net name. Used for NMOS dummies.
+        net_map : Optional[Dict[str, str]]
+            optional net name transformation mapping.
+        """
+        if not dum_info:
+            self.delete_instance(inst_name)
+        else:
+            num_arr = len(dum_info)
+            arr_name_list = ['XDUMMY%d' % idx for idx in range(num_arr)]
+            self.array_instance(inst_name, arr_name_list)
+
+            for name, ((mos_type, w, lch, th, s_net, d_net), fg) in zip(arr_name_list, dum_info):
+                if mos_type == 'pch':
+                    cell_name = 'pmos4_standard'
+                    sup_name = vdd_name
+                else:
+                    cell_name = 'nmos4_standard'
+                    sup_name = vss_name
+                if net_map is not None:
+                    s_net = net_map.get(s_net, s_net)
+                    d_net = net_map.get(d_net, d_net)
+                # empty source/drain nets default to the supply (fully-dummy device)
+                s_name = s_net if s_net else sup_name
+                d_name = d_net if d_net else sup_name
+                inst = self.instances[name]
+                inst.change_generator('BAG_prim', cell_name)
+                # gate and body tied to the supply turns the dummy off
+                inst.update_connection(name, 'G', sup_name)
+                inst.update_connection(name, 'B', sup_name)
+                inst.update_connection(name, 'D', d_name)
+                inst.update_connection(name, 'S', s_name)
+                inst.design(w=w, l=lch, nf=fg, intent=th)
+
+    def design_transistor(self, inst_name: str, w: int, lch: int, seg: int,
+                          intent: str, m: str = '', d: str = '', g: Union[str, List[str]] = '',
+                          s: str = '', b: str = '', stack: int = 1, mos_type: str = '') -> None:
+        """Design a BAG_prim transistor (with stacking support).
+
+        This is a convenient method to design a stack transistor. Additional transistors
+        will be created on the right. The intermediate nodes of each parallel segment are not
+        shorted together.
+
+        Parameters
+        ----------
+        inst_name : str
+            name of the BAG_prim transistor instance.
+        w : int
+            the width of the transistor, in number of fins or resolution units.
+        lch : int
+            the channel length, in resolution units.
+        seg : int
+            number of parallel segments of stacked transistors.
+        intent : str
+            the threshold flavor.
+        m : str
+            base name of the intermediate nodes. the intermediate nodes will be named
+            'midX', where X is a non-negative integer.
+        d : str
+            the drain name. Empty string to not rename.
+        g : Union[str, List[str]]
+            the gate name. Empty string to not rename.
+            If a list is given, then a NAND-gate structure will be built where the gate nets
+            may be different. Index 0 corresponds to the gate of the source transistor.
+        s : str
+            the source name. Empty string to not rename.
+        b : str
+            the body name. Empty string to not rename.
+        stack : int
+            number of series stack transistors.
+        mos_type : str
+            if non-empty, will change the transistor master to this type.
+        """
+        inst = self.instances[inst_name]
+        if not issubclass(inst.master_class, MosModuleBase):
+            raise ValueError('This method only works on BAG_prim transistors.')
+        if stack <= 0 or seg <= 0:
+            raise ValueError('stack and seg must be positive')
+
+        if mos_type:
+            cell_name = 'nmos4_standard' if mos_type == 'nch' else 'pmos4_standard'
+            inst.change_generator('BAG_prim', cell_name, keep_connections=True)
+
+        g_is_str = isinstance(g, str)
+        if stack == 1:
+            # no stacking: a single instance with nf=seg fingers suffices
+            # design instance
+            inst.design(w=w, l=lch, nf=seg, intent=intent)
+            # connect terminals
+            if not g_is_str:
+                g = g[0]
+            for term, net in (('D', d), ('G', g), ('S', s), ('B', b)):
+                if net:
+                    inst.update_connection(inst_name, term, net)
+        else:
+            if not m:
+                raise ValueError('Intermediate node base name cannot be empty.')
+            # design instance
+            inst.design(w=w, l=lch, nf=1, intent=intent)
+            # rename G/B
+            if g_is_str and g:
+                inst.update_connection(inst_name, 'G', g)
+            if b:
+                inst.update_connection(inst_name, 'B', b)
+            # default D/S to the existing connections when not renamed
+            if not d:
+                d = inst.get_connection('D')
+            if not s:
+                s = inst.get_connection('S')
+
+            if seg == 1:
+                # only one segment, array instance via naming
+                # rename instance
+                new_name = inst_name + '<0:{}>'.format(stack - 1)
+                self.rename_instance(inst_name, new_name)
+                # rename D/S: thread the series chain through the intermediate nodes
+                if stack > 2:
+                    m += '<0:{}>'.format(stack - 2)
+                new_s = s + ',' + m
+                new_d = m + ',' + d
+                inst.update_connection(new_name, 'D', new_d)
+                inst.update_connection(new_name, 'S', new_s)
+                if not g_is_str:
+                    inst.update_connection(new_name, 'G', ','.join(g))
+            else:
+                # multiple segment and stacks, have to array instance
+                # construct instance name/terminal map iterator
+                inst_term_list = []
+                last_cnt = (stack - 1) * seg
+                g_cnt = 0
+                # one bus instance per stack level, each seg bits wide
+                for cnt in range(0, last_cnt + 1, seg):
+                    d_suf = '<{}:{}>'.format(cnt + seg - 1, cnt)
+                    s_suf = '<{}:{}>'.format(cnt - 1, cnt - seg)
+                    iname = inst_name + d_suf
+                    if cnt == 0:
+                        # bottom of the stack: sources tie to the external source net
+                        s_name = s
+                        d_name = m + d_suf
+                    elif cnt == last_cnt:
+                        # top of the stack: drains tie to the external drain net
+                        s_name = m + s_suf
+                        d_name = d
+                    else:
+                        s_name = m + s_suf
+                        d_name = m + d_suf
+                    term_list = [('S', s_name), ('D', d_name)]
+                    if not g_is_str:
+                        term_list.append(('G', g[g_cnt]))
+                        g_cnt += 1
+                    inst_term_list.append((iname, term_list))
+
+                self.array_instance(inst_name, inst_term_list=inst_term_list)
+
+    def replace_with_ideal_switch(self, inst_name: str, rclosed: str = 'rclosed',
+                                  ropen: str = 'ropen', vclosed: str = 'vclosed',
+                                  vopen: str = 'vopen') -> None:
+        """Replace a BAG transistor instance with an analogLib ideal switch.
+
+        The switch terminals N+/N- take the transistor's source/drain nets; the
+        control terminals NC+/NC- are wired from the gate, referenced to VDD for
+        a PMOS and VSS for an NMOS.
+
+        Parameters
+        ----------
+        inst_name : str
+            the transistor instance to replace.
+        rclosed : str
+            value or variable name mapped to the switch 'rc' parameter.
+        ropen : str
+            value or variable name mapped to the switch 'ro' parameter.
+        vclosed : str
+            value or variable name mapped to the switch 'vt2' parameter.
+        vopen : str
+            value or variable name mapped to the switch 'vt1' parameter.
+        """
+        # figure out real switch connections
+        inst = self.instances[inst_name]
+        term_net_list = [('N+', inst.get_connection('S')), ('N-', inst.get_connection('D'))]
+        if 'pmos' in inst.cell_name:
+            term_net_list += [('NC+', 'VDD'), ('NC-', inst.get_connection('G'))]
+        elif 'nmos' in inst.cell_name:
+            term_net_list += [('NC+', inst.get_connection('G')), ('NC-', 'VSS')]
+        else:
+            raise ValueError(f'Cannot replace {inst.cell_name} with ideal switch.')
+
+        # replace with ideal switch
+        self.replace_instance_master(inst_name, 'analogLib', 'switch', static=True)
+
+        # reconnect terminals of ideal switch
+        for term, net in term_net_list:
+            self.reconnect_instance_terminal(inst_name, term, net)
+        for key, val in [('vt1', vopen), ('vt2', vclosed), ('ro', ropen), ('rc', rclosed)]:
+            self.instances[inst_name].set_param(key, val)
+
+    # noinspection PyUnusedLocal
+    def get_lef_options(self, options: Dict[str, Any], config: Mapping[str, Any]) -> None:
+        """Populate the LEF options dictionary.
+
+        Parameters
+        ----------
+        options : Dict[str, Any]
+            the result LEF options dictionary.
+        config : Mapping[str, Any]
+            the LEF configuration dictionary (unused by this base implementation).
+        """
+        if not self.finalized:
+            raise ValueError('This method only works on finalized master.')
+
+        # bucket pins by signal type; pins with other signal types are skipped
+        pin_groups = {SigType.power: [], SigType.ground: [], SigType.clock: [],
+                      SigType.analog: []}
+        out_pins = []
+        for name, term_type in self.pins.items():
+            sig_type = self.get_signal_type(name)
+            pin_list = pin_groups.get(sig_type, None)
+            if pin_list is not None:
+                pin_list.append(name)
+            if term_type is TermType.output:
+                out_pins.append(name)
+
+        options['pwr_pins'] = pin_groups[SigType.power]
+        options['gnd_pins'] = pin_groups[SigType.ground]
+        options['clk_pins'] = pin_groups[SigType.clock]
+        options['analog_pins'] = pin_groups[SigType.analog]
+        options['output_pins'] = out_pins
+
+    def get_instance_hierarchy(self, output_type: DesignOutput,
+                               leaf_cells: Optional[Dict[str, List[str]]] = None,
+                               default_view_name: str = '') -> Dict[str, Any]:
+        """Returns a nested dictionary representing the modeling instance hierarchy.
+
+        By default, we try to netlist as deeply as possible. This behavior can be modified by
+        specifying the leaf cells.
+
+        Parameters
+        ----------
+        output_type : DesignOutput
+            the behavioral model output type.
+        leaf_cells : Optional[Dict[str, List[str]]]
+            data structure storing leaf cells, keyed by library name.
+        default_view_name : str
+            default model view name.
+
+        Returns
+        -------
+        hier : Dict[str, Any]
+            the instance hierarchy dictionary.
+        """
+        # flatten the per-library cell lists into a (lib, cell) -> bool lookup table
+        is_leaf_table = {}
+        if leaf_cells:
+            for lib_name, cell_list in leaf_cells.items():
+                for cell in cell_list:
+                    is_leaf_table[(lib_name, cell)] = True
+
+        return self._get_hierarchy_helper(output_type, is_leaf_table, default_view_name)
+
+    def _get_hierarchy_helper(self, output_type: DesignOutput,
+                              is_leaf_table: Dict[Tuple[str, str], bool],
+                              default_view_name: str,
+                              ) -> Optional[Dict[str, Any]]:
+        """Recursive helper for get_instance_hierarchy().
+
+        Returns a dictionary mapping instance names to their model hierarchies, or
+        a {'view_name': ...} dictionary when this cell is modeled as a leaf.
+        Raises ValueError when no model file exists for a required leaf cell.
+        """
+        model_path = self.get_model_path(output_type, default_view_name)
+
+        key = (self._orig_lib_name, self._orig_cell_name)
+        if self.is_leaf_model() or is_leaf_table.get(key, False):
+            # explicitly-marked leaf: must have a model file
+            if not model_path.is_file():
+                raise ValueError(f'Cannot find model file for {key}')
+            return dict(view_name=default_view_name)
+
+        ans = {}
+        master_db = self.master_db
+        for inst_name, sch_inst in self.instances.items():
+            if master_db.exclude_model(sch_inst.lib_name, sch_inst.cell_name):
+                continue
+            if sch_inst.is_primitive:
+                # primitive/static instance has no model file.
+                # so we must use model file for this cell
+                if not model_path.is_file():
+                    raise ValueError(f'Cannot find model file for {key}')
+                ans.clear()
+                ans['view_name'] = default_view_name
+                return ans
+            else:
+                try:
+                    ans[inst_name] = sch_inst.master._get_hierarchy_helper(output_type,
+                                                                           is_leaf_table,
+                                                                           default_view_name)
+                except ValueError as ex:
+                    # cannot generate model for this instance
+                    if not model_path.is_file():
+                        # Cannot model this schematic too, re-raise error from instance
+                        raise ex
+                    # otherwise, this is a leaf model cell
+                    ans.clear()
+                    ans['view_name'] = default_view_name
+                    return ans
+
+        # get here if all instances are successfully modeled
+        return ans
+
+
class MosModuleBase(Module):
    """The base design class for the BAG primitive transistor."""

    def __init__(self, yaml_fname: str, database: ModuleDB, params: Param, **kwargs: Any) -> None:
        Module.__init__(self, yaml_fname, database, params, **kwargs)
        # all four transistor terminals are inout
        self._pins = {name: TermType.inout for name in ('G', 'D', 'S', 'B')}

    @classmethod
    def is_primitive(cls) -> bool:
        return True

    @classmethod
    def get_params_info(cls) -> Dict[str, str]:
        return {
            'w': 'transistor width, in resolution units or number of fins.',
            'l': 'transistor length, in resolution units.',
            'nf': 'transistor number of fingers.',
            'intent': 'transistor threshold flavor.',
        }

    def design(self, w: int, l: int, nf: int, intent: str) -> None:
        # primitive cell; nothing to generate
        pass

    def get_schematic_parameters(self) -> Dict[str, str]:
        """Convert the design parameters into schematic string parameters."""
        mos_info = self.tech_info.tech_params['mos']
        w_res = mos_info['width_resolution']
        l_res = mos_info['length_resolution']
        scale = self.sch_scale

        w: int = self.params['w']
        l: int = self.params['l']
        nf: int = self.params['nf']

        # width is kept as-is when width_resolution is 1 (e.g. a fin count);
        # otherwise it is scaled to schematic units like the length
        w_scale = 1 if w_res == 1 else scale
        w_str = float_to_si_string(int(round(w * w_scale / w_res)) * w_res)
        l_str = float_to_si_string(int(round(l * scale / l_res)) * l_res)

        return {'w': w_str, 'l': l_str, 'nf': str(nf)}

    def get_cell_name_from_parameters(self) -> str:
        # cell name encodes the device type prefix and the threshold flavor
        mos_type = self.orig_cell_name.split('_')[0]
        return f"{mos_type}_{self.params['intent']}"

    def should_delete_instance(self) -> bool:
        # a transistor with zero fingers or zero width is removed from the netlist
        return self.params['nf'] == 0 or self.params['w'] == 0
+
+
class DiodeModuleBase(Module):
    """The base design class for the BAG primitive diode."""

    def __init__(self, yaml_fname: str, database: ModuleDB, params: Param, **kwargs: Any) -> None:
        Module.__init__(self, yaml_fname, database, params, **kwargs)
        self._pins = {name: TermType.inout for name in ('PLUS', 'MINUS')}

    @classmethod
    def is_primitive(cls) -> bool:
        return True

    @classmethod
    def get_params_info(cls) -> Dict[str, str]:
        return {
            'w': 'diode width, in resolution units or number of fins.',
            'l': 'diode length, in resolution units or number of fingers.',
            'intent': 'diode flavor.',
        }

    def design(self, w: int, l: int, intent: str) -> None:
        # primitive cell; nothing to generate
        pass

    def get_schematic_parameters(self) -> Dict[str, str]:
        """Convert the design parameters into schematic string parameters."""
        dio_info = self.tech_info.tech_params['diode']
        w_res = dio_info['width_resolution']
        l_res = dio_info['length_resolution']

        w: int = self.params['w']
        l: int = self.params['l']

        w_str = float_to_si_string(int(round(w / w_res)) * w_res)
        if l_res == 1:
            # length is a plain count (e.g. number of fingers); emit verbatim
            l_str = str(l)
        else:
            l_str = float_to_si_string(int(round(l * self.sch_scale / l_res)) * l_res)

        return {'w': w_str, 'l': l_str}

    def get_cell_name_from_parameters(self) -> str:
        # cell name encodes the device type prefix and the flavor
        dio_type = self.orig_cell_name.split('_')[0]
        return f"{dio_type}_{self.params['intent']}"

    def should_delete_instance(self) -> bool:
        # a zero-sized diode is removed from the netlist
        return self.params['w'] == 0 or self.params['l'] == 0
+
+
class ResPhysicalModuleBase(Module):
    """The base design class for a real resistor parametrized by width and length."""

    def __init__(self, yaml_fname: str, database: ModuleDB, params: Param, **kwargs: Any) -> None:
        Module.__init__(self, yaml_fname, database, params, **kwargs)
        self._pins = {name: TermType.inout for name in ('PLUS', 'MINUS', 'BULK')}

    @classmethod
    def is_primitive(cls) -> bool:
        return True

    @classmethod
    def get_params_info(cls) -> Dict[str, str]:
        return {
            'w': 'resistor width, in resolution units.',
            'l': 'resistor length, in resolution units.',
            'intent': 'resistor flavor.',
        }

    def design(self, w: int, l: int, intent: str) -> None:
        # primitive cell; nothing to generate
        pass

    def get_schematic_parameters(self) -> Dict[str, str]:
        """Convert the resistor dimensions to SI strings in schematic units."""
        scale = self.sch_scale
        w_str = float_to_si_string(self.params['w'] * scale)
        l_str = float_to_si_string(self.params['l'] * scale)
        return {'w': w_str, 'l': l_str}

    def get_cell_name_from_parameters(self) -> str:
        return f"res_{self.params['intent']}"

    def should_delete_instance(self) -> bool:
        # a zero-sized resistor is removed from the netlist
        return self.params['w'] == 0 or self.params['l'] == 0
+
+
class ResMetalModule(Module):
    """The base design class for a metal resistor."""

    def __init__(self, yaml_fname: str, database: ModuleDB, params: Param, **kwargs: Any) -> None:
        Module.__init__(self, yaml_fname, database, params, **kwargs)
        self._pins = {name: TermType.inout for name in ('PLUS', 'MINUS')}

    @classmethod
    def is_primitive(cls) -> bool:
        return True

    @classmethod
    def get_params_info(cls) -> Dict[str, str]:
        return {
            'w': 'resistor width, in resolution units.',
            'l': 'resistor length, in resolution units.',
            'layer': 'the metal layer ID.',
        }

    def design(self, w: int, l: int, layer: int) -> None:
        # primitive cell; nothing to generate
        pass

    def get_schematic_parameters(self) -> Dict[str, str]:
        """Convert the resistor dimensions to SI strings in schematic units."""
        scale = self.sch_scale
        w_str = float_to_si_string(self.params['w'] * scale)
        l_str = float_to_si_string(self.params['l'] * scale)
        return {'w': w_str, 'l': l_str}

    def get_cell_name_from_parameters(self) -> str:
        # one schematic cell per metal layer
        return f"res_metal_{self.params['layer']}"

    def should_delete_instance(self) -> bool:
        # a zero-sized resistor is removed from the netlist
        return self.params['w'] == 0 or self.params['l'] == 0
diff --git a/src/bag/design/netlist.py b/src/bag/design/netlist.py
new file mode 100644
index 0000000..4798f97
--- /dev/null
+++ b/src/bag/design/netlist.py
@@ -0,0 +1,447 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""netlist processing utilities."""
+
+from __future__ import annotations
+
+from typing import Union, List, Dict, Set, Callable, TextIO, Any
+
+import abc
+from pathlib import Path
+
+from pybag.enum import DesignOutput
+
+from ..env import get_bag_device_map
+from ..util.search import get_new_name
+from ..io.file import open_file
+from ..io.string import wrap_string
+
+
def guess_netlist_type(netlist_in: Union[Path, str]) -> DesignOutput:
    """Guess the format of the given netlist file.

    First tries the file extension; if the extension is not recognized, scans
    the file content for format-specific keywords.

    Parameters
    ----------
    netlist_in : Union[Path, str]
        the netlist file.

    Returns
    -------
    netlist_type : DesignOutput
        the guessed netlist format.

    Raises
    ------
    ValueError
        if the netlist format cannot be determined.
    """
    if isinstance(netlist_in, str):
        netlist_in = Path(netlist_in)

    # bug fix: Path.suffix includes the leading dot (e.g. '.cdl'), so the
    # previous comparisons against 'cdl'/'spf'/'sp'/'scs' never matched and
    # extension-based detection was dead code.
    ext = netlist_in.suffix
    if ext in ('.cdl', '.spf', '.sp'):
        return DesignOutput.CDL
    if ext == '.scs':
        return DesignOutput.SPECTRE

    # unknown extension; look for format hints in the content
    with open_file(netlist_in, 'r') as f:
        for line in f:
            if 'simulator lang' in line or line.startswith('subckt'):
                return DesignOutput.SPECTRE
            if line.startswith('.subckt') or line.startswith('.SUBCKT'):
                return DesignOutput.CDL

    raise ValueError(f'Cannot guess netlist format of file {netlist_in}')
+
+
def parse_netlist(netlist_in: Union[Path, str], netlist_type: DesignOutput) -> Netlist:
    """Parse the given netlist file into a Netlist syntax tree.

    Raises ValueError for netlist formats other than CDL and SPECTRE.
    """
    path = Path(netlist_in) if isinstance(netlist_in, str) else netlist_in
    content = _read_lines(path)

    if netlist_type is DesignOutput.CDL:
        parser = ParserCDL
    elif netlist_type is DesignOutput.SPECTRE:
        parser = ParserSpectre
    else:
        raise ValueError(f'Unsupported netlist format: {netlist_type}')
    return parser.parse_netlist(content)
+
+
def add_mismatch_offsets(netlist_in: Union[Path, str], netlist_out: Union[Path, str],
                         netlist_type: DesignOutput) -> Dict[str, Any]:
    """Add mismatch offset voltage sources in series with every transistor gate.

    Convenience wrapper around add_internal_sources() with ports=['g'].

    Returns
    -------
    offset_map : Dict[str, Any]
        mapping from transistor/port base name to the created offset variable name.
    """
    return add_internal_sources(netlist_in, netlist_out, netlist_type, ['g'])
+
+
def add_internal_sources(netlist_in: Union[Path, str], netlist_out: Union[Path, str],
                         netlist_type: DesignOutput, ports: List[str]) -> Dict[str, Any]:
    """Copy the netlist, adding series voltage sources on the given transistor ports.

    Returns the mapping from transistor/port base name to offset variable name.
    """
    parsed = parse_netlist(netlist_in, netlist_type)

    out_path = Path(netlist_out) if isinstance(netlist_out, str) else netlist_out

    # split the device map into BAG-side and PDK-side transistor cell names
    bag_mos: Set[str] = set()
    pdk_mos: Set[str] = set()
    for bag_name, pdk_name in get_bag_device_map('mos'):
        bag_mos.add(bag_name)
        pdk_mos.add(pdk_name)

    used_names: Set[str] = set()
    offset_map: Dict[str, str] = {}
    with open_file(out_path, 'w') as stream:
        parsed.netlist_with_offset(stream, used_names, offset_map, netlist_type, ports,
                                   bag_mos, pdk_mos)
    return offset_map
+
+
class NetlistNode(abc.ABC):
    """Base class of all netlist syntax-tree nodes."""

    @abc.abstractmethod
    def netlist(self, stream: TextIO, netlist_type: DesignOutput) -> None:
        """Write this node to the given stream in the given netlist format."""
        pass

    @abc.abstractmethod
    def netlist_with_offset(self, stream: TextIO, used_names: Set[str], offset_map: Dict[str, str],
                            netlist_type: DesignOutput, ports: List[str], bag_mos: Set[str], pdk_mos: Set[str]
                            ) -> None:
        """Write this node, inserting series voltage sources on transistor ports.

        Parameters
        ----------
        stream : TextIO
            the output stream.
        used_names : Set[str]
            names already taken; updated in place as source names are created.
        offset_map : Dict[str, str]
            base-name to offset-variable-name mapping; updated in place.
        netlist_type : DesignOutput
            the output netlist format.
        ports : List[str]
            transistor ports ('g', 'd', 's', 'b') that receive series sources.
        bag_mos : Set[str]
            BAG transistor primitive cell names.
        pdk_mos : Set[str]
            PDK transistor cell names.
        """
        pass
+
+
class Netlist(NetlistNode):
    """A parsed netlist file: a header followed by subcircuit definitions."""

    def __init__(self, header: Header, subckts: List[Subcircuit]) -> None:
        self._header = header
        self._subckts = subckts
        self._used_names: Set[str] = set()
        # register all subcircuit names, rejecting duplicates
        for sub in subckts:
            name = sub.name
            if name in self._used_names:
                raise ValueError(f'Found duplicated subcircuit name: {name}')
            self._used_names.add(name)

    @property
    def used_names(self) -> Set[str]:
        """Set[str]: names of all subcircuits defined in this netlist."""
        return self._used_names

    def netlist(self, stream: TextIO, netlist_type: DesignOutput) -> None:
        self._header.netlist(stream, netlist_type)
        for sub in self._subckts:
            sub.netlist(stream, netlist_type)

    def netlist_with_offset(self, stream: TextIO, used_names: Set[str], offset_map: Dict[str, str],
                            netlist_type: DesignOutput, ports: List[str], bag_mos: Set[str],
                            pdk_mos: Set[str]) -> None:
        self._header.netlist(stream, netlist_type)
        # only the last subcircuit (the top cell) gets offset sources;
        # all others are netlisted unchanged
        for sub in self._subckts[:-1]:
            sub.netlist(stream, netlist_type)
        self._subckts[-1].netlist_with_offset(stream, used_names, offset_map, netlist_type,
                                              ports, bag_mos, pdk_mos)
+
+
class Header(NetlistNode):
    """The netlist header: everything before the first subcircuit definition."""

    def __init__(self, lines: List[str]) -> None:
        self.lines = lines

    def netlist(self, stream: TextIO, netlist_type: DesignOutput) -> None:
        # header lines are emitted verbatim, independent of the output format
        for text in self.lines:
            stream.write(f'{text}\n')

    def netlist_with_offset(self, stream: TextIO, used_names: Set[str], offset_map: Dict[str, str],
                            netlist_type: DesignOutput, ports: List[str], bag_mos: Set[str],
                            pdk_mos: Set[str]) -> None:
        # the header contains no instances, so there is nothing to offset
        self.netlist(stream, netlist_type)
+
+
class Instance(NetlistNode):
    """A single instance line inside a subcircuit.

    Parameters
    ----------
    inst_name : str
        the instance name.
    cell_name : str
        the master cell name; empty string for primitive devices.
    ports : List[str]
        the net names connected to the instance terminals, in declaration order.
    params : List[str]
        the remaining 'key=value' parameter tokens.
    """

    def __init__(self, inst_name: str, cell_name: str, ports: List[str], params: List[str]) -> None:
        self._inst_name = inst_name
        self._cell_name = cell_name
        self._ports = ports
        self._params = params

    def netlist(self, stream: TextIO, netlist_type: DesignOutput) -> None:
        if netlist_type is DesignOutput.CDL:
            if self._cell_name:
                # this is not a primitive instance; CDL uses '/' before the cell name
                tmp_list = [self._inst_name]
                tmp_list.extend(self._ports)
                tmp_list.append('/')
                tmp_list.append(self._cell_name)
                tmp_list.extend(self._params)
            else:
                # this is a primitive instance; it has no cell name
                tmp_list = [self._inst_name]
                tmp_list.extend(self._ports)
                tmp_list.extend(self._params)

            stream.write(wrap_string(tmp_list))
        elif netlist_type is DesignOutput.SPECTRE:
            tmp_list = [self._inst_name]
            tmp_list.extend(self._ports)
            tmp_list.append(self._cell_name)
            tmp_list.extend(self._params)
            stream.write(wrap_string(tmp_list))
        else:
            raise ValueError(f'unsupported netlist type: {netlist_type}')

    def netlist_with_offset(self, stream: TextIO, used_names: Set[str], offset_map: Dict[str, str],
                            netlist_type: DesignOutput, ports: List[str], bag_mos: Set[str], pdk_mos: Set[str]
                            ) -> None:
        """Netlist this instance; transistors get a series source on each given port."""
        # renamed local (was shadowing the bag_mos set parameter with a bool)
        if self._cell_name in bag_mos:
            # BAG primitive terminal order: B D G S
            is_bag = True
            body, drain, gate, source = self._ports
        elif self._cell_name in pdk_mos:
            # PDK terminal order: D G S B
            is_bag = False
            drain, gate, source, body = self._ports
        else:
            # not a transistor; netlist unchanged
            return self.netlist(stream, netlist_type)

        old_mapping = dict(b=body, d=drain, g=gate, s=source)
        new_mapping = old_mapping.copy()

        # 1. reroute each selected port of the device to a new internal net
        inst_id = self._inst_name.replace("/", "_").replace("@", "_")
        for port in ports:
            new_mapping[port] = f'new___{old_mapping[port]}_{inst_id}'

        if is_bag:
            tmp_list = [self._inst_name, new_mapping['b'], new_mapping['d'], new_mapping['g'],
                        new_mapping['s']]
        else:
            tmp_list = [self._inst_name, new_mapping['d'], new_mapping['g'], new_mapping['s'],
                        new_mapping['b']]
        tmp_list.append(self._cell_name)
        tmp_list.extend(self._params)
        stream.write(wrap_string(tmp_list))

        # 2. add one voltage source per port between the old and new nets.
        # Fingers of the same transistor (same base name, different '@' index)
        # share one offset variable but get distinct source instances.
        base_name, sep, index = self._inst_name.partition('@')
        for port in ports:
            base_name_port = f'{base_name}_{port}'
            if base_name_port in offset_map:  # different finger of same transistor
                offset_v_port = offset_map[base_name_port]
            else:  # create unique name
                offset_v_port = get_new_name(f'v__{base_name_port.replace("/", "_")}', used_names)
                used_names.add(offset_v_port)
                offset_map[base_name_port] = offset_v_port

            # bug fix: the source name and dc value are computed per-port now;
            # previously the last port's offset variable was reused for every
            # source, producing duplicated names and wrong dc assignments
            # whenever more than one port was requested.
            vdc_name = f'V{offset_v_port}{sep}{index}'
            if netlist_type is DesignOutput.SPECTRE:
                tmp_list = [vdc_name, new_mapping[port], old_mapping[port], 'vsource',
                            'type=dc', f'dc={offset_v_port}']
            else:
                tmp_list = [vdc_name, new_mapping[port], old_mapping[port], offset_v_port]
            stream.write(wrap_string(tmp_list))
+
+
class Subcircuit(NetlistNode):
    """A subcircuit definition: name, port list, and body items.

    Body items are either raw text lines (comments, parameter lines) or
    Instance objects.
    """

    def __init__(self, name: str, ports: List[str], items: List[Union[str, Instance]]) -> None:
        self._name = name
        self._ports = ports
        self._items = items

    @property
    def name(self) -> str:
        """str: the subcircuit name."""
        return self._name

    def netlist(self, stream: TextIO, netlist_type: DesignOutput) -> None:
        self._netlist_helper(stream, netlist_type,
                             lambda node: node.netlist(stream, netlist_type))

    def netlist_with_offset(self, stream: TextIO, used_names: Set[str], offset_map: Dict[str, str],
                            netlist_type: DesignOutput, ports: List[str], bag_mos: Set[str],
                            pdk_mos: Set[str]) -> None:
        self._netlist_helper(
            stream, netlist_type,
            lambda node: node.netlist_with_offset(stream, used_names, offset_map, netlist_type,
                                                  ports, bag_mos, pdk_mos))

    def _netlist_helper(self, stream, netlist_type: DesignOutput,
                        fun: Callable[[NetlistNode], None]) -> None:
        # subcircuit header line
        keyword = 'subckt' if netlist_type is DesignOutput.SPECTRE else '.SUBCKT'
        stream.write(wrap_string([keyword, self._name] + self._ports))

        # body: raw lines are emitted verbatim; instances go through the visitor
        for entry in self._items:
            if isinstance(entry, str):
                stream.write(entry)
                stream.write('\n')
            else:
                fun(entry)

        # end-of-subcircuit marker
        if netlist_type is DesignOutput.SPECTRE:
            stream.write(wrap_string(['ends', self._name]))
        else:
            stream.write('.ENDS\n')

        stream.write('\n')
+
+
class Parser(abc.ABC):
    """Base class for netlist parsers.

    Subclasses define the line-level syntax (subcircuit start/end markers,
    comment markers, instance line format); this class implements the shared
    file-structure parsing.
    """

    @classmethod
    @abc.abstractmethod
    def is_subckt_start(cls, line: str) -> bool:
        """Returns True if the given line starts a subcircuit definition."""
        pass

    @classmethod
    @abc.abstractmethod
    def is_subckt_end(cls, line: str) -> bool:
        """Returns True if the given line ends a subcircuit definition."""
        pass

    @classmethod
    @abc.abstractmethod
    def is_comment(cls, line) -> bool:
        """Returns True if the given line is a comment line."""
        pass

    @classmethod
    @abc.abstractmethod
    def parse_instance(cls, tokens: List[str]) -> Instance:
        """Parses an already-tokenized instance line into an Instance object."""
        pass

    @classmethod
    def parse_netlist(cls, lines: List[str]) -> Netlist:
        """Parses the given (continuation-joined) lines into a Netlist.

        The file is split into a header (everything before the first
        subcircuit) followed by subcircuit definitions; text between
        subcircuits is skipped.
        """
        subckts = []
        idx = 0
        num_lines = len(lines)
        # everything before the first subcircuit start is the header
        while idx < num_lines and not cls.is_subckt_start(lines[idx]):
            idx += 1

        header = cls.parse_header(lines, 0, idx)
        while idx < num_lines:
            start_idx = idx
            # advance to the matching subcircuit end line (inclusive)
            while idx < num_lines and not cls.is_subckt_end(lines[idx]):
                idx += 1
            if idx == num_lines:
                raise ValueError('Did not find subcircuit end.')
            idx += 1
            subckts.append(cls.parse_subcircuit(lines, start_idx, idx))

            # skip ahead to the next subcircuit start
            while idx < num_lines and not cls.is_subckt_start(lines[idx]):
                idx += 1

        return Netlist(header, subckts)

    @classmethod
    def parse_header(cls, lines: List[str], start: int, stop: int) -> Header:
        """Parses lines[start:stop] as the netlist header."""
        return Header(lines[start:stop])

    @classmethod
    def parse_subcircuit(cls, lines: List[str], start: int, stop: int) -> Subcircuit:
        """Parses lines[start:stop] as one subcircuit definition.

        lines[start] must be the subcircuit header line and lines[stop - 1]
        the subcircuit end line.
        """
        header_tokens = lines[start].split()
        cell_name = header_tokens[1]
        ports = header_tokens[2:]
        items: List[Union[str, Instance]] = []
        # skip last line because it is end subcircuit line
        for idx in range(start + 1, stop - 1):
            cur_line = lines[idx]
            if cls.is_comment(cur_line):
                items.append(cur_line)
            else:
                tokens = cur_line.split()
                if tokens:
                    if tokens[0] == 'parameters':
                        # spectre parameter declaration; keep as raw text
                        items.append(' '.join(tokens))
                    else:
                        items.append(cls.parse_instance(tokens))

        return Subcircuit(cell_name, ports, items)
+
+
class ParserCDL(Parser):
    """Parser for CDL/SPICE format netlists."""

    # instance-name prefixes of two-terminal primitive devices (no cell name)
    prim_prefix = {'R', 'C', 'V', 'I'}

    @classmethod
    def is_subckt_start(cls, line: str) -> bool:
        return line.startswith('.SUBCKT ') or line.startswith('.subckt ')

    @classmethod
    def is_subckt_end(cls, line: str) -> bool:
        return line.startswith('.ENDS') or line.startswith('.ends')

    @classmethod
    def is_comment(cls, line) -> bool:
        return line.startswith('*')

    @classmethod
    def parse_instance(cls, tokens: List[str]) -> Instance:
        """Parses a CDL instance line.

        Handles three layouts: two-terminal primitives (R/C/V/I prefix),
        instances with an explicit '/' separator before the cell name, and
        instances where the cell name is the token right before the first
        'key=value' parameter (or simply the last token).
        """
        inst_name = tokens[0]
        if inst_name[0] in cls.prim_prefix:
            # two-terminal primitive device: two ports, no cell name,
            # everything else is parameters
            ports = tokens[1:3]
            params = tokens[3:]
            return Instance(inst_name, '', ports, params)

        ports = None
        end_idx = 0
        for end_idx in range(1, len(tokens)):
            if tokens[end_idx] == '/':
                # explicit separator: the cell name follows it
                ports = tokens[1:end_idx]
                end_idx += 1
                break
            elif '=' in tokens[end_idx]:
                # first parameter found: cell name is the token before it
                ports = tokens[1:end_idx - 1]
                end_idx -= 1
                break

        if ports is None:
            # no separator and no parameters: the last token is the cell name.
            # bug fix: the old code decremented end_idx here, which made the
            # last port be misread as the cell name and the real cell name a
            # parameter.
            ports = tokens[1:end_idx]

        cell_name = tokens[end_idx]
        params = tokens[end_idx + 1:]
        return Instance(inst_name, cell_name, ports, params)
+
+
class ParserSpectre(Parser):
    """Parser for Spectre format netlists."""

    @classmethod
    def is_subckt_start(cls, line: str) -> bool:
        return line.startswith('subckt ')

    @classmethod
    def is_subckt_end(cls, line: str) -> bool:
        return line.startswith('ends')

    @classmethod
    def is_comment(cls, line) -> bool:
        return line.startswith('*') or line.startswith('//')

    @classmethod
    def parse_instance(cls, tokens: List[str]) -> Instance:
        """Parses a Spectre instance line.

        The cell name is the token right before the first 'key=value'
        parameter, or simply the last token when there are no parameters.
        """
        inst_name = tokens[0]
        ports = None
        end_idx = 0
        for end_idx in range(1, len(tokens)):
            if '=' in tokens[end_idx]:
                # first parameter found: cell name is the token before it
                ports = tokens[1:end_idx - 1]
                end_idx -= 1
                break

        if ports is None:
            # no parameters: the last token is the cell name.
            # bug fix: the old code decremented end_idx here, which made the
            # last port be misread as the cell name and the real cell name a
            # parameter.
            ports = tokens[1:end_idx]

        cell_name = tokens[end_idx]
        params = tokens[end_idx + 1:]
        return Instance(inst_name, cell_name, ports, params)
+
+
def _read_lines(netlist: Path) -> List[str]:
    """Reads the given Spectre or CDL netlist.

    This function joins continuation lines (lines starting with '+') into the
    preceding line, so downstream parsing never has to deal with them.
    """
    lines: List[str] = []
    with open_file(netlist, 'r') as f:
        for line in f:
            # bug fix: use rstrip('\n') instead of slicing off the last
            # character, so a file whose final line lacks a trailing newline
            # does not lose its last character.
            text = line.rstrip('\n')
            if text.startswith('+'):
                # continuation: append to the previous line, dropping the '+'
                lines[-1] += text[1:]
            else:
                lines.append(text)

    return lines
diff --git a/src/bag/env.py b/src/bag/env.py
new file mode 100644
index 0000000..3ca3df2
--- /dev/null
+++ b/src/bag/env.py
@@ -0,0 +1,243 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines various methods to query information about the design environment.
+"""
+
+from typing import Tuple, Dict, Any, Optional, Type, List, cast
+
+import os
+import socket
+from pathlib import Path
+
+from .io.file import read_file, read_yaml_env, read_yaml
+from .layout.tech import TechInfo
+from .layout.routing import RoutingGrid
+from .util.importlib import import_class
+
+
def get_bag_work_path() -> Path:
    """Returns the BAG working directory as a resolved Path.

    Raises
    ------
    ValueError
        if BAG_WORK_DIR is unset or does not point to a directory.
    """
    work_dir = os.environ.get('BAG_WORK_DIR', '')
    if work_dir:
        ans = Path(work_dir).resolve()
        if ans.is_dir():
            return ans
        raise ValueError(f'$BAG_WORK_DIR = "{work_dir}" is not a directory')
    raise ValueError('Environment variable BAG_WORK_DIR not defined.')
+
+
def get_bag_tmp_path() -> Path:
    """Returns the BAG temporary files directory, creating it if necessary.

    Raises
    ------
    ValueError
        if BAG_TEMP_DIR is unset or cannot be used as a directory.
    """
    tmp_dir = os.environ.get('BAG_TEMP_DIR', '')
    if not tmp_dir:
        raise ValueError('Environment variable BAG_TEMP_DIR not defined.')

    ans = Path(tmp_dir).resolve()
    # create the directory (and parents) on first use
    ans.mkdir(parents=True, exist_ok=True)
    if ans.is_dir():
        return ans
    raise ValueError(f'$BAG_TEMP_DIR = "{tmp_dir}" is not a directory')
+
+
def get_tech_path() -> Path:
    """Returns the technology directory as a resolved Path.

    Raises
    ------
    ValueError
        if BAG_TECH_CONFIG_DIR is unset or does not point to a directory.
    """
    tech_dir = os.environ.get('BAG_TECH_CONFIG_DIR', '')
    if not tech_dir:
        raise ValueError('Environment variable BAG_TECH_CONFIG_DIR not defined.')
    tech_path = Path(tech_dir).resolve()
    if not tech_path.is_dir():
        # consistency: f-string like the sibling path getters (was str.format)
        raise ValueError(f'BAG_TECH_CONFIG_DIR = "{tech_dir}" is not a directory')

    return tech_path
+
+
def get_bag_work_dir() -> str:
    """Returns the BAG working directory as a string."""
    work_path = get_bag_work_path()
    return str(work_path)
+
+
def get_bag_tmp_dir() -> str:
    """Returns the BAG temporary files directory as a string."""
    tmp_path = get_bag_tmp_path()
    return str(tmp_path)
+
+
def get_tech_dir() -> str:
    """Returns the technology directory as a string."""
    tech_path = get_tech_path()
    return str(tech_path)
+
+
def get_bag_config() -> Dict[str, Any]:
    """Returns the BAG configuration dictionary.

    Reads the YAML file pointed to by the BAG_CONFIG_PATH environment
    variable.

    Raises
    ------
    ValueError
        if BAG_CONFIG_PATH is not defined.
    """
    bag_config_path = os.environ.get('BAG_CONFIG_PATH', '')
    if not bag_config_path:
        raise ValueError('Environment variable BAG_CONFIG_PATH not defined.')

    return read_yaml_env(bag_config_path)
+
+
def get_tech_params(bag_config: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Returns the technology parameters dictionary.

    Parameters
    ----------
    bag_config : Optional[Dict[str, Any]]
        the BAG configuration dictionary.  If None, will try to read it from file.

    Returns
    -------
    tech_params : Dict[str, Any]
        the technology configuration dictionary.  The configuration file name
        is recorded under the 'tech_config_fname' key.
    """
    if bag_config is None:
        bag_config = get_bag_config()

    # look up the file name once and reuse it (was looked up twice before)
    fname = bag_config['tech_config_path']
    ans = read_yaml_env(fname)
    ans['tech_config_fname'] = fname
    return ans
+
+
def create_tech_info(bag_config: Optional[Dict[str, Any]] = None) -> TechInfo:
    """Create a TechInfo object from the technology configuration.

    Falls back to a dummy TechInfo when no 'class' entry is configured.
    """
    tech_params = get_tech_params(bag_config=bag_config)

    if 'class' not in tech_params:
        # no TechInfo class specified; fall back to a dummy placeholder
        print('*WARNING*: No TechInfo class defined. Using a dummy version.')
        return TechInfo(tech_params, {}, '')

    tech_cls = cast(Type[TechInfo], import_class(tech_params['class']))
    return tech_cls(tech_params)
+
+
def create_routing_grid(tech_info: Optional[TechInfo] = None,
                        bag_config: Optional[Dict[str, Any]] = None) -> RoutingGrid:
    """Create a RoutingGrid object from the technology configuration."""
    tinfo = create_tech_info(bag_config=bag_config) if tech_info is None else tech_info
    return RoutingGrid(tinfo, tinfo.tech_params['tech_config_fname'])
+
+
def can_connect_to_port(port: int) -> bool:
    """Check if we can successfully connect to the given localhost TCP port.

    Used to check if Virtuoso server is up.

    Parameters
    ----------
    port : int
        the port to probe on localhost.

    Returns
    -------
    success : bool
        True if a connection was established, False otherwise.
    """
    try:
        # with-statement guarantees the socket is closed on all paths
        # (replaces the manual try/finally close)
        with socket.socket() as s:
            s.connect(('localhost', port))
            return True
    except socket.error:
        return False
+
+
def get_port_number(bag_config: Optional[Dict[str, Any]] = None) -> Tuple[int, str]:
    """Read the port number from the port file.

    Parameters
    ----------
    bag_config : Optional[Dict[str, Any]]
        the BAG configuration dictionary. If None, will try to read it from file.

    Returns
    -------
    port : int
        the port number. Negative on failure.
    msg : str
        Empty string on success, the error message on failure.
    """
    if bag_config is None:
        bag_config = get_bag_config()

    port_file = get_bag_work_path() / bag_config['socket']['port_file']
    try:
        port = int(read_file(port_file))
    except (ValueError, FileNotFoundError) as err:
        # missing or malformed port file (was two identical except clauses)
        return -1, str(err)

    if can_connect_to_port(port):
        return port, ''
    return -1, f'Cannot connect to port {port}'
+
+
def get_netlist_setup_file() -> str:
    """Returns the netlist setup file path, raising ValueError if missing."""
    fname = get_tech_path() / 'netlist_setup' / 'netlist_setup.yaml'
    if fname.is_file():
        return str(fname)
    raise ValueError(f'{fname} is not a file.')
+
+
def get_gds_layer_map() -> str:
    """Returns the GDS layer map file path, raising ValueError if missing."""
    fname = get_tech_path() / 'gds_setup' / 'gds.layermap'
    if fname.is_file():
        return str(fname)
    raise ValueError(f'{fname} is not a file.')
+
+
def get_gds_object_map() -> str:
    """Returns the GDS object map file path, raising ValueError if missing."""
    fname = get_tech_path() / 'gds_setup' / 'gds.objectmap'
    if fname.is_file():
        return str(fname)
    raise ValueError(f'{fname} is not a file.')
+
+
def get_bag_device_map(name: str) -> List[Tuple[str, str]]:
    """Returns the device name mapping for the given device category.

    Reads the 'types' entry for the given key from the technology's
    netlist_setup/gen_config.yaml file.  Each entry is a name pair —
    presumably (BAG cell name, PDK cell name); see the transistor usage in
    add_internal_sources() — TODO confirm.

    Parameters
    ----------
    name : str
        the device category key (e.g. 'mos').

    Returns
    -------
    dev_map : List[Tuple[str, str]]
        the list of device name pairs.
    """
    config_path = get_tech_path() / 'netlist_setup' / 'gen_config.yaml'
    config = read_yaml(config_path)
    return config[name]['types']
+
+
def get_tech_global_info(prj_name: str) -> Dict[str, Any]:
    """Returns the technology global configuration for the given project.

    Reads 'data/<prj_name>/specs_global/tech_global.yaml' relative to the
    current working directory.
    """
    path = f'data/{prj_name}/specs_global/tech_global.yaml'
    return read_yaml(path)
diff --git a/src/bag/interface/__init__.py b/src/bag/interface/__init__.py
new file mode 100644
index 0000000..8f7b58a
--- /dev/null
+++ b/src/bag/interface/__init__.py
@@ -0,0 +1,50 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package defines classes to interface with the CAD database and circuit simulators.
+"""
+
+from .server import SkillServer
+from .zmqwrapper import ZMQRouter, ZMQDealer
+
+__all__ = ['SkillServer', 'ZMQRouter', 'ZMQDealer', ]
diff --git a/src/bag/interface/abstract.py b/src/bag/interface/abstract.py
new file mode 100755
index 0000000..b2b7545
--- /dev/null
+++ b/src/bag/interface/abstract.py
@@ -0,0 +1,117 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module handles abstract generation
+"""
+
+from __future__ import annotations
+
+from typing import Dict, Any, Sequence
+
+from pathlib import Path
+
+from ..env import get_bag_work_dir
+from ..io.file import write_file
+from ..io.template import new_template_env_fs
+from ..concurrent.core import SubProcessManager
+
+from .lef import LEFInterface
+
+
class AbstractInterface(LEFInterface):
    """A class that creates LEF files using the abstract generator.

    Parameters
    ----------
    config : Dict[str, Any]
        the configuration dictionary.  Must contain the key ``options_file``,
        the path of the abstract options template file.
    """

    def __init__(self, config: Dict[str, Any]) -> None:
        LEFInterface.__init__(self, config)

        # single worker: abstract runs are executed one at a time
        self._manager = SubProcessManager(max_workers=1)
        # file-system based template environment, used to render the
        # user-supplied options template (which lives outside this package)
        self._temp_env_fs = new_template_env_fs()

    def generate_lef(self, impl_lib: str, impl_cell: str, verilog_path: Path, lef_path: Path,
                     run_path: Path, pwr_pins: Sequence[str], gnd_pins: Sequence[str],
                     clk_pins: Sequence[str], analog_pins: Sequence[str],
                     output_pins: Sequence[str], detailed_layers: Sequence[str],
                     cover_layers: Sequence[str], cell_type: str, **kwargs: Any) -> bool:
        """Generate a LEF file by replaying the abstract generator.

        Renders the options and replay files into ``run_path``, runs the
        ``abstract`` tool in no-GUI replay mode, and reports success by
        checking that ``lef_path`` exists afterwards.

        Parameters
        ----------
        impl_lib : str
            the implementation library name.
        impl_cell : str
            the implementation cell name.
        verilog_path : Path
            the verilog shell file.
        lef_path : Path
            the output LEF file path.
        run_path : Path
            the run directory; created if it does not exist.
        pwr_pins : Sequence[str]
            power pin names.
        gnd_pins : Sequence[str]
            ground pin names.
        clk_pins : Sequence[str]
            clock pin names.
        analog_pins : Sequence[str]
            analog pin names.
        output_pins : Sequence[str]
            output pin names.
        detailed_layers : Sequence[str]
            layer names rendered into the options file as the detailed block.
        cover_layers : Sequence[str]
            layer names rendered into the options file as the cover block.
        cell_type : str
            the cell type.
        **kwargs : Any
            unused; accepted for interface compatibility.

        Returns
        -------
        success : bool
            True if the LEF file exists after the run.
        """
        run_path.mkdir(parents=True, exist_ok=True)

        # create options file
        options_path = (run_path / 'bag_abstract.options').resolve()
        self._create_options_file(options_path, pwr_pins, gnd_pins, clk_pins, analog_pins,
                                  output_pins, detailed_layers, cover_layers, cell_type, impl_cell)

        # create replay file
        parent_dir: Path = lef_path.parent
        parent_dir.mkdir(parents=True, exist_ok=True)
        content = self.render_file_template('abstract.replay',
                                            dict(lib_name=impl_lib, cell_name=impl_cell,
                                                 options_file=str(options_path),
                                                 verilog_file=str(verilog_path),
                                                 lef_file=str(lef_path)))
        replay_path = run_path / 'bag_abstract.replay'
        write_file(replay_path, content)

        # fix: was a pointless f-string with no placeholders
        log_path = run_path / 'bag_abstract.log'
        log_file = str(log_path)
        cwd = get_bag_work_dir()
        pinfo_list = [(['abstract', '-replay', str(replay_path), '-nogui'], log_file, None, cwd)]
        # NOTE(review): assumed to block until the run completes, since the
        # return value below checks for the generated file -- confirm with
        # SubProcessManager.batch_subprocess.
        self._manager.batch_subprocess(pinfo_list)

        return lef_path.is_file()

    def _create_options_file(self, out_file: Path, pwr_pins: Sequence[str],
                             gnd_pins: Sequence[str], clk_pins: Sequence[str],
                             analog_pins: Sequence[str], output_pins: Sequence[str],
                             detailed_layers: Sequence[str], cover_layers: Sequence[str],
                             cell_type: str, impl_cell: str) -> None:
        """Render the user options template with pin regexps and layer lists,
        and write the result to ``out_file``."""
        options_file: str = self.config['options_file']

        options_path = Path(options_file).resolve()

        # check options file exists
        if not options_path.is_file():
            raise ValueError(f'Cannot find abstract options template file: {options_path}')

        template = self._temp_env_fs.get_template(str(options_path))

        pwr_regexp = _get_pin_regexp(pwr_pins)
        gnd_regexp = _get_pin_regexp(gnd_pins)
        clk_regexp = _get_pin_regexp(clk_pins)
        ana_regexp = _get_pin_regexp(analog_pins)
        out_regexp = _get_pin_regexp(output_pins)

        detail_str = ' '.join(detailed_layers)
        cover_str = ' '.join(cover_layers)

        content = template.render(pwr_regexp=pwr_regexp, gnd_regexp=gnd_regexp,
                                  clk_regexp=clk_regexp, ana_regexp=ana_regexp,
                                  out_regexp=out_regexp, detail_blk=detail_str,
                                  cover_blk=cover_str, cell_type=cell_type, impl_cell=impl_cell)

        write_file(out_file, content)
+
+
+def _get_pin_regexp(pin_list: Sequence[str]) -> str:
+ if not pin_list:
+ return ''
+ elif len(pin_list) == 1:
+ return f'^{pin_list[0]}$'
+
+ return f'^({"|".join(pin_list)})$'
diff --git a/src/bag/interface/base.py b/src/bag/interface/base.py
new file mode 100644
index 0000000..bdb9958
--- /dev/null
+++ b/src/bag/interface/base.py
@@ -0,0 +1,63 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines the base of all interface classes.
+"""
+
+from typing import Dict, Any
+
+from ..io.template import new_template_env
+
+
class InterfaceBase:
    """Base class of all interface classes.

    Holds the shared template environment used to render template files
    bundled with the ``bag.interface`` package.
    """

    def __init__(self) -> None:
        # templates are loaded from the "templates" directory of bag.interface
        self._tmp_env = new_template_env('bag.interface', 'templates')

    def render_file_template(self, temp_name: str, params: Dict[str, Any]) -> str:
        """Render the package template ``temp_name`` with ``params`` and return the text."""
        return self._tmp_env.get_template(temp_name).render(**params)
diff --git a/src/bag/interface/database.py b/src/bag/interface/database.py
new file mode 100644
index 0000000..754dc7a
--- /dev/null
+++ b/src/bag/interface/database.py
@@ -0,0 +1,884 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines DbAccess, the base class for CAD database manipulation.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, List, Dict, Tuple, Optional, Sequence, Any, Mapping
+
+import abc
+import importlib
+import traceback
+from pathlib import Path
+
+from ..io.file import make_temp_dir, open_file, read_file
+from ..verification import make_checker
+from ..layout.routing.grid import RoutingGrid
+from ..concurrent.core import batch_async_task
+from .base import InterfaceBase
+
+if TYPE_CHECKING:
+ from .zmqwrapper import ZMQDealer
+ from ..verification import Checker
+
+
def dict_to_item_list(table: Dict[str, Any]) -> List[List[str]]:
    """Convert a dictionary with string keys into a key-sorted item list.

    Parameters
    ----------
    table :
        a Python dictionary where the keys are strings.

    Returns
    -------
    assoc_list :
        list of ``[key, value]`` pairs, sorted by key.
    """
    return [[key, table[key]] for key in sorted(table)]
+
+
def format_inst_map(inst_map: Dict[str, Any]) -> List[List[Any]]:
    """Format a DesignModule instance map for database changes.

    Parameters
    ----------
    inst_map :
        the instance map created by DesignModule.

    Returns
    -------
    ans :
        the database change instance map: a list of
        ``[old_inst_name, formatted_rinst_list]`` entries.
    """
    ans = []
    for old_inst_name, rinst_list in inst_map.items():
        formatted = []
        for rinst in rinst_list:
            formatted.append(dict(name=rinst['name'],
                                  lib_name=rinst['lib_name'],
                                  cell_name=rinst['cell_name'],
                                  params=dict_to_item_list(rinst['params']),
                                  term_mapping=dict_to_item_list(rinst['term_mapping'])))
        ans.append([old_inst_name, formatted])
    return ans
+
+
+class DbAccess(InterfaceBase, abc.ABC):
+ """A class that manipulates the CAD database.
+
+ Parameters
+ ----------
+ dealer : Optional[ZMQDealer]
+ an optional socket that can be used to communicate with the CAD database.
+ tmp_dir : str
+ temporary file directory for DbAccess.
+ db_config : Dict[str, Any]
+ the database configuration dictionary.
+ lib_defs_file : str
+ name of the file that contains generator library names.
+ """
+
    def __init__(self, dealer: ZMQDealer, tmp_dir: str, db_config: Dict[str, Any],
                 lib_defs_file: str) -> None:
        InterfaceBase.__init__(self)

        # socket to the CAD database server; None means no server connection.
        self.handler: ZMQDealer = dealer
        # private temporary directory, created under tmp_dir.
        self.tmp_dir: str = make_temp_dir('dbTmp', parent_dir=tmp_dir)
        self.db_config: Dict[str, Any] = db_config
        # best-effort checker creation: failure only disables LVS/RCX, it is
        # deliberately not fatal (hence the broad except below).
        # noinspection PyBroadException
        try:
            check_kwargs = self.db_config['checker'].copy()
            check_kwargs['tmp_dir'] = self.tmp_dir
            self.checker: Optional[Checker] = make_checker(**check_kwargs)
        except Exception:
            stack_trace = traceback.format_exc()
            print('*WARNING* error creating Checker:\n%s' % stack_trace)
            print('*WARNING* LVS/RCX will be disabled.')
            self.checker: Optional[Checker] = None

        # set default lib path
        self._default_lib_path: str = self.get_default_lib_path(db_config)

        # get yaml path mapping
        # NOTE(review): each non-empty line of lib_defs_file is a python
        # package name; blank lines would be passed to add_sch_library as ''
        # and fail the import -- presumably the file contains none; confirm.
        self.lib_path_map: Dict[str, str] = {}
        with open_file(lib_defs_file, 'r') as f:
            for line in f:
                lib_name = line.strip()
                self.add_sch_library(lib_name)

        # if True, instantiate_schematic/instantiate_layout close all open
        # cellviews instead of releasing/refreshing individual ones.
        self._close_all_cv: bool = db_config.get('close_all_cellviews', True)
+
+ @classmethod
+ def get_default_lib_path(cls, db_config: Dict[str, Any]) -> str:
+ default_lib_path = Path(db_config.get('default_lib_path', '.'))
+
+ if not default_lib_path.is_dir():
+ default_lib_path = Path.cwd()
+
+ return str(default_lib_path.resolve())
+
+ @property
+ def default_lib_path(self) -> str:
+ """str: The default directory to create new libraries in.
+ """
+ return self._default_lib_path
+
+ @property
+ def has_bag_server(self) -> bool:
+ """bool: True if the BAG server is up."""
+ return self.handler is not None
+
+ @abc.abstractmethod
+ def get_exit_object(self) -> Any:
+ """Returns an object to send to the server to shut it down.
+
+ Return None if this option is not supported.
+ """
+ return None
+
+ @abc.abstractmethod
+ def get_cells_in_library(self, lib_name: str) -> List[str]:
+ """Get a list of cells in the given library.
+
+ Returns an empty list if the given library does not exist.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+
+ Returns
+ -------
+ cell_list : List[str]
+ a list of cells in the library
+ """
+ return []
+
+ @abc.abstractmethod
+ def create_library(self, lib_name: str, lib_path: str = '') -> None:
+ """Create a new library if one does not exist yet.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ lib_path : str
+ directory to create the library in. If Empty, use default location.
+ """
+ pass
+
+ @abc.abstractmethod
+ def configure_testbench(self, tb_lib: str, tb_cell: str
+ ) -> Tuple[str, List[str], Dict[str, str], Dict[str, str]]:
+ """Update testbench state for the given testbench.
+
+ This method fill in process-specific information for the given testbench.
+
+ Parameters
+ ----------
+ tb_lib : str
+ testbench library name.
+ tb_cell : str
+ testbench cell name.
+
+ Returns
+ -------
+ cur_env : str
+ the current simulation environment.
+ envs : List[str]
+ a list of available simulation environments.
+ parameters : Dict[str, str]
+ a list of testbench parameter values, represented as string.
+ outputs : Dict[str, str]
+ a dictionary of output expressions
+ """
+ return "", [], {}, {}
+
+ @abc.abstractmethod
+ def get_testbench_info(self, tb_lib: str, tb_cell: str
+ ) -> Tuple[List[str], List[str], Dict[str, str], Dict[str, str]]:
+ """Returns information about an existing testbench.
+
+ Parameters
+ ----------
+ tb_lib : str
+ testbench library.
+ tb_cell : str
+ testbench cell.
+
+ Returns
+ -------
+ cur_envs : List[str]
+ the current simulation environments.
+ envs : List[str]
+ a list of available simulation environments.
+ parameters : Dict[str, str]
+ a list of testbench parameter values, represented as string.
+ outputs : Dict[str, str]
+ a list of testbench output expressions.
+ """
+ return [], [], {}, {}
+
+ @abc.abstractmethod
+ def update_testbench(self, lib: str, cell: str, parameters: Dict[str, str],
+ sim_envs: Sequence[str], config_rules: Sequence[List[str]],
+ env_parameters: Sequence[List[Tuple[str, str]]]) -> None:
+ """Update the given testbench configuration.
+
+ Parameters
+ ----------
+ lib : str
+ testbench library.
+ cell : str
+ testbench cell.
+ parameters : Dict[str, str]
+ testbench parameters.
+ sim_envs : Sequence[str]
+ list of enabled simulation environments.
+ config_rules : Sequence[List[str]]
+ config view mapping rules, list of (lib, cell, view) rules.
+ env_parameters : Sequence[List[Tuple[str, str]]]
+ list of param/value list for each simulation environment.
+ """
+ pass
+
+ @abc.abstractmethod
+ def instantiate_layout_pcell(self, lib_name: str, cell_name: str, view_name: str,
+ inst_lib: str, inst_cell: str, params: Dict[str, Any],
+ pin_mapping: Dict[str, str]) -> None:
+ """Create a layout cell with a single pcell instance.
+
+ Parameters
+ ----------
+ lib_name : str
+ layout library name.
+ cell_name : str
+ layout cell name.
+ view_name : str
+ layout view name, default is "layout".
+ inst_lib : str
+ pcell library name.
+ inst_cell : str
+ pcell cell name.
+ params : Dict[str, Any]
+ the parameter dictionary.
+ pin_mapping: Dict[str, str]
+ the pin mapping dictionary.
+ """
+ pass
+
+ @abc.abstractmethod
+ def create_schematics(self, lib_name: str, sch_view: str, sym_view: str,
+ content_list: Sequence[Any]) -> None:
+ """Create the given schematics in CAD database.
+
+ Precondition: the library already exists, all cellviews are writable (i.e. they have been
+ closed already).
+
+ Parameters
+ ----------
+ lib_name : str
+ name of the new library to put the concrete schematics.
+ sch_view : str
+ schematic view name.
+ sym_view : str
+ symbol view name.
+ content_list : Sequence[Any]
+ list of schematics to create.
+ """
+ pass
+
+ @abc.abstractmethod
+ def create_layouts(self, lib_name: str, view: str, content_list: Sequence[Any]) -> None:
+ """Create the given layouts in CAD database.
+
+ Precondition: the library already exists, all cellviews are writable (i.e. they have been
+ closed already).
+
+ Parameters
+ ----------
+ lib_name : str
+ name of the new library to put the concrete schematics.
+ view : str
+ layout view name.
+ content_list : Sequence[Any]
+ list of layouts to create.
+ """
+ pass
+
+ @abc.abstractmethod
+ def close_all_cellviews(self) -> None:
+ """Close all currently opened cellviews in the database."""
+ pass
+
+ @abc.abstractmethod
+ def release_write_locks(self, lib_name: str, cell_view_list: Sequence[Tuple[str, str]]) -> None:
+ """Release write locks from all the given cells.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ cell_view_list : Sequence[Tuple[str, str]]
+ list of cell/view name tuples.
+ """
+ pass
+
+ @abc.abstractmethod
+ def refresh_cellviews(self, lib_name: str, cell_view_list: Sequence[Tuple[str, str]]) -> None:
+ """Refresh the given cellviews in the database.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ cell_view_list : Sequence[Tuple[str, str]]
+ list of cell/view name tuples.
+ """
+ pass
+
+ @abc.abstractmethod
+ def perform_checks_on_cell(self, lib_name: str, cell_name: str, view_name: str) -> None:
+ """Perform checks on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ cell_name : str
+ the cell name.
+ view_name : str
+ the view name.
+ """
+ pass
+
+ @abc.abstractmethod
+ def create_schematic_from_netlist(self, netlist: str, lib_name: str, cell_name: str,
+ sch_view: str = '', **kwargs: Any) -> None:
+ """Create a schematic from a netlist.
+
+ This is mainly used to create extracted schematic from an extracted netlist.
+
+ Parameters
+ ----------
+ netlist : str
+ the netlist file name.
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ sch_view : str
+ schematic view name. The default value is implemendation dependent.
+ **kwargs : Any
+ additional implementation-dependent arguments.
+ """
+ pass
+
+ @abc.abstractmethod
+ def create_verilog_view(self, verilog_file: str, lib_name: str, cell_name: str, **kwargs: Any
+ ) -> None:
+ """Create a verilog view for mix-signal simulation.
+
+ Parameters
+ ----------
+ verilog_file : str
+ the verilog file name.
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ **kwargs : Any
+ additional implementation-dependent arguments.
+ """
+ pass
+
+ @abc.abstractmethod
+ def import_sch_cellview(self, lib_name: str, cell_name: str, view_name: str) -> None:
+ """Recursively import the given schematic and symbol cellview.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ view_name : str
+ view name.
+ """
+ pass
+
+ @abc.abstractmethod
+ def import_design_library(self, lib_name: str, view_name: str) -> None:
+ """Import all design templates in the given library from CAD database.
+
+ Parameters
+ ----------
+ lib_name : str
+ name of the library.
+ view_name : str
+ the view name to import from the library.
+ """
+ pass
+
    @abc.abstractmethod
    def import_gds_file(self, gds_fname: str, lib_name: str, layer_map: str, obj_map: str,
                        grid: RoutingGrid) -> None:
        """Import the given GDS file into the CAD database.

        Parameters
        ----------
        gds_fname : str
            the GDS file name.
        lib_name : str
            the library to import the GDS contents into.
        layer_map : str
            the layer map (presumably a file path -- confirm with implementations).
        obj_map : str
            the object map (presumably a file path -- confirm with implementations).
        grid : RoutingGrid
            the routing grid associated with the imported layout.
        """
        pass
+
+ def send(self, obj: Any) -> Any:
+ """Send the given Python object to the server, and return result."""
+ if self.handler is None:
+ raise Exception('BAG Server is not set up.')
+
+ self.handler.send_obj(obj)
+ reply = self.handler.recv_obj()
+ return reply
+
+ def close(self) -> None:
+ """Terminate the database server gracefully.
+ """
+ if self.handler is not None:
+ exit_obj = self.get_exit_object()
+ if exit_obj is not None:
+ self.handler.send(exit_obj)
+ self.handler.close()
+ self.handler = None
+
+ def get_python_template(self, lib_name: str, cell_name: str, primitive_table: Dict[str, str]
+ ) -> str:
+ """Returns the default Python Module template for the given schematic.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library name.
+ cell_name : str
+ the cell name.
+ primitive_table : Dict[str, str]
+ a dictionary from primitive cell name to module template file name.
+
+ Returns
+ -------
+ template : str
+ the default Python Module template.
+ """
+ param_dict = dict(lib_name=lib_name, cell_name=cell_name)
+ if lib_name == 'BAG_prim':
+ if cell_name in primitive_table:
+ # load template from user defined file
+ template = self._tmp_env.from_string(read_file(primitive_table[cell_name]))
+ return template.render(**param_dict)
+ else:
+ if cell_name.startswith('nmos4_') or cell_name.startswith('pmos4_'):
+ # transistor template
+ module_name = 'MosModuleBase'
+ elif cell_name.startswith('ndio_') or cell_name.startswith('pdio_'):
+ # diode template
+ module_name = 'DiodeModuleBase'
+ elif cell_name.startswith('res_metal_'):
+ module_name = 'ResMetalModule'
+ elif cell_name.startswith('res_'):
+ # physical resistor template
+ module_name = 'ResPhysicalModuleBase'
+ else:
+ raise Exception('Unknown primitive cell: %s' % cell_name)
+
+ param_dict['module_name'] = module_name
+ return self.render_file_template('PrimModule.pyi', param_dict)
+ else:
+ # use default empty template.
+ return self.render_file_template('Module.pyi', param_dict)
+
+ def instantiate_schematic(self, lib_name: str, content_list: Sequence[Any], lib_path: str = '',
+ sch_view: str = 'schematic', sym_view: str = 'symbol') -> None:
+ """Create the given schematics in CAD database.
+
+ Parameters
+ ----------
+ lib_name : str
+ name of the new library to put the concrete schematics.
+ content_list : Sequence[Any]
+ list of schematics to create.
+ lib_path : str
+ the path to create the library in. If empty, use default location.
+ sch_view : str
+ schematic view name.
+ sym_view : str
+ symbol view name.
+ """
+ cell_view_list = []
+ if self._close_all_cv:
+ self.close_all_cellviews()
+ else:
+ for cell_name, _ in content_list:
+ cell_view_list.append((cell_name, sch_view))
+ cell_view_list.append((cell_name, sym_view))
+ self.release_write_locks(lib_name, cell_view_list)
+
+ self.create_library(lib_name, lib_path=lib_path)
+ self.create_schematics(lib_name, sch_view, sym_view, content_list)
+
+ if cell_view_list:
+ self.refresh_cellviews(lib_name, cell_view_list)
+
+ def instantiate_layout(self, lib_name: str, content_list: Sequence[Any], lib_path: str = '',
+ view: str = 'layout') -> None:
+ """Create a batch of layouts.
+
+ Parameters
+ ----------
+ lib_name : str
+ layout library name.
+ content_list : Sequence[Any]
+ list of layouts to create
+ lib_path : str
+ the path to create the library in. If empty, use default location.
+ view : str
+ layout view name.
+ """
+ cell_view_list = []
+ if self._close_all_cv:
+ self.close_all_cellviews()
+ else:
+ for cell_name, _ in content_list:
+ cell_view_list.append((cell_name, view))
+ self.release_write_locks(lib_name, cell_view_list)
+
+ self.create_library(lib_name, lib_path=lib_path)
+ self.create_layouts(lib_name, view, content_list)
+
+ if cell_view_list:
+ self.refresh_cellviews(lib_name, cell_view_list)
+
+ def run_drc(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[bool, str]:
+ """Run DRC on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ **kwargs :
+ optional keyword arguments. See DbAccess class for details.
+
+ Returns
+ -------
+ value : bool
+ True if DRC succeeds.
+ log_fname : str
+ name of the DRC log file.
+ """
+ coro = self.async_run_drc(lib_name, cell_name, **kwargs)
+ results = batch_async_task([coro])
+ if results is None:
+ return False, ''
+
+ ans = results[0]
+ if isinstance(ans, Exception):
+ raise ans
+ return ans
+
+ def run_lvs(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[bool, str]:
+ """Run LVS on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ **kwargs :
+ optional keyword arguments. See DbAccess class for details.
+
+ Returns
+ -------
+ value : bool
+ True if LVS succeeds
+ log_fname : str
+ name of the LVS log file.
+ """
+ coro = self.async_run_lvs(lib_name, cell_name, **kwargs)
+ results = batch_async_task([coro])
+ if results is None:
+ return False, ''
+
+ ans = results[0]
+ if isinstance(ans, Exception):
+ raise ans
+ return ans
+
+ def run_rcx(self, lib_name: str, cell_name: str,
+ params: Optional[Mapping[str, Any]] = None) -> Tuple[str, str]:
+ """run RC extraction on the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ params : Optional[Dict[str, Any]]
+ optional RCX parameter values.
+
+ Returns
+ -------
+ netlist : str
+ The RCX netlist file name. empty if RCX failed.
+ log_fname : str
+ RCX log file name.
+ """
+ coro = self.async_run_rcx(lib_name, cell_name, params=params)
+ results = batch_async_task([coro])
+ if results is None:
+ return '', ''
+
+ ans = results[0]
+ if isinstance(ans, Exception):
+ raise ans
+ return ans
+
+ def export_layout(self, lib_name: str, cell_name: str, out_file: str, **kwargs: Any) -> str:
+ """Export layout.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ out_file : str
+ output file name.
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+
+ Returns
+ -------
+ log_fname : str
+ log file name. Empty if task cancelled.
+ """
+ coro = self.async_export_layout(lib_name, cell_name, out_file, **kwargs)
+ results = batch_async_task([coro])
+ if results is None:
+ return ''
+
+ ans = results[0]
+ if isinstance(ans, Exception):
+ raise ans
+ return ans
+
+ def export_schematic(self, lib_name: str, cell_name: str, out_file: str, **kwargs: Any) -> str:
+ """Export layout.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ out_file : str
+ output file name.
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+
+ Returns
+ -------
+ log_fname : str
+ log file name. Empty if task cancelled.
+ """
+ coro = self.async_export_schematic(lib_name, cell_name, out_file, **kwargs)
+ results = batch_async_task([coro])
+ if results is None:
+ return ''
+
+ ans = results[0]
+ if isinstance(ans, Exception):
+ raise ans
+ return ans
+
+ async def async_run_drc(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[bool, str]:
+ """A coroutine for running DRC.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+
+ Returns
+ -------
+ value : bool
+ True if DRC succeeds
+ log_fname : str
+ name of the DRC log file.
+ """
+ if self.checker is None:
+ raise Exception('DRC/LVS/RCX is disabled.')
+ return await self.checker.async_run_drc(lib_name, cell_name, **kwargs)
+
+ async def async_run_lvs(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[bool, str]:
+ """A coroutine for running LVS.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+
+ Returns
+ -------
+ value : bool
+ True if LVS succeeds
+ log_fname : str
+ name of the LVS log file.
+ """
+ if self.checker is None:
+ raise Exception('DRC/LVS/RCX is disabled.')
+ return await self.checker.async_run_lvs(lib_name, cell_name, **kwargs)
+
+ async def async_run_rcx(self, lib_name: str, cell_name: str, **kwargs: Any) -> Tuple[str, str]:
+ """A coroutine for running RCX.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+
+ Returns
+ -------
+ netlist : str
+ The RCX netlist file name. empty if RCX failed.
+ log_fname : str
+ RCX log file name.
+ """
+ if self.checker is None:
+ raise Exception('DRC/LVS/RCX is disabled.')
+ return await self.checker.async_run_rcx(lib_name, cell_name, **kwargs)
+
+ async def async_export_layout(self, lib_name: str, cell_name: str,
+ out_file: str, **kwargs: Any) -> str:
+ """Export layout.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+ out_file : str
+ output file name.
+ **kwargs : Any
+ optional keyword arguments. See Checker class for details.
+
+ Returns
+ -------
+ log_fname : str
+ log file name. Empty if task cancelled.
+ """
+ if self.checker is None:
+ raise Exception('layout export is disabled.')
+
+ return await self.checker.async_export_layout(lib_name, cell_name, out_file, **kwargs)
+
+ async def async_export_schematic(self, lib_name: str, cell_name: str,
+ out_file: str, **kwargs: Any) -> str:
+ if self.checker is None:
+ raise Exception('schematic export is disabled.')
+
+ return await self.checker.async_export_schematic(lib_name, cell_name, out_file, **kwargs)
+
+ def add_sch_library(self, lib_name: str) -> Path:
+ try:
+ lib_module = importlib.import_module(lib_name)
+ except ModuleNotFoundError:
+ raise ModuleNotFoundError(f'Cannot find python package {lib_name}. '
+ 'You can only add schematic library if the corresponding '
+ 'package is on your PYTHONPATH')
+ if not hasattr(lib_module, '__file__'):
+ raise ImportError(f'{lib_name} is not a normal python package '
+ '(no __file__ attribute). Did you create a proper python '
+ 'schematic library?')
+ if lib_module.__file__ is None:
+ raise ImportError(f'{lib_name} has None __file__ attribute. Did you create a proper '
+ 'python schematic library?')
+
+ lib_path: Path = Path(lib_module.__file__).parent
+ sch_lib_path = lib_path / 'schematic'
+ if not sch_lib_path.is_dir():
+ sch_lib_path.mkdir()
+ init_file = sch_lib_path / '__init__.py'
+ with open_file(init_file, 'w'):
+ pass
+
+ netlist_info_path = sch_lib_path / 'netlist_info'
+ if not netlist_info_path.is_dir():
+ netlist_info_path.mkdir()
+
+ sch_lib_path = sch_lib_path.resolve()
+ self.lib_path_map[lib_name] = str(sch_lib_path)
+ return sch_lib_path
+
+ def exclude_model(self, lib_name: str, cell_name: str) -> bool:
+ """True to exclude the given schematic generator when generating behavioral models."""
+ sch_config = self.db_config['schematic']
+ lib_list = sch_config.get('model_exclude_libraries', None)
+ lib_cell_dict = sch_config.get('model_exclude_cells', None)
+
+ if lib_list and lib_name in lib_list:
+ return True
+ if lib_cell_dict:
+ cell_list = lib_cell_dict.get(lib_name, None)
+ return cell_list and cell_name in cell_list
+ return False
diff --git a/src/bag/interface/lef.py b/src/bag/interface/lef.py
new file mode 100644
index 0000000..807d05f
--- /dev/null
+++ b/src/bag/interface/lef.py
@@ -0,0 +1,92 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module handles abstract generation
+"""
+
+from __future__ import annotations
+
+from typing import Dict, Any, Sequence
+
+import abc
+
+from pathlib import Path
+
+from .base import InterfaceBase
+
+
+class LEFInterface(InterfaceBase, abc.ABC):
+ """An abstract class that defines interface for generating LEF files.
+
+ Parameters
+ ----------
+ config : Dict[str, Any]
+ the configuration dictionary.
+ """
+
+ def __init__(self, config: Dict[str, Any]) -> None:
+ InterfaceBase.__init__(self)
+
+ self._config = config
+
+ @property
+ def config(self) -> Dict[str, Any]:
+ return self._config
+
+ @abc.abstractmethod
+ def generate_lef(self, impl_lib: str, impl_cell: str, verilog_path: Path, lef_path: Path,
+ run_path: Path, pwr_pins: Sequence[str], gnd_pins: Sequence[str],
+ clk_pins: Sequence[str], analog_pins: Sequence[str],
+ output_pins: Sequence[str], detailed_layers: Sequence[str],
+ cover_layers: Sequence[str], cell_type: str, **kwargs: Any) -> bool:
+ """Generate the LEF file.
+
+ Parameters
+ ----------
+ impl_lib : str
+ the implementation library name.
+ impl_cell : str
+ the implementation cell name.
+ verilog_path: Path
+ the verilog shell file.
+ lef_path : Path
+ the output file path.
+ run_path: Path
+ the run directory.
+ pwr_pins : Sequence[str]
+ list of power pin names.
+ gnd_pins : Sequence[str]
+ list of ground pin names.
+ clk_pins : Sequence[str]
+ list of clock pin names.
+ analog_pins : Sequence[str]
+ list of analog pin names.
+ output_pins : Sequence[str]
+ list of output pin names.
+ detailed_layers : Sequence[str]
+ list of detailed layer names.
+ cover_layers : Sequence[str]
+ list of cover layer names.
+ cell_type : str
+ the cell type.
+ **kwargs: Any
+ Tool-specific configuration parameters.
+
+ Returns
+ -------
+ success : bool
+ True if LEF generation succeeded.
+ """
+ pass
diff --git a/src/bag/interface/oa.py b/src/bag/interface/oa.py
new file mode 100644
index 0000000..0bcb25b
--- /dev/null
+++ b/src/bag/interface/oa.py
@@ -0,0 +1,291 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements all CAD database manipulations using OpenAccess plugins.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Sequence, List, Dict, Optional, Any, Tuple
+
+import os
+import shutil
+from pathlib import Path
+
+from pybag.core import PyOADatabase, make_tr_colors
+
+from ..io.file import write_file
+from ..layout.routing.grid import RoutingGrid
+from .database import DbAccess
+from .skill import handle_reply
+
+if TYPE_CHECKING:
+ from .zmqwrapper import ZMQDealer
+
+
+class OAInterface(DbAccess):
+ """OpenAccess interface between bag and Virtuoso.
+ """
+
+ def __init__(self, dealer: ZMQDealer, tmp_dir: str, db_config: Dict[str, Any],
+ lib_defs_file: str) -> None:
+
+ # Create PyOADatabase object before calling super constructor,
+ # So that schematic library yaml path is added correctly.
+ cds_lib_path: str = db_config.get('lib_def_path', '')
+ if not cds_lib_path:
+ cds_lib_path = str((Path(os.environ.get('CDSLIBPATH', '')) / 'cds.lib').resolve())
+
+ self._oa_db = PyOADatabase(cds_lib_path)
+ for lib_name in db_config['schematic']['exclude_libraries']:
+ self._oa_db.add_primitive_lib(lib_name)
+ # BAG_prim is always excluded
+ self._oa_db.add_primitive_lib('BAG_prim')
+
+ DbAccess.__init__(self, dealer, tmp_dir, db_config, lib_defs_file)
+
+ def add_sch_library(self, lib_name: str) -> None:
+ """Override; register yaml path in PyOADatabase too."""
+ lib_path = DbAccess.add_sch_library(self, lib_name)
+ self._oa_db.add_yaml_path(lib_name, str(lib_path / 'netlist_info'))
+
+ def _eval_skill(self, expr: str, input_files: Optional[Dict[str, Any]] = None,
+ out_file: Optional[str] = None) -> str:
+ """Send a request to evaluate the given skill expression.
+
+ Because Virtuoso has a limit on the input/output data (< 4096 bytes),
+ if your input is large, you need to write it to a file and have
+ Virtuoso open the file to parse it. Similarly, if you expect a
+ large output, you need to make Virtuoso write the result to the
+ file, then read it yourself. The parameters input_files and
+ out_file help you achieve this functionality.
+
+ For example, if you need to evaluate "skill_fun(arg fname)", where
+ arg is a file containing the list [1 2 3], and fname is the output
+ file name, you will call this function with:
+
+ expr = "skill_fun({arg} {fname})"
+ input_files = { "arg": [1 2 3] }
+ out_file = "fname"
+
+ the bag server will then create a temporary file for arg and fname, write
+ the list [1 2 3] into the file for arg, call Virtuoso, then read
+ the output file fname and return the result.
+
+ Parameters
+ ----------
+ expr :
+ the skill expression to evaluate.
+ input_files :
+ A dictionary of input files content.
+ out_file :
+ the output file name argument in expr.
+
+ Returns
+ -------
+ result :
+ a string representation of the result.
+
+ Raises
+ ------
+ VirtuosoException :
+ if virtuoso encounters errors while evaluating the expression.
+ """
+ request = dict(
+ type='skill',
+ expr=expr,
+ input_files=input_files,
+ out_file=out_file,
+ )
+
+ reply = self.send(request)
+ return handle_reply(reply)
+
+ def close(self) -> None:
+ DbAccess.close(self)
+ if self._oa_db is not None:
+ self._oa_db.close()
+ self._oa_db = None
+
+ def get_exit_object(self) -> Any:
+ return {'type': 'exit'}
+
+ def get_cells_in_library(self, lib_name: str) -> List[str]:
+ return self._oa_db.get_cells_in_lib(lib_name)
+
+ def create_library(self, lib_name: str, lib_path: str = '') -> None:
+ lib_path = lib_path or self.default_lib_path
+ tech_lib = self.db_config['schematic']['tech_lib']
+ self._oa_db.create_lib(lib_name, lib_path, tech_lib)
+
+ def configure_testbench(self, tb_lib: str, tb_cell: str
+ ) -> Tuple[str, List[str], Dict[str, str], Dict[str, str]]:
+ raise NotImplementedError('Not implemented yet.')
+
+ def get_testbench_info(self, tb_lib: str, tb_cell: str
+ ) -> Tuple[List[str], List[str], Dict[str, str], Dict[str, str]]:
+ raise NotImplementedError('Not implemented yet.')
+
+ def update_testbench(self, lib: str, cell: str, parameters: Dict[str, str],
+ sim_envs: Sequence[str], config_rules: Sequence[List[str]],
+ env_parameters: Sequence[List[Tuple[str, str]]]) -> None:
+ raise NotImplementedError('Not implemented yet.')
+
+ def instantiate_layout_pcell(self, lib_name: str, cell_name: str, view_name: str,
+ inst_lib: str, inst_cell: str, params: Dict[str, Any],
+ pin_mapping: Dict[str, str]) -> None:
+ raise NotImplementedError('Not implemented yet.')
+
+ def create_schematics(self, lib_name: str, sch_view: str, sym_view: str,
+ content_list: Sequence[Any]) -> None:
+ self._oa_db.implement_sch_list(lib_name, sch_view, sym_view, content_list)
+
+ def create_layouts(self, lib_name: str, view: str, content_list: Sequence[Any]) -> None:
+ self._oa_db.implement_lay_list(lib_name, view, content_list)
+
+ def close_all_cellviews(self) -> None:
+ if self.has_bag_server:
+ self._eval_skill('close_all_cellviews()')
+
+ def release_write_locks(self, lib_name: str, cell_view_list: Sequence[Tuple[str, str]]) -> None:
+ if self.has_bag_server:
+ cmd = 'release_write_locks( "%s" {cell_view_list} )' % lib_name
+ in_files = {'cell_view_list': cell_view_list}
+ self._eval_skill(cmd, input_files=in_files)
+
+ def refresh_cellviews(self, lib_name: str, cell_view_list: Sequence[Tuple[str, str]]) -> None:
+ if self.has_bag_server:
+ cmd = 'refresh_cellviews( "%s" {cell_view_list} )' % lib_name
+ in_files = {'cell_view_list': cell_view_list}
+ self._eval_skill(cmd, input_files=in_files)
+
+ def perform_checks_on_cell(self, lib_name: str, cell_name: str, view_name: str) -> None:
+ self._eval_skill(
+ 'check_and_save_cell( "{}" "{}" "{}" )'.format(lib_name, cell_name, view_name))
+
+ def create_schematic_from_netlist(self, netlist: str, lib_name: str, cell_name: str,
+ sch_view: str = '', **kwargs: Any) -> None:
+ # get netlists to copy
+ netlist_dir: Path = Path(netlist).parent
+ netlist_files = self.checker.get_rcx_netlists(lib_name, cell_name)
+ if not netlist_files:
+ # some error checking. Shouldn't be needed but just in case
+ raise ValueError('RCX did not generate any netlists')
+
+ # copy netlists to a "netlist" subfolder in the CAD database
+ cell_dir: Path = Path(self.get_cell_directory(lib_name, cell_name))
+ targ_dir = cell_dir / 'netlist'
+ targ_dir.mkdir(parents=True, exist_ok=True)
+ for fname in netlist_files:
+ # TODO: pycharm type-hint bug
+ # noinspection PyTypeChecker
+ shutil.copy(netlist_dir / fname, targ_dir)
+
+ # create symbolic link as aliases
+ symlink = targ_dir / 'netlist'
+ if symlink.exists() or symlink.is_symlink():
+ symlink.unlink()
+ symlink.symlink_to(netlist_files[0])
+
+ def get_cell_directory(self, lib_name: str, cell_name: str) -> str:
+ """Returns the directory name of the given cell.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell name.
+
+ Returns
+ -------
+ cell_dir : str
+ path to the cell directory.
+ """
+ return str(Path(self._oa_db.get_lib_path(lib_name)) / cell_name)
+
+ def create_verilog_view(self, verilog_file: str, lib_name: str, cell_name: str, **kwargs: Any
+ ) -> None:
+ # delete old verilog view
+ cmd = 'delete_cellview( "%s" "%s" "verilog" )' % (lib_name, cell_name)
+ self._eval_skill(cmd)
+ cmd = 'schInstallHDL("%s" "%s" "verilog" "%s" t)' % (lib_name, cell_name, verilog_file)
+ self._eval_skill(cmd)
+
+ def import_sch_cellview(self, lib_name: str, cell_name: str, view_name: str) -> None:
+ if lib_name not in self.lib_path_map:
+ self.add_sch_library(lib_name)
+
+ # read schematic information
+ cell_list = self._oa_db.read_sch_recursive(lib_name, cell_name, view_name)
+
+ # create python templates
+ self._create_sch_templates(cell_list)
+
+ def import_design_library(self, lib_name: str, view_name: str) -> None:
+ if lib_name not in self.lib_path_map:
+ self.add_sch_library(lib_name)
+
+ if lib_name == 'BAG_prim':
+ # reading BAG primitives library, don't need to parse YAML files,
+ # just get the cell list
+ cell_list = [(lib_name, cell) for cell in self.get_cells_in_library(lib_name)]
+ else:
+ # read schematic information
+ cell_list = self._oa_db.read_library(lib_name, view_name)
+
+ # create python templates
+ self._create_sch_templates(cell_list)
+
+ def import_gds_file(self, gds_fname: str, lib_name: str, layer_map: str, obj_map: str,
+ grid: RoutingGrid) -> None:
+ tr_colors = make_tr_colors(grid.tech_info)
+ self._oa_db.import_gds(gds_fname, lib_name, layer_map, obj_map, grid, tr_colors)
+
+ def _create_sch_templates(self, cell_list: List[Tuple[str, str]]) -> None:
+ for lib, cell in cell_list:
+ python_file = Path(self.lib_path_map[lib]) / (cell + '.py')
+ if not python_file.exists():
+ content = self.get_python_template(lib, cell,
+ self.db_config.get('prim_table', {}))
+ write_file(python_file, content, mkdir=False)
diff --git a/src/bag/interface/ocean.py b/src/bag/interface/ocean.py
new file mode 100644
index 0000000..ddf4512
--- /dev/null
+++ b/src/bag/interface/ocean.py
@@ -0,0 +1,207 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements bag's interaction with an ocean simulator.
+"""
+
+from typing import TYPE_CHECKING, Dict, Any, Optional
+
+import os
+
+import bag.io
+from .simulator import SimProcessManager
+
+if TYPE_CHECKING:
+ from .simulator import ProcInfo
+
+
+class OceanInterface(SimProcessManager):
+ """This class handles interaction with Ocean simulators.
+
+ Parameters
+ ----------
+ tmp_dir : str
+ temporary file directory for SimAccess.
+ sim_config : Dict[str, Any]
+ the simulation configuration dictionary.
+ """
+
+ def __init__(self, tmp_dir, sim_config):
+ # type: (str, Dict[str, Any]) -> None
+ """Initialize a new SkillInterface object.
+ """
+ SimProcessManager.__init__(self, tmp_dir, sim_config)
+
+ def format_parameter_value(self, param_config, precision):
+ # type: (Dict[str, Any], int) -> str
+ """Format the given parameter value as a string.
+
+ To support both single value parameter and parameter sweeps, each parameter value is
+ represented as a string instead of simple floats. This method will cast a parameter
+ configuration (which can either be a single value or a sweep) to a
+ simulator-specific string.
+
+ Parameters
+ ----------
+ param_config: Dict[str, Any]
+ a dictionary that describes this parameter value.
+
+ 4 formats are supported. This is best explained by example.
+
+ single value:
+ dict(type='single', value=1.0)
+
+ sweep a given list of values:
+ dict(type='list', values=[1.0, 2.0, 3.0])
+
+ linear sweep with inclusive start, inclusive stop, and step size:
+ dict(type='linstep', start=1.0, stop=3.0, step=1.0)
+
+ logarithmic sweep with given number of points per decade:
+ dict(type='decade', start=1.0, stop=10.0, num=10)
+
+ precision : int
+ the parameter value precision.
+
+ Returns
+ -------
+ param_str : str
+ a string representation of param_config
+ """
+
+ fmt = '%.{}e'.format(precision)
+ swp_type = param_config['type']
+ if swp_type == 'single':
+ return fmt % param_config['value']
+ elif swp_type == 'list':
+ return ' '.join((fmt % val for val in param_config['values']))
+ elif swp_type == 'linstep':
+ syntax = '{From/To}Linear:%s:%s:%s{From/To}' % (fmt, fmt, fmt)
+ return syntax % (param_config['start'], param_config['step'], param_config['stop'])
+ elif swp_type == 'decade':
+ syntax = '{From/To}Decade:%s:%s:%s{From/To}' % (fmt, '%d', fmt)
+ return syntax % (param_config['start'], param_config['num'], param_config['stop'])
+ else:
+ raise Exception('Unsupported param_config: %s' % param_config)
+
+ def _get_ocean_info(self, save_dir, script_fname, log_fname):
+ """Private helper function that launches ocean process."""
+ # get the simulation command.
+ sim_kwargs = self.sim_config['kwargs']
+ ocn_cmd = sim_kwargs['command']
+ env = sim_kwargs.get('env', None)
+ cwd = sim_kwargs.get('cwd', None)
+ sim_cmd = [ocn_cmd, '-nograph', '-replay', script_fname, '-log', log_fname]
+
+ if cwd is None:
+ # set working directory to BAG_WORK_DIR if None
+ cwd = os.environ['BAG_WORK_DIR']
+
+ # create empty log file to make sure it exists.
+ return sim_cmd, log_fname, env, cwd, save_dir
+
+ def setup_sim_process(self, lib, cell, outputs, precision, sim_tag):
+ # type: (str, str, Dict[str, str], int, Optional[str]) -> ProcInfo
+
+ sim_tag = sim_tag or 'BagSim'
+ job_options = self.sim_config['job_options']
+ init_file = self.sim_config['init_file']
+ view = self.sim_config['view']
+ state = self.sim_config['state']
+
+ # format job options as skill list of string
+ job_opt_str = "'( "
+ for key, val in job_options.items():
+ job_opt_str += '"%s" "%s" ' % (key, val)
+ job_opt_str += " )"
+
+ # create temporary save directory and log/script names
+ save_dir = bag.io.make_temp_dir(prefix='%s_data' % sim_tag, parent_dir=self.tmp_dir)
+ log_fname = os.path.join(save_dir, 'ocn_output.log')
+ script_fname = os.path.join(save_dir, 'run.ocn')
+
+ # setup ocean simulation script
+ script = self.render_file_template('run_simulation.ocn',
+ dict(
+ lib=lib,
+ cell=cell,
+ view=view,
+ state=state,
+ init_file=init_file,
+ save_dir=save_dir,
+ precision=precision,
+ sim_tag=sim_tag,
+ outputs=outputs,
+ job_opt_str=job_opt_str,
+ ))
+ bag.io.write_file(script_fname, script)
+
+ return self._get_ocean_info(save_dir, script_fname, log_fname)
+
+ def setup_load_process(self, lib, cell, hist_name, outputs, precision):
+ # type: (str, str, str, Dict[str, str], int) -> ProcInfo
+
+ init_file = self.sim_config['init_file']
+ view = self.sim_config['view']
+
+ # create temporary save directory and log/script names
+ save_dir = bag.io.make_temp_dir(prefix='%s_data' % hist_name, parent_dir=self.tmp_dir)
+ log_fname = os.path.join(save_dir, 'ocn_output.log')
+ script_fname = os.path.join(save_dir, 'run.ocn')
+
+ # setup ocean load script
+ script = self.render_file_template('load_results.ocn',
+ dict(
+ lib=lib,
+ cell=cell,
+ view=view,
+ init_file=init_file,
+ save_dir=save_dir,
+ precision=precision,
+ hist_name=hist_name,
+ outputs=outputs,
+ ))
+ bag.io.write_file(script_fname, script)
+
+ # launch ocean
+ return self._get_ocean_info(save_dir, script_fname, log_fname)
diff --git a/src/bag/interface/server.py b/src/bag/interface/server.py
new file mode 100644
index 0000000..f95b352
--- /dev/null
+++ b/src/bag/interface/server.py
@@ -0,0 +1,294 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines SkillServer, a server that handles skill/ocean requests.
+
+The SkillServer listens for skill/ocean requests from bag. Skill commands will
+be forwarded to Virtuoso for execution, and Ocean simulation requests will be handled
+by starting an Ocean subprocess. It also provides utilities for bag to query simulation
+progress and allows parallel simulation.
+
+Client-side communication:
+
+the client will always send a request object, which is a python dictionary.
+This script processes the request and sends the appropriate commands to
+Virtuoso.
+
+Virtuoso side communication:
+
+To ensure this process receives all the data from Virtuoso properly, Virtuoso
+will print a single line containing an integer indicating the number of bytes to read.
+Then, Virtuoso will print out exactly that many bytes of data, followed by
+a newline (to flush the standard input). This script handles that protocol
+and will strip the newline before sending the result back to the client.
+"""
+
+import traceback
+
+import bag.io
+
+
+def _object_to_skill_file_helper(py_obj, file_obj):
+ """Recursive helper function for object_to_skill_file
+
+ Parameters
+ ----------
+ py_obj : any
+ the object to convert.
+ file_obj : file
+ the file object to write to. Must be created with bag.io
+ package so that encodings are handled correctly.
+ """
+ # fix potential raw bytes
+ py_obj = bag.io.fix_string(py_obj)
+ if isinstance(py_obj, str):
+ # string
+ file_obj.write(py_obj)
+ elif isinstance(py_obj, float):
+ # prepend type flag
+ file_obj.write('#float {:f}'.format(py_obj))
+ elif isinstance(py_obj, bool):
+ bool_val = 1 if py_obj else 0
+ file_obj.write('#bool {:d}'.format(bool_val))
+ elif isinstance(py_obj, int):
+ # prepend type flag
+ file_obj.write('#int {:d}'.format(py_obj))
+ elif isinstance(py_obj, list) or isinstance(py_obj, tuple):
+ # a list of other objects.
+ file_obj.write('#list\n')
+ for val in py_obj:
+ _object_to_skill_file_helper(val, file_obj)
+ file_obj.write('\n')
+ file_obj.write('#end')
+ elif isinstance(py_obj, dict):
+ # disembodied property lists
+ file_obj.write('#prop_list\n')
+ for key, val in py_obj.items():
+ file_obj.write('{}\n'.format(key))
+ _object_to_skill_file_helper(val, file_obj)
+ file_obj.write('\n')
+ file_obj.write('#end')
+ else:
+ raise Exception('Unsupported python data type: %s' % type(py_obj))
+
+
+def object_to_skill_file(py_obj, file_obj):
+ """Write the given python object to a file readable by Skill.
+
+ Write a Python object to file that can be parsed into equivalent
+ skill object by Virtuoso. Currently only strings, lists, and dictionaries
+ are supported.
+
+ Parameters
+ ----------
+ py_obj : any
+ the object to convert.
+ file_obj : file
+ the file object to write to. Must be created with bag.io
+ package so that encodings are handled correctly.
+ """
+ _object_to_skill_file_helper(py_obj, file_obj)
+ file_obj.write('\n')
+
+
+bag_proc_prompt = 'BAG_PROMPT>>> '
+
+
+class SkillServer(object):
+ """A server that handles skill commands.
+
+ This server is started and run by Virtuoso. It listens for commands from bag
+ over a ZMQ socket, then passes the command to Virtuoso. It then gathers the result
+ and sends it back to bag.
+
+ Parameters
+ ----------
+ router : :class:`bag.interface.ZMQRouter`
+ the :class:`~bag.interface.ZMQRouter` object used for socket communication.
+ virt_in : file
+ the virtuoso input file. Must be created with bag.io
+ package so that encodings are handled correctly.
+ virt_out : file
+ the virtuoso output file. Must be created with bag.io
+ package so that encodings are handled correctly.
+ tmpdir : str or None
+ if given, will save all temporary files to this folder.
+ """
+
+ def __init__(self, router, virt_in, virt_out, tmpdir=None):
+ """Create a new SkillServer instance.
+ """
+ self.handler = router
+ self.virt_in = virt_in
+ self.virt_out = virt_out
+
+ # create a directory for all temporary files
+ self.dtmp = bag.io.make_temp_dir('skillTmp', parent_dir=tmpdir)
+
+ def run(self):
+ """Starts this server.
+ """
+ while not self.handler.is_closed():
+ # check if socket received message
+ if self.handler.poll_for_read(5):
+ req = self.handler.recv_obj()
+ if isinstance(req, dict) and 'type' in req:
+ if req['type'] == 'exit':
+ self.close()
+ elif req['type'] == 'skill':
+ expr, out_file = self.process_skill_request(req)
+ if expr is not None:
+ # send expression to virtuoso
+ self.send_skill(expr)
+ msg = self.recv_skill()
+ self.process_skill_result(msg, out_file)
+ else:
+ msg = '*Error* bag server error: bag request:\n%s' % str(req)
+ self.handler.send_obj(dict(type='error', data=msg))
+ else:
+ msg = '*Error* bag server error: bag request:\n%s' % str(req)
+ self.handler.send_obj(dict(type='error', data=msg))
+
+ def send_skill(self, expr):
+ """Sends expr to virtuoso for evaluation.
+
+ Parameters
+ ----------
+ expr : string
+ the skill expression.
+ """
+ self.virt_in.write(expr)
+ self.virt_in.flush()
+
+ def recv_skill(self):
+ """Receive response from virtuoso"""
+ num_bytes = int(self.virt_out.readline())
+ msg = self.virt_out.read(num_bytes)
+ if msg[-1] == '\n':
+ msg = msg[:-1]
+ return msg
+
+ def close(self):
+ """Close this server."""
+ self.handler.close()
+
+ def process_skill_request(self, request):
+ """Process the given skill request.
+
+ Based on the given request object, returns the skill expression
+ to be evaluated by Virtuoso. This method creates temporary
+ files for long input arguments and long output.
+
+ Parameters
+ ----------
+ request : dict
+ the request object.
+
+ Returns
+ -------
+ expr : str or None
+ expression to be evaluated by Virtuoso. If None, an error occurred and
+ nothing needs to be evaluated
+ out_file : str or None
+ if not None, the result will be written to this file.
+ """
+ try:
+ expr = request['expr']
+ input_files = request['input_files'] or {}
+ out_file = request['out_file']
+ except KeyError as e:
+ msg = '*Error* bag server error: %s' % str(e)
+ self.handler.send_obj(dict(type='error', data=msg))
+ return None, None
+
+ fname_dict = {}
+ # write input parameters to files
+ for key, val in input_files.items():
+ with bag.io.open_temp(prefix=key, delete=False, dir=self.dtmp) as file_obj:
+ fname_dict[key] = '"%s"' % file_obj.name
+ # noinspection PyBroadException
+ try:
+ object_to_skill_file(val, file_obj)
+ except Exception:
+ stack_trace = traceback.format_exc()
+ msg = '*Error* bag server error: \n%s' % stack_trace
+ self.handler.send_obj(dict(type='error', data=msg))
+ return None, None
+
+ # generate output file
+ if out_file:
+ with bag.io.open_temp(prefix=out_file, delete=False, dir=self.dtmp) as file_obj:
+ fname_dict[out_file] = '"%s"' % file_obj.name
+ out_file = file_obj.name
+
+ # fill in parameters to expression
+ expr = expr.format(**fname_dict)
+ return expr, out_file
+
+ def process_skill_result(self, msg, out_file=None):
+ """Process the given skill output, then send result to socket.
+
+ Parameters
+ ----------
+ msg : str
+ skill expression evaluation output.
+ out_file : str or None
+ if not None, read result from this file.
+ """
+ # read file if needed, and only if there are no errors.
+ if msg.startswith('*Error*'):
+ # an error occurred, forward error message directly
+ self.handler.send_obj(dict(type='error', data=msg))
+ elif out_file:
+ # read result from file.
+ try:
+ msg = bag.io.read_file(out_file)
+ data = dict(type='str', data=msg)
+ except IOError:
+ stack_trace = traceback.format_exc()
+ msg = '*Error* error reading file:\n%s' % stack_trace
+ data = dict(type='error', data=msg)
+ self.handler.send_obj(data)
+ else:
+ # return output from virtuoso directly
+ self.handler.send_obj(dict(type='str', data=msg))
diff --git a/src/bag/interface/skill.py b/src/bag/interface/skill.py
new file mode 100644
index 0000000..98f7539
--- /dev/null
+++ b/src/bag/interface/skill.py
@@ -0,0 +1,554 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements all CAD database manipulations using skill commands.
+"""
+
from typing import TYPE_CHECKING, Sequence, List, Dict, Optional, Any, Tuple, Set

import os
import shutil

from ..io.common import get_encoding, fix_string
from ..io.file import open_temp, read_yaml, write_file
from ..io.string import read_yaml_str
from .database import DbAccess

try:
    import cybagoa
except ImportError:
    cybagoa = None
+
+if TYPE_CHECKING:
+ from .zmqwrapper import ZMQDealer
+
+
def _dict_to_pcell_params(table):
    """Convert given parameter dictionary to pcell parameter list format.

    Parameters
    ----------
    table : dict[str, any]
        the parameter dictionary.

    Returns
    -------
    param_list : list[any]
        the Pcell parameter list; each entry is ``[name, type_tag, value]``.

    Raises
    ------
    Exception
        if a parameter value is not a bool, int, float, or string.
    """
    param_list = []
    for key, val in table.items():
        # python 2/3 compatibility: convert raw bytes to string.
        val = fix_string(val)
        # NOTE: bool must be tested before int -- bool is a subclass of int
        # in Python, so the original ordering made the "bool" branch
        # unreachable and tagged True/False as "int".
        if isinstance(val, bool):
            param_list.append([key, "bool", val])
        elif isinstance(val, float):
            param_list.append([key, "float", val])
        elif isinstance(val, str):
            # unicode string
            param_list.append([key, "string", val])
        elif isinstance(val, int):
            param_list.append([key, "int", val])
        else:
            raise Exception('Unsupported parameter %s with type: %s' % (key, type(val)))

    return param_list
+
+
def to_skill_list_str(pylist):
    """Convert given python list to a skill list string.

    Parameters
    ----------
    pylist : list[str]
        a list of string.

    Returns
    -------
    ans : str
        a string representation of the equivalent skill list.

    """
    # double-quote every element, then wrap in a skill quoted list.
    quoted_items = ['"%s"' % item for item in pylist]
    return "'( {} )".format(' '.join(quoted_items))
+
+
def handle_reply(reply):
    """Process the given reply.

    Returns the payload of a successful reply, raises ``VirtuosoException``
    for an error reply, and raises ``Exception`` for anything malformed.
    """
    # guard clause: anything that is not a dict is malformed.
    if not isinstance(reply, dict):
        raise Exception('Unknown reply format: %s' % reply)

    if reply.get('type') == 'error':
        # error replies must carry a message payload.
        if 'data' not in reply:
            raise Exception('Unknown reply format: %s' % reply)
        raise VirtuosoException(reply['data'])

    try:
        return reply['data']
    except Exception:
        raise Exception('Unknown reply format: %s' % reply)
+
+
class VirtuosoException(Exception):
    """Exception raised when Virtuoso returns an error."""

    def __init__(self, *args, **kwargs):
        # noinspection PyArgumentList
        super().__init__(*args, **kwargs)
+
+
class SkillInterface(DbAccess):
    """Skill interface between bag and Virtuoso.

    This class sends all bag's database and simulation operations to
    an external Virtuoso process, then get the result from it.
    """

    def __init__(self, dealer, tmp_dir, db_config, lib_defs_file):
        # type: (ZMQDealer, str, Dict[str, Any], str) -> None
        DbAccess.__init__(self, dealer, tmp_dir, db_config, lib_defs_file)
        # libraries whose cells are never imported as design modules.
        self.exc_libs = set(db_config['schematic']['exclude_libraries'])
        # BAG_prim is always excluded
        self.exc_libs.add('BAG_prim')

    def _eval_skill(self, expr, input_files=None, out_file=None):
        # type: (str, Optional[Dict[str, Any]], Optional[str]) -> str
        """Send a request to evaluate the given skill expression.

        Because Virtuoso has a limit on the input/output data (< 4096 bytes),
        if your input is large, you need to write it to a file and have
        Virtuoso open the file to parse it.  Similarly, if you expect a
        large output, you need to make Virtuoso write the result to the
        file, then read it yourself.  The parameters input_files and
        out_file help you achieve this functionality.

        For example, if you need to evaluate "skill_fun(arg fname)", where
        arg is a file containing the list [1 2 3], and fname is the output
        file name, you will call this function with:

        expr = "skill_fun({arg} {fname})"
        input_files = { "arg": [1 2 3] }
        out_file = "fname"

        the bag server will then create a temporary file for arg and fname,
        write the list [1 2 3] into the file for arg, call Virtuoso, then
        read the output file fname and return the result.

        Parameters
        ----------
        expr : string
            the skill expression to evaluate.
        input_files : dict[string, any] or None
            A dictionary of input files content.
        out_file : string or None
            the output file name argument in expr.

        Returns
        -------
        result : str
            a string representation of the result.

        Raises
        ------
        :class: `.VirtuosoException` :
            if virtuoso encounters errors while evaluating the expression.
        """
        request = dict(
            type='skill',
            expr=expr,
            input_files=input_files,
            out_file=out_file,
        )

        reply = self.send(request)
        return handle_reply(reply)

    def get_exit_object(self):
        # type: () -> Any
        """Returns the request object used to shut down the Virtuoso server."""
        return {'type': 'exit'}

    def get_cells_in_library(self, lib_name):
        # type: (str) -> List[str]
        """Returns the names of all cells in the given library."""
        cmd = 'get_cells_in_library_file( "%s" {cell_file} )' % lib_name
        # cell names are written to a temp file to avoid Virtuoso's output
        # size limit; names come back whitespace-separated.
        return self._eval_skill(cmd, out_file='cell_file').split()

    def create_library(self, lib_name, lib_path=''):
        # type: (str, str) -> None
        """Create the given library in Virtuoso if it does not exist yet."""
        lib_path = lib_path or self.default_lib_path
        tech_lib = self.db_config['schematic']['tech_lib']
        self._eval_skill(
            'create_or_erase_library("%s" "%s" "%s" nil)' % (lib_name, tech_lib, lib_path))

    def create_implementation(self, lib_name, template_list, change_list, lib_path=''):
        # type: (str, Sequence[Any], Sequence[Any], str) -> None
        """Create concrete schematics from the given templates and changes.

        Uses cybagoa (direct OpenAccess writes) when available and enabled in
        the configuration; otherwise the schematics are copied and modified
        inside Virtuoso via a skill command.
        """
        lib_path = lib_path or self.default_lib_path
        tech_lib = self.db_config['schematic']['tech_lib']

        if cybagoa is not None and self.db_config['schematic'].get('use_cybagoa', False):
            cds_lib_path = os.environ.get('CDS_LIB_PATH', './cds.lib')
            sch_name = 'schematic'
            sym_name = 'symbol'
            encoding = get_encoding()
            # release write locks
            cell_view_list = []
            for _, _, cell_name in template_list:
                cell_view_list.append((cell_name, sch_name))
                cell_view_list.append((cell_name, sym_name))
            self.release_write_locks(lib_name, cell_view_list)

            # create library in case it doesn't exist
            self.create_library(lib_name, lib_path)

            # write schematic
            with cybagoa.PyOASchematicWriter(cds_lib_path, lib_name, encoding) as writer:
                for temp_info, change_info in zip(template_list, change_list):
                    sch_cell = cybagoa.PySchCell(temp_info[0], temp_info[1], temp_info[2],
                                                 encoding)
                    for old_pin, new_pin in change_info['pin_map']:
                        sch_cell.rename_pin(old_pin, new_pin)
                    for inst_name, rinst_list in change_info['inst_list']:
                        sch_cell.add_inst(inst_name, lib_name, rinst_list)
                    writer.add_sch_cell(sch_cell)
                writer.create_schematics(sch_name, sym_name)

            # schematics already written via OA; tell skill not to copy again.
            copy = 'nil'
        else:
            copy = "'t"

        in_files = {'template_list': template_list,
                    'change_list': change_list}
        sympin = to_skill_list_str(self.db_config['schematic']['sympin'])
        ipin = to_skill_list_str(self.db_config['schematic']['ipin'])
        opin = to_skill_list_str(self.db_config['schematic']['opin'])
        iopin = to_skill_list_str(self.db_config['schematic']['iopin'])
        simulators = to_skill_list_str(self.db_config['schematic']['simulators'])
        cmd = ('create_concrete_schematic( "%s" "%s" "%s" {template_list} '
               '{change_list} %s %s %s %s %s %s)' % (lib_name, tech_lib, lib_path,
                                                     sympin, ipin, opin, iopin,
                                                     simulators, copy))

        self._eval_skill(cmd, input_files=in_files)

    def configure_testbench(self, tb_lib, tb_cell):
        # type: (str, str) -> Tuple[str, List[str], Dict[str, str], Dict[str, str]]
        """Instantiate a testbench and return its simulation settings."""
        tb_config = self.db_config['testbench']

        cmd = ('instantiate_testbench("{tb_cell}" "{targ_lib}" ' +
               '"{config_libs}" "{config_views}" "{config_stops}" ' +
               '"{default_corner}" "{corner_file}" {def_files} ' +
               '"{tech_lib}" {result_file})')
        cmd = cmd.format(tb_cell=tb_cell,
                         targ_lib=tb_lib,
                         config_libs=tb_config['config_libs'],
                         config_views=tb_config['config_views'],
                         config_stops=tb_config['config_stops'],
                         default_corner=tb_config['default_env'],
                         corner_file=tb_config['env_file'],
                         def_files=to_skill_list_str(tb_config['def_files']),
                         tech_lib=self.db_config['schematic']['tech_lib'],
                         result_file='{result_file}')
        # NOTE(review): _eval_skill with out_file returns the file CONTENTS
        # (see the server's process_skill_result); confirm read_yaml accepts
        # raw yaml text here rather than a file path.
        output = read_yaml(self._eval_skill(cmd, out_file='result_file'))
        return tb_config['default_env'], output['corners'], output['parameters'], output['outputs']

    def get_testbench_info(self, tb_lib, tb_cell):
        # type: (str, str) -> Tuple[List[str], List[str], Dict[str, str], Dict[str, str]]
        """Returns enabled corners, corners, parameters, and outputs of a testbench."""
        cmd = 'get_testbench_info("{tb_lib}" "{tb_cell}" {result_file})'
        cmd = cmd.format(tb_lib=tb_lib,
                         tb_cell=tb_cell,
                         result_file='{result_file}')
        output = read_yaml(self._eval_skill(cmd, out_file='result_file'))
        return output['enabled_corners'], output['corners'], output['parameters'], output['outputs']

    def update_testbench(self,  # type: SkillInterface
                         lib,  # type: str
                         cell,  # type: str
                         parameters,  # type: Dict[str, str]
                         sim_envs,  # type: List[str]
                         config_rules,  # type: List[List[str]]
                         env_parameters,  # type: List[List[Tuple[str, str]]]
                         ):
        # type: (...) -> None
        """Update testbench simulation environments, parameters, and config rules."""
        cmd = 'modify_testbench("%s" "%s" {conf_rules} ' \
              '{run_opts} {sim_envs} {params} {env_params})' % (lib, cell)
        in_files = {'conf_rules': config_rules,
                    'run_opts': [],
                    'sim_envs': sim_envs,
                    'params': list(parameters.items()),
                    # pair each simulation environment with its parameter list.
                    'env_params': list(zip(sim_envs, env_parameters)),
                    }
        self._eval_skill(cmd, input_files=in_files)

    def instantiate_schematic(self, lib_name, content_list, lib_path='',
                              sch_view='schematic', sym_view='symbol'):
        # type: (str, Sequence[Any], str, str, str) -> None
        """Create schematics from the given content list.  Not supported yet."""
        raise NotImplementedError('Not implemented yet.')

    def instantiate_layout_pcell(self, lib_name, cell_name, view_name,
                                 inst_lib, inst_cell, params, pin_mapping):
        # type: (str, str, str, str, str, Dict[str, Any], Dict[str, str]) -> None
        """Create a layout cellview containing a single pcell instance."""
        # create library in case it doesn't exist
        self.create_library(lib_name)

        # convert parameter dictionary to pcell params list format
        param_list = _dict_to_pcell_params(params)

        cmd = ('create_layout_with_pcell( "%s" "%s" "%s" "%s" "%s"'
               '{params} {pin_mapping} )' % (lib_name, cell_name,
                                             view_name, inst_lib, inst_cell))
        in_files = {'params': param_list, 'pin_mapping': list(pin_mapping.items())}
        self._eval_skill(cmd, input_files=in_files)

    def instantiate_layout(self, lib_name, content_list, lib_path='', view='layout'):
        # type: (str, Sequence[Any], str, str) -> None
        """Create layout cellviews from the given content list."""
        # create library in case it doesn't exist
        self.create_library(lib_name)

        # convert parameter dictionary to pcell params list format
        new_layout_list = []
        for info_list in content_list:
            new_inst_list = []
            for inst in info_list[1]:
                if 'params' in inst:
                    # copy before mutating so the caller's data is untouched.
                    inst = inst.copy()
                    inst['params'] = _dict_to_pcell_params(inst['params'])
                new_inst_list.append(inst)

            new_info_list = info_list[:]
            new_info_list[1] = new_inst_list
            new_layout_list.append(new_info_list)

        tech_lib = self.db_config['schematic']['tech_lib']
        cmd = 'create_layout( "%s" "%s" "%s" {layout_list} )' % (lib_name, view, tech_lib)
        in_files = {'layout_list': new_layout_list}
        self._eval_skill(cmd, input_files=in_files)

    def release_write_locks(self, lib_name, cell_view_list):
        # type: (str, Sequence[Tuple[str, str]]) -> None
        """Release Virtuoso write locks on the given cellviews."""
        cmd = 'release_write_locks( "%s" {cell_view_list} )' % lib_name
        in_files = {'cell_view_list': cell_view_list}
        self._eval_skill(cmd, input_files=in_files)

    def refresh_cellviews(self, lib_name, cell_view_list):
        # type: (str, Sequence[Tuple[str, str]]) -> None
        """Refresh the given cellviews in Virtuoso."""
        cmd = 'refresh_cellviews( "%s" {cell_view_list} )' % lib_name
        in_files = {'cell_view_list': cell_view_list}
        self._eval_skill(cmd, input_files=in_files)

    def perform_checks_on_cell(self, lib_name, cell_name, view_name):
        # type: (str, str, str) -> None
        """Run check-and-save on the given cellview."""
        self._eval_skill(
            'check_and_save_cell( "{}" "{}" "{}" )'.format(lib_name, cell_name, view_name))

    def create_schematic_from_netlist(self, netlist, lib_name, cell_name,
                                      sch_view='', **kwargs):
        # type: (str, str, str, str, **Any) -> None
        """Create a schematic from a netlist.

        This is mainly used to create extracted schematic from an extracted netlist.

        Parameters
        ----------
        netlist : str
            the netlist file name.
        lib_name : str
            library name.
        cell_name : str
            cell_name
        sch_view : Optional[str]
            schematic view name.  The default value is implementation dependent.
        **kwargs : Any
            additional implementation-dependent arguments.
        """
        calview_config = self.db_config.get('calibreview', None)
        use_calibreview = self.db_config.get('use_calibreview', True)
        if calview_config is not None and use_calibreview:
            # create calibre view from extraction netlist
            cell_map = calview_config['cell_map']
            sch_view = sch_view or calview_config['view_name']

            # create calibre view config file
            tmp_params = dict(
                netlist_file=netlist,
                lib_name=lib_name,
                cell_name=cell_name,
                calibre_cellmap=cell_map,
                view_name=sch_view,
            )
            content = self.render_file_template('calibreview_setup.txt', tmp_params)
            with open_temp(prefix='calview', dir=self.tmp_dir, delete=False) as f:
                fname = f.name
                f.write(content)

            # delete old calibre view
            cmd = 'delete_cellview( "%s" "%s" "%s" )' % (lib_name, cell_name, sch_view)
            self._eval_skill(cmd)
            # make extracted schematic
            cmd = 'mgc_rve_load_setup_file( "%s" )' % fname
            self._eval_skill(cmd)
        else:
            # get netlists to copy
            netlist_dir = os.path.dirname(netlist)
            netlist_files = self.checker.get_rcx_netlists(lib_name, cell_name)
            if not netlist_files:
                # some error checking.  Shouldn't be needed but just in case
                raise ValueError('RCX did not generate any netlists')

            # copy netlists to a "netlist" subfolder in the CAD database
            cell_dir = self.get_cell_directory(lib_name, cell_name)
            targ_dir = os.path.join(cell_dir, 'netlist')
            os.makedirs(targ_dir, exist_ok=True)
            for fname in netlist_files:
                shutil.copy(os.path.join(netlist_dir, fname), targ_dir)

            # create symbolic link as aliases
            symlink = os.path.join(targ_dir, 'netlist')
            try:
                os.remove(symlink)
            except FileNotFoundError:
                pass
            os.symlink(netlist_files[0], symlink)

    def get_cell_directory(self, lib_name, cell_name):
        # type: (str, str) -> str
        """Returns the directory name of the given cell.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.

        Returns
        -------
        cell_dir : str
            path to the cell directory.
        """
        # use yaml.load to remove outermost quotation marks
        lib_dir = read_yaml_str(self._eval_skill('get_lib_directory( "%s" )' % lib_name))  # type: str
        if not lib_dir:
            raise ValueError('Library %s not found.' % lib_name)
        return os.path.join(lib_dir, cell_name)

    def create_verilog_view(self, verilog_file, lib_name, cell_name, **kwargs):
        # type: (str, str, str, **Any) -> None
        """Install the given Verilog file as the cell's "verilog" view."""
        # delete old verilog view
        cmd = 'delete_cellview( "%s" "%s" "verilog" )' % (lib_name, cell_name)
        self._eval_skill(cmd)
        cmd = 'schInstallHDL("%s" "%s" "verilog" "%s" t)' % (lib_name, cell_name, verilog_file)
        self._eval_skill(cmd)

    def import_sch_cellview(self, lib_name, cell_name, view_name):
        # type: (str, str, str) -> None
        """Import a single schematic cellview as a design module."""
        self._import_design(lib_name, cell_name, view_name, set())

    def import_design_library(self, lib_name, view_name):
        # type: (str, str) -> None
        """Import all cells in the given library as design modules."""
        imported_cells = set()  # type: Set[str]
        for cell_name in self.get_cells_in_library(lib_name):
            self._import_design(lib_name, cell_name, view_name, imported_cells)

    def _import_design(self, lib_name, cell_name, view_name, imported_cells):
        # type: (str, str, str, Set[str]) -> None
        """Recursive helper for import_design_library.

        Parses the schematic template of the given cell, writes its netlist
        information yaml file and (if missing) its design module python file,
        then recursively imports all non-excluded child instances.
        """
        # check if we already imported this schematic
        key = '%s__%s' % (lib_name, cell_name)
        if key in imported_cells:
            return
        imported_cells.add(key)

        # create root directory if missing
        # NOTE(review): dsn_db and new_lib_path are not defined anywhere in
        # this scope (they look like parameters lost in a refactor); this
        # code will raise NameError when executed -- confirm their intended
        # source before relying on this method.
        root_path = dsn_db.get_library_path(lib_name)
        if not root_path:
            root_path = new_lib_path
            dsn_db.append_library(lib_name, new_lib_path)

        package_path = os.path.join(root_path, lib_name)
        python_file = os.path.join(package_path, '%s.py' % cell_name)
        yaml_file = os.path.join(package_path, 'netlist_info', '%s.yaml' % cell_name)
        yaml_dir = os.path.dirname(yaml_file)
        if not os.path.exists(yaml_dir):
            os.makedirs(yaml_dir)
        # make sure the library package is importable.
        # fix: use write_file from bag.io.file; the module "bag" is not
        # imported in this file, so bag.io.write_file was a NameError.
        write_file(os.path.join(package_path, '__init__.py'), '\n',
                   mkdir=False)

        # update netlist file
        content = self.parse_schematic_template(lib_name, cell_name)
        sch_info = read_yaml_str(content)
        try:
            write_file(yaml_file, content)
        except IOError:
            print('Warning: cannot write to %s.' % yaml_file)

        # generate new design module file if necessary.
        if not os.path.exists(python_file):
            content = self.get_python_template(lib_name, cell_name,
                                               self.db_config.get('prim_table', {}))
            write_file(python_file, content + '\n', mkdir=False)

        # recursively import all children
        for inst_name, inst_attrs in sch_info['instances'].items():
            inst_lib_name = inst_attrs['lib_name']
            if inst_lib_name not in self.exc_libs:
                inst_cell_name = inst_attrs['cell_name']
                # fix: pass arguments matching this method's signature; the
                # original call passed dsn_db/new_lib_path and dropped
                # view_name, which could never have worked.
                self._import_design(inst_lib_name, inst_cell_name, view_name,
                                    imported_cells)

    def parse_schematic_template(self, lib_name, cell_name):
        # type: (str, str) -> str
        """Parse the given schematic template.

        Parameters
        ----------
        lib_name : str
            name of the library.
        cell_name : str
            name of the cell.

        Returns
        -------
        template : str
            the content of the netlist structure file.
        """
        cmd = 'parse_cad_sch( "%s" "%s" {netlist_info} )' % (lib_name, cell_name)
        return self._eval_skill(cmd, out_file='netlist_info')
diff --git a/src/bag/interface/templates/Module.pyi b/src/bag/interface/templates/Module.pyi
new file mode 100644
index 0000000..c6a9dbc
--- /dev/null
+++ b/src/bag/interface/templates/Module.pyi
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+
+from typing import Dict, Any
+
+import pkg_resources
+from pathlib import Path
+
+from bag.design.module import Module
+from bag.design.database import ModuleDB
+from bag.util.immutable import Param
+
+
# noinspection PyPep8Naming
class {{ lib_name }}__{{ cell_name }}(Module):
    """Module for library {{ lib_name }} cell {{ cell_name }}.

    Fill in high level description here.
    """

    # path to the netlist structure yaml file generated during schematic import.
    yaml_file = pkg_resources.resource_filename(__name__,
                                                str(Path('netlist_info',
                                                         '{{ cell_name }}.yaml')))

    def __init__(self, database: ModuleDB, params: Param, **kwargs: Any) -> None:
        Module.__init__(self, self.yaml_file, database, params, **kwargs)

    @classmethod
    def get_params_info(cls) -> Dict[str, str]:
        """Returns a dictionary from parameter names to descriptions.

        Returns
        -------
        param_info : Dict[str, str]
            dictionary from parameter names to descriptions.
        """
        return dict(
        )

    def design(self) -> None:
        """To be overridden by subclasses to design this module.

        This method should fill in values for all parameters in
        self.parameters. To design instances of this module, you can
        call their design() method or any other ways you coded.

        To modify schematic structure, call:

        rename_pin()
        delete_instance()
        replace_instance_master()
        reconnect_instance_terminal()
        restore_instance()
        array_instance()
        """
        pass
diff --git a/src/bag/interface/templates/PrimModule.pyi b/src/bag/interface/templates/PrimModule.pyi
new file mode 100644
index 0000000..8c258c4
--- /dev/null
+++ b/src/bag/interface/templates/PrimModule.pyi
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+
+from typing import Any
+
+
+from bag.design.module import {{ module_name }}
+from bag.design.database import ModuleDB
+from bag.util.immutable import Param
+
+
# noinspection PyPep8Naming
class {{ lib_name }}__{{ cell_name }}({{ module_name }}):
    """design module for {{ lib_name }}__{{ cell_name }}.
    """

    def __init__(self, database: ModuleDB, params: Param, **kwargs: Any) -> None:
        # primitive cells have no netlist yaml file; pass an empty path.
        {{ module_name }}.__init__(self, '', database, params, **kwargs)
diff --git a/src/bag/interface/templates/abstract.replay b/src/bag/interface/templates/abstract.replay
new file mode 100644
index 0000000..07c5415
--- /dev/null
+++ b/src/bag/interface/templates/abstract.replay
@@ -0,0 +1,27 @@
; Cadence Abstract replay script (Jinja2 template, rendered by BAG).
; Imports a Verilog netlist for {{ cell_name }} in library {{ lib_name }}
; and exports a LEF abstract.

; run Abstract in non-interactive skill mode and select the "Block" bin.
absSkillMode()
absDisableUpdate()
absDeselectBinFrom("Core" "Core")
absSelectBinFrom("Block" "Block")
absEnableUpdate()
absSetLibrary("{{ lib_name }}")

; load the pre-generated abstract options file.
absSetOption("ImportOptionsFile" "{{ options_file }}")
absImportOptions()

; point logical import and LEF export at the rendered file names.
absSetOption("SelectName" "{{ cell_name }}")
absSetOption("LEFLibraryName" "{{ lib_name }}")
absSetOption("ImportLogicalFiles" "{{ verilog_file }}")
absSetOption("ImportVerilogFiles" "{{ verilog_file }}")
absSetOption("ExportLEFFile" "{{ lef_file }}")

absImportLogical()

; select only the target cell for abstract generation.
absDeselectCells()
absSelectCellFrom("{{ cell_name }}" "{{ cell_name }}")
absSelect()

; run the pins/extract/abstract flow, write the LEF, and quit.
absPins()
absExtract()
absAbstract()
absExportLEF()
absExit()
diff --git a/src/bag/interface/templates/calibreview_setup.txt b/src/bag/interface/templates/calibreview_setup.txt
new file mode 100644
index 0000000..45833ce
--- /dev/null
+++ b/src/bag/interface/templates/calibreview_setup.txt
@@ -0,0 +1,19 @@
+calibre_view_netlist_file : {{ netlist_file }}
+output_library : {{ lib_name }}
+schematic_library : {{ lib_name }}
+cell_name : {{ cell_name }}
+cellmap_file : {{ calibre_cellmap }}
+calibreview_log_file : ./calview.log
+calibreview_name : {{ view_name }}
+calibreview_type : schematic
+create_terminals : if_matching
+preserve_device_case : on
+execute_callbacks : off
+reset_properties : (m=1)
+magnify_devices_by : 1
+magnify_parasitics_by : 1
+device_placement : arrayed
+parasitic_placement : arrayed
+show_parasitic_polygons : off
+open_calibreview : don't_open
+generate_spectre_netlist : off
diff --git a/src/bag/interface/templates/load_results.ocn b/src/bag/interface/templates/load_results.ocn
new file mode 100644
index 0000000..1861a9e
--- /dev/null
+++ b/src/bag/interface/templates/load_results.ocn
@@ -0,0 +1,197 @@
; Template parameters filled in by BAG before this OCEAN script is run.
lib = "{{ lib }}"
cell = "{{ cell }}"
view = "{{ view }}"
init_file = "{{ init_file }}"
save_dir = "{{ save_dir }}"
precision = {{ precision }}
hist_name = "{{ hist_name }}"

; initialize environment variables
; init_file is optional; only load it when a non-empty path is given.
when( strlen(init_file) > 0
    load(init_file)
)
+
; save parametric waveform values as a flattened list.
; wave     : waveform (possibly a parametric family) to dump.
; fmt      : printf format for a single real number at the desired precision.
; line_fmt : printf format for one real-valued output line.
; fhandle  : open output file handle; values are appended one per line.
; Recurses over the sweep dimensions so the innermost 1-D waveforms are
; written out in sweep order.
procedure( save_param_wave_values(wave fmt line_fmt fhandle)
    let( (vec wave_cls tmp_val)
        if( drIsWaveform(wave) then
            ; 1D waveform, simply print all values
            vec = drGetWaveformYVec(wave)
            ; class of the first element decides real vs complex formatting.
            wave_cls = className(classOf(drGetElem(vec 0)))
            if( wave_cls == 'adtComplex then
                ; print complex
                for( i 0 drVectorLength(vec) - 1
                    tmp_val = drGetElem(vec i)
                    if( imag(tmp_val) < 0 then
                        ; fix for negative imaginary part.
                        sprintf(line_fmt "%s%sj\n" fmt fmt)
                    else
                        sprintf(line_fmt "%s+%sj\n" fmt fmt)
                    )
                    fprintf(fhandle line_fmt real(tmp_val) imag(tmp_val))
                )
            else
                ; print real value
                for( i 0 drVectorLength(vec) - 1
                    fprintf(fhandle line_fmt drGetElem(vec i))
                )
            )
        else
            ; parametric waveform, recurse
            foreach(val sweepValues(wave)
                save_param_wave_values(famValue(wave val) fmt line_fmt fhandle)
            )
        )
    )
)
+
+
; define save functions
; save a waveform to file.
; the given waveform will be saved to "<directory>/<var_name>.data" as a
; flattened 1D array.
; the sweep parameter names of this waveform will be saved to the file
; "<directory>/<var_name>.sweep", and the values of each parameter will be
; saved to the file "<directory>/<param>.info".
; data_list_struct is a tconc struct of (waveform_name, waveform_data_file_handle) pairs.
procedure( save_waveform(directory var_name wave precision data_list_struct)
    let( (fmt line_fmt wave_cls entry data_file sweep_file fhandle
          name_list val_list sweep_df iter_wave)
        ; printf formats for a single value at the requested precision.
        sprintf(fmt "%%.%de" precision)
        sprintf(line_fmt "%s\n" fmt)
        wave_cls = className(classOf(wave))

        if( not( entry = assoc( var_name cdar(data_list_struct) ) ) then
            ; first time saving this variable
            sprintf(data_file "%s/%s.data" directory var_name)
            sprintf(sweep_file "%s/%s.sweep" directory var_name)
            cond(
                ( or( drIsWaveform(wave) drIsParamWave(wave) )
                    ; save sweep names
                    fhandle = outfile( sweep_file "w" )
                    name_list = sweepNames(wave)
                    foreach(swp_name name_list
                        fprintf(fhandle "%s\n" swp_name)
                    )
                    close(fhandle)

                    ; save sweep values
                    iter_wave = wave
                    foreach(swp_name name_list
                        ; save outer-most sweep values
                        val_list = sweepValues(iter_wave)
                        sprintf(sweep_df "%s/%s.info" directory swp_name)
                        ; each parameter's value file is written only once
                        ; across all saved outputs.
                        unless( isFile(sweep_df)
                            fhandle = outfile( sweep_df "w" )
                            foreach(val val_list
                                fprintf(fhandle line_fmt val)
                            )
                            close(fhandle)
                        )
                        ; remove outer sweep
                        when( drIsParamWave(iter_wave)
                            iter_wave = famValue(iter_wave car(val_list))
                        )
                    )

                    fhandle = outfile( data_file "w" )
                )
                ( or( wave_cls == 'flonum wave_cls == 'fixnum wave_cls == 'adtComplex )
                    ; scalar data, make empty sweep file
                    fhandle = outfile( sweep_file "w")
                    close(fhandle)
                    fhandle = outfile( data_file "w" )
                )
                ( t
                    ; unsupported type
                    error("Unsupported data for output %s: %A\n" var_name wave)
                )
            )
            ; remember the open data-file handle for this variable.
            tconc( data_list_struct list(var_name fhandle) )
        else
            ; variable seen before; append to its existing open handle.
            fhandle = cadr(entry)
        )

        ; append data to file
        if( or( drIsWaveform(wave) drIsParamWave(wave) ) then
            save_param_wave_values(wave fmt line_fmt fhandle)
        else
            ; print single point value
            if( wave_cls == 'adtComplex then
                ; print complex
                if( imag(wave) < 0 then
                    ; fix for negative imaginary part.
                    sprintf(line_fmt "%s%sj\n" fmt fmt)
                else
                    sprintf(line_fmt "%s+%sj\n" fmt fmt)
                )
                fprintf(fhandle line_fmt real(wave) imag(wave))
            else
                fprintf(fhandle line_fmt wave)
            )
        )
        't
    )
)
+
ocnSetXLMode()
ocnxlTargetCellView(lib cell view)

; load result database
rdb = axlReadHistoryResDB(hist_name)
unless( rdb
    error("Cannot find database associated with name %s" hist_name)
)
point_list = rdb->points()

sprintf(sweep_fname "%s/sweep.info" save_dir)
sweep_f = outfile( sweep_fname "w" )

; write sweep parameters title
; header row is "corner" followed by the swept parameter names; the corner
; model spec and temperature are excluded (recorded via the corner itself).
when( point_list
    point = car(point_list)
    test_list = point->tests()
    when( test_list
        corner = car(test_list)->cornerName
        par_names = setof( name point->params(?corner corner ?sortBy 'name)~>name
                           and( (name != "corModelSpec") (name != "temperature") ) )

        fprintf(sweep_f "corner ")
        fprintf(sweep_f "%s\n" buildString( par_names " " ))
    )
)

; iterate through each design point and save data.
data_list_struct = tconc(nil 0)
total_points = length(point_list)
cur_idx = 1
foreach(point point_list
    printf("*Info* saving process: %d/%d\n" cur_idx total_points)
    cur_idx = cur_idx + 1
    foreach(test point->tests()
        ; write param values to file.
        corner = test->cornerName
        params = setof(par point->params(?corner corner ?sortBy 'name)
                       and( (par->name != "corModelSpec") (par->name != "temperature") ) )
        param_vals = mapcar( lambda( (par) par->valueAsString(?digits precision ?notation 'eng) ) params )
        fprintf(sweep_f "%s " corner)
        fprintf(sweep_f "%s\n" buildString( param_vals " " ))

        ; open results
        openResults(test->resultsDir)

        ; dump every requested output expression for this (point, test) pair.
        {% for var, expr in outputs.items() %}
        tmp = {{ expr }}
        save_waveform( save_dir "{{ var }}" tmp precision data_list_struct )
        {% endfor %}

    )
)

; close opened files
close(sweep_f)
foreach( entry cdar(data_list_struct)
    close(cadr(entry))
)

ocnxlEndXLMode()

exit()
diff --git a/src/bag/interface/templates/run_simulation.ocn b/src/bag/interface/templates/run_simulation.ocn
new file mode 100644
index 0000000..57ea61d
--- /dev/null
+++ b/src/bag/interface/templates/run_simulation.ocn
@@ -0,0 +1,205 @@
+lib = "{{ lib }}"
+cell = "{{ cell }}"
+view = "{{ view }}"
+state = "{{ state }}"
+init_file = "{{ init_file }}"
+save_dir = "{{ save_dir }}"
+precision = {{ precision }}
+sim_tag = "{{ sim_tag }}"
+job_opt_list = {{ job_opt_str }}
+
+; initialize environment variables
+when( strlen(init_file) > 0
+ load(init_file)
+)
+
+; save parametric waveform values as a flattened list.
+procedure( save_param_wave_values(wave fmt line_fmt fhandle)
+ let( (vec wave_cls tmp_val)
+ if( drIsWaveform(wave) then
+ ; 1D waveform, simply print all values
+ vec = drGetWaveformYVec(wave)
+ wave_cls = className(classOf(drGetElem(vec 0)))
+ if( wave_cls == 'adtComplex then
+ ; print complex
+ for( i 0 drVectorLength(vec) - 1
+ tmp_val = drGetElem(vec i)
+ if( imag(tmp_val) < 0 then
+ ; fix for negative imaginary part.
+ sprintf(line_fmt "%s%sj\n" fmt fmt)
+ else
+ sprintf(line_fmt "%s+%sj\n" fmt fmt)
+ )
+ fprintf(fhandle line_fmt real(tmp_val) imag(tmp_val))
+ )
+ else
+ ; print real value
+ for( i 0 drVectorLength(vec) - 1
+ fprintf(fhandle line_fmt drGetElem(vec i))
+ )
+ )
+ else
+ ; parametric waveform, recurse
+ foreach(val sweepValues(wave)
+ save_param_wave_values(famValue(wave val) fmt line_fmt fhandle)
+ )
+ )
+ )
+)
+
+
+; define save functions
+; save a waveform to file.
+; the given waveform will be saved to the file "<directory>/<var_name>.data" as a flattened 1D array.
+; the sweep parameter names of this waveform will be saved to the file "<directory>/<var_name>.sweep",
+; and the values of each parameter will be saved to the file "<directory>/<param_name>.info".
+; data_list_struct is a tconc struct of (waveform_name, waveform_data_file_handle) pairs.
+procedure( save_waveform(directory var_name wave precision data_list_struct)
+ let( (fmt line_fmt wave_cls entry data_file sweep_file fhandle
+ name_list val_list sweep_df iter_wave)
+ sprintf(fmt "%%.%de" precision)
+ sprintf(line_fmt "%s\n" fmt)
+ wave_cls = className(classOf(wave))
+
+ if( not( entry = assoc( var_name cdar(data_list_struct) ) ) then
+ ; first time saving this variable
+ sprintf(data_file "%s/%s.data" directory var_name)
+ sprintf(sweep_file "%s/%s.sweep" directory var_name)
+ cond(
+ ( or( drIsWaveform(wave) drIsParamWave(wave) )
+ ; save sweep names
+ fhandle = outfile( sweep_file "w" )
+ name_list = sweepNames(wave)
+ foreach(swp_name name_list
+ fprintf(fhandle "%s\n" swp_name)
+ )
+ close(fhandle)
+
+ ; save sweep values
+ iter_wave = wave
+ foreach(swp_name name_list
+                        ; save outermost sweep values
+ val_list = sweepValues(iter_wave)
+ sprintf(sweep_df "%s/%s.info" directory swp_name)
+ unless( isFile(sweep_df)
+ fhandle = outfile( sweep_df "w" )
+ foreach(val val_list
+ fprintf(fhandle line_fmt val)
+ )
+ close(fhandle)
+ )
+ ; remove outer sweep
+ when( drIsParamWave(iter_wave)
+ iter_wave = famValue(iter_wave car(val_list))
+ )
+ )
+
+ fhandle = outfile( data_file "w" )
+ )
+ ( or( wave_cls == 'flonum wave_cls == 'fixnum wave_cls == 'adtComplex )
+ ; scalar data, make empty sweep file
+ fhandle = outfile( sweep_file "w")
+ close(fhandle)
+ fhandle = outfile( data_file "w" )
+ )
+ ( t
+ ; unsupported type
+ error("Unsupported data for output %s: %A\n" var_name wave)
+ )
+ )
+ tconc( data_list_struct list(var_name fhandle) )
+ else
+ fhandle = cadr(entry)
+ )
+
+ ; append data to file
+ if( or( drIsWaveform(wave) drIsParamWave(wave) ) then
+ save_param_wave_values(wave fmt line_fmt fhandle)
+ else
+ ; print single point value
+ if( wave_cls == 'adtComplex then
+ ; print complex
+ if( imag(wave) < 0 then
+ ; fix for negative imaginary part.
+ sprintf(line_fmt "%s%sj\n" fmt fmt)
+ else
+ sprintf(line_fmt "%s+%sj\n" fmt fmt)
+ )
+ fprintf(fhandle line_fmt real(wave) imag(wave))
+ else
+ fprintf(fhandle line_fmt wave)
+ )
+ )
+ 't
+ )
+)
+
+ocnSetXLMode()
+ocnxlTargetCellView(lib cell view)
+ocnxlLoadSetupState(state 'overwrite)
+ocnxlHistoryPrefix(sim_tag)
+ocnxlJobSetup(job_opt_list)
+printf("*Info* Creating netlist...\n")
+createNetlist( ?recreateAll t ?display nil )
+printf("*Info* Starting simulation...\n")
+ocnxlRun(?mode 'sweepAndCorners ?nominalCornerEnabled nil ?allCornersEnabled 't
+ ?allSweepsEnabled 't)
+
+; load result database
+hist_name = ocnxlGetCurrentHistory()
+rdb = axlReadHistoryResDB(hist_name)
+point_list = rdb->points()
+
+sprintf(sweep_fname "%s/sweep.info" save_dir)
+sweep_f = outfile( sweep_fname "w" )
+
+; write sweep parameters title
+when( point_list
+ point = car(point_list)
+ test_list = point->tests()
+ when( test_list
+ corner = car(test_list)->cornerName
+ par_names = setof( name point->params(?corner corner ?sortBy 'name)~>name
+ and( (name != "corModelSpec") (name != "temperature") ) )
+
+ fprintf(sweep_f "corner ")
+ fprintf(sweep_f "%s\n" buildString( par_names " " ))
+ )
+)
+
+; iterate through each design point and save data.
+data_list_struct = tconc(nil 0)
+total_points = length(point_list)
+cur_idx = 1
+foreach(point point_list
+ printf("*Info* saving process: %d/%d\n" cur_idx total_points)
+ cur_idx = cur_idx + 1
+ foreach(test point->tests()
+ ; write param values to file.
+ corner = test->cornerName
+ params = setof(par point->params(?corner corner ?sortBy 'name)
+ and( (par->name != "corModelSpec") (par->name != "temperature") ) )
+ param_vals = mapcar( lambda( (par) par->valueAsString(?digits precision ?notation 'eng) ) params )
+ fprintf(sweep_f "%s " corner)
+ fprintf(sweep_f "%s\n" buildString( param_vals " " ))
+
+ ; open results
+ openResults(test->resultsDir)
+
+ {% for var, expr in outputs.items() %}
+ tmp = {{ expr }}
+ save_waveform( save_dir "{{ var }}" tmp precision data_list_struct )
+ {% endfor %}
+
+ )
+)
+
+; close opened files
+close(sweep_f)
+foreach( entry cdar(data_list_struct)
+ close(cadr(entry))
+)
+
+ocnxlEndXLMode()
+
+exit()
diff --git a/src/bag/interface/zmqwrapper.py b/src/bag/interface/zmqwrapper.py
new file mode 100644
index 0000000..8f24f90
--- /dev/null
+++ b/src/bag/interface/zmqwrapper.py
@@ -0,0 +1,327 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines various wrapper around ZMQ sockets."""
+
+import os
+import zlib
+import pprint
+
+import zmq
+
+from ..io.file import write_file
+from ..io.common import to_bytes, fix_string
+from ..io.string import read_yaml_str, to_yaml_str
+
+
+class ZMQDealer(object):
+ """A class that interacts with a ZMQ dealer socket.
+
+    A dealer socket is an asynchronous socket that can issue multiple requests
+    without needing to wait for a reply. This class encapsulates the ZMQ
+    socket details and provides a more convenient API to use.
+
+ Parameters
+ ----------
+ port : int
+ the port to connect to.
+ pipeline : int
+ number of messages allowed in a pipeline. Only affects file
+ transfer performance.
+ host : str
+ the host to connect to.
+ log_file : str or None
+ the log file. None to disable logging.
+ """
+
+ def __init__(self, port, pipeline=100, host='localhost', log_file=None):
+ """Create a new ZMQDealer object.
+ """
+ context = zmq.Context.instance()
+ # noinspection PyUnresolvedReferences
+ self.socket = context.socket(zmq.DEALER)
+ self.socket.hwm = pipeline
+ self.socket.connect('tcp://%s:%d' % (host, port))
+ self._log_file = log_file
+ self.poller = zmq.Poller()
+ # noinspection PyUnresolvedReferences
+ self.poller.register(self.socket, zmq.POLLIN)
+
+ if self._log_file is not None:
+ self._log_file = os.path.abspath(self._log_file)
+            # If log file directory does not exist, create it
+ log_dir = os.path.dirname(self._log_file)
+ if not os.path.exists(log_dir):
+ os.makedirs(log_dir)
+ # clears any existing log
+ if os.path.exists(self._log_file):
+ os.remove(self._log_file)
+
+ def log_msg(self, msg):
+ """Log the given message"""
+ if self._log_file is not None:
+ write_file(self._log_file, '%s\n' % msg, append=True)
+
+ def log_obj(self, msg, obj):
+ """Log the given object"""
+ if self._log_file is not None:
+ obj_str = pprint.pformat(obj)
+ write_file(self._log_file, '%s\n%s\n' % (msg, obj_str), append=True)
+
+ def close(self):
+ """Close the underlying socket."""
+ self.socket.close()
+
+ def send_obj(self, obj):
+ """Sends a python object using pickle serialization and zlib compression.
+
+ Parameters
+ ----------
+ obj : any
+ the object to send.
+ """
+ p = to_bytes(to_yaml_str(obj))
+ z = zlib.compress(p)
+ self.log_obj('sending data:', obj)
+ self.socket.send(z)
+
+ def recv_obj(self, timeout=None, enable_cancel=False):
+ """Receive a python object, serialized with pickle and compressed with zlib.
+
+ Parameters
+ ----------
+ timeout : int or None
+            the timeout to wait in milliseconds. If None, wait indefinitely.
+ enable_cancel : bool
+ If True, allows the user to press Ctrl-C to abort. For this to work,
+ the other end must know how to process the stop request dictionary.
+ Returns
+ -------
+ obj : any
+ the received object. None if timeout reached.
+ """
+ try:
+ events = self.poller.poll(timeout=timeout)
+ except KeyboardInterrupt:
+ if not enable_cancel:
+ # re-raise exception if cancellation is not enabled.
+ raise
+ self.send_obj(dict(type='stop'))
+ print('Stop signal sent, waiting for reply. Press Ctrl-C again to force exit.')
+ try:
+ events = self.poller.poll(timeout=timeout)
+ except KeyboardInterrupt:
+ print('Force exiting.')
+ return None
+
+ if events:
+ data = self.socket.recv()
+ z = fix_string(zlib.decompress(data))
+ obj = read_yaml_str(z)
+ self.log_obj('received data:', obj)
+ return obj
+ else:
+ self.log_msg('timeout with %d ms reached.' % timeout)
+ return None
+
+ def recv_msg(self):
+ """Receive a string message.
+
+ Returns
+ -------
+ msg : str
+            the received message.
+ """
+ data = self.socket.recv()
+ self.log_msg('received message:\n%s' % data)
+ return data
+
+
+class ZMQRouter(object):
+ """A class that interacts with a ZMQ router socket.
+
+    A router socket is an asynchronous socket that can receive multiple requests
+    without needing to issue a reply. This class encapsulates the ZMQ socket
+    details and provides a more convenient API to use.
+
+ Parameters
+ ----------
+ port : int or None
+ the port to connect to. If None, then a random port between min_port and max_port
+ will be chosen.
+ min_port : int
+ the minimum random port number (inclusive).
+ max_port : int
+ the maximum random port number (exclusive).
+ pipeline : int
+ number of messages allowed in a pipeline. Only affects file
+ transfer performance.
+ log_file : str or None
+ the log file. None to disable logging.
+ """
+
+ def __init__(self, port=None, min_port=5000, max_port=9999, pipeline=100, log_file=None):
+ """Create a new ZMQDealer object.
+ """
+ context = zmq.Context.instance()
+ # noinspection PyUnresolvedReferences
+ self.socket = context.socket(zmq.ROUTER)
+ self.socket.hwm = pipeline
+ if port is not None:
+ self.socket.bind('tcp://*:%d' % port)
+ self.port = port
+ else:
+ self.port = self.socket.bind_to_random_port('tcp://*', min_port=min_port, max_port=max_port)
+ self.addr = None
+ self._log_file = log_file
+
+ if self._log_file is not None:
+ self._log_file = os.path.abspath(self._log_file)
+            # If log file directory does not exist, create it
+ log_dir = os.path.dirname(self._log_file)
+ if not os.path.exists(log_dir):
+ os.makedirs(log_dir)
+ # clears any existing log
+ if os.path.exists(self._log_file):
+ os.remove(self._log_file)
+
+ def get_port(self):
+ """Returns the port number."""
+ return self.port
+
+ def is_closed(self):
+ """Returns True if this router is closed."""
+ return self.socket.closed
+
+ def close(self):
+ """Close the underlying socket."""
+ self.socket.close()
+
+ def log_msg(self, msg):
+ """Log the given message"""
+ if self._log_file is not None:
+ write_file(self._log_file, '%s\n' % msg, append=True)
+
+ def log_obj(self, msg, obj):
+ """Log the given object"""
+ if self._log_file is not None:
+ obj_str = pprint.pformat(obj)
+ write_file(self._log_file, '%s\n%s\n' % (msg, obj_str), append=True)
+
+ def send_msg(self, msg, addr=None):
+ """Sends a string message
+
+ Parameters
+ ----------
+ msg : str
+ the message to send.
+ addr : str or None
+ the address to send the object to. If None, send to last sender.
+ """
+ addr = addr or self.addr
+ if addr is None:
+ warn_msg = '*WARNING* No receiver address specified. Message not sent:\n%s' % msg
+ self.log_msg(warn_msg)
+ else:
+ self.log_msg('sending message:\n%s' % msg)
+ self.socket.send_multipart([addr, msg])
+
+ def send_obj(self, obj, addr=None):
+ """Sends a python object using pickle serialization and zlib compression.
+
+ Parameters
+ ----------
+ obj : any
+ the object to send.
+ addr : str or None
+ the address to send the object to. If None, send to last sender.
+ """
+ addr = addr or self.addr
+ if addr is None:
+ warn_msg = '*WARNING* No receiver address specified. Message not sent:'
+ self.log_obj(warn_msg, obj)
+ else:
+ p = to_bytes(to_yaml_str(obj))
+ z = zlib.compress(p)
+ self.log_obj('sending data:', obj)
+ self.socket.send_multipart([addr, z])
+
+ def poll_for_read(self, timeout):
+ """Poll this socket for given timeout for read event.
+
+ Parameters
+ ----------
+ timeout : int
+            timeout in milliseconds.
+
+ Returns
+ -------
+ status : int
+ nonzero value means that this socket is ready for read.
+ """
+ return self.socket.poll(timeout=timeout)
+
+ def recv_obj(self):
+ """Receive a python object, serialized with pickle and compressed with zlib.
+
+ Returns
+ -------
+ obj : any
+ the received object.
+ """
+ self.addr, data = self.socket.recv_multipart()
+
+ z = fix_string(zlib.decompress(data))
+ obj = read_yaml_str(z)
+ self.log_obj('received data:', obj)
+ return obj
+
+ def get_last_sender_addr(self):
+ """Returns the address of the sender of last received message.
+
+ Returns
+ -------
+ addr : str
+ the last sender address
+ """
+ return self.addr
diff --git a/src/bag/io/__init__.py b/src/bag/io/__init__.py
new file mode 100644
index 0000000..9c83e17
--- /dev/null
+++ b/src/bag/io/__init__.py
@@ -0,0 +1,63 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package provides all IO related functionalities for BAG.
+
+Most importantly, this module sorts out all the bytes vs. unicode differences
+and simplifies writing python2/3 compatible code.
+"""
+
+from .common import fix_string, to_bytes, set_encoding, get_encoding, \
+ set_error_policy, get_error_policy
+from .sim_data import load_sim_results, save_sim_results, load_sim_file
+from .file import read_file, read_resource, read_yaml, readlines_iter, \
+ write_file, make_temp_dir, open_temp, open_file
+
+from . import process
+
+__all__ = ['fix_string', 'to_bytes', 'set_encoding', 'get_encoding',
+ 'set_error_policy', 'get_error_policy',
+ 'load_sim_results', 'save_sim_results', 'load_sim_file',
+ 'read_file', 'read_resource', 'read_yaml', 'readlines_iter',
+ 'write_file', 'make_temp_dir', 'open_temp', 'open_file',
+ ]
diff --git a/src/bag/io/common.py b/src/bag/io/common.py
new file mode 100644
index 0000000..de80eb2
--- /dev/null
+++ b/src/bag/io/common.py
@@ -0,0 +1,140 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module contains some commonly used IO functions.
+
+In particular, this module keeps track of BAG's system-wide encoding/decoding settings.
+"""
+
+# default BAG file encoding.
+bag_encoding = 'utf-8'
+# default codec error policy
+bag_codec_error = 'replace'
+
+
+def fix_string(obj):
+ """Fix the given potential string object to ensure python 2/3 compatibility.
+
+ If the given object is raw bytes, decode it into a string using
+ current encoding and return it. Otherwise, just return the given object.
+
+ This method is useful for writing python 2/3 compatible code.
+
+ Parameters
+ ----------
+ obj :
+ any python object.
+
+ Returns
+ -------
+ val :
+ the given object, or a decoded string if the given object is bytes.
+ """
+ if isinstance(obj, bytes):
+ obj = obj.decode(encoding=bag_encoding, errors=bag_codec_error)
+ return obj
+
+
+def to_bytes(my_str):
+ """Convert the given string to raw bytes.
+
+ Parameters
+ ----------
+ my_str : string
+ the string to encode to bytes.
+
+ Returns
+ -------
+ val : bytes
+ raw bytes of the string.
+ """
+ return bytes(my_str.encode(encoding=bag_encoding, errors=bag_codec_error))
+
+
+def set_encoding(new_encoding):
+ """Sets the BAG input/output encoding.
+
+ Parameters
+ ----------
+ new_encoding : string
+ the new encoding name.
+ """
+ global bag_encoding
+ if not isinstance(new_encoding, str):
+ raise Exception('encoding name must be string/unicode.')
+ bag_encoding = new_encoding
+
+
+def get_encoding():
+ """Returns the BAG input/output encoding.
+
+ Returns
+ -------
+ bag_encoding : unicode
+ the encoding name.
+ """
+ return bag_encoding
+
+
+def set_error_policy(new_policy):
+ """Sets the error policy on encoding/decoding errors.
+
+ Parameters
+ ----------
+ new_policy : string
+ the new error policy name. See codecs package documentation
+ for more information.
+ """
+ global bag_codec_error
+ bag_codec_error = new_policy
+
+
+def get_error_policy():
+ """Returns the current BAG encoding/decoding error policy.
+
+ Returns
+ -------
+ policy : unicode
+ the current error policy name.
+ """
+ return bag_codec_error
diff --git a/src/bag/io/file.py b/src/bag/io/file.py
new file mode 100644
index 0000000..0be39c7
--- /dev/null
+++ b/src/bag/io/file.py
@@ -0,0 +1,297 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module handles file related IO.
+"""
+
+from typing import TextIO, Any, Iterable, Union, Dict
+
+import os
+import time
+import string
+import codecs
+import tempfile
+import pkg_resources
+from pathlib import Path
+import jinja2
+
+from ruamel.yaml import YAML
+
+from .common import bag_encoding, bag_codec_error
+
+yaml = YAML(typ='unsafe')
+
+
+def render_yaml(fname: Union[str, Path], params: Dict[str, Any]) -> Dict[str, Any]:
+ """Renders a yaml file as a jinja template.
+
+ Parameters
+ ----------
+ fname: Union[str, Path]
+ the yaml file name.
+ params: Dict[str, Any]
+ parameters to be replaced in the yaml template
+
+ Returns
+ -------
+ rendered_content: Dict[str, Any]
+ A dictionary with keywords replaced in the yaml file.
+ """
+ raw_content = read_file(fname)
+ populated_content = jinja2.Template(raw_content).render(**params)
+ return yaml.load(populated_content)
+
+
+def open_file(fname: Union[str, Path], mode: str) -> TextIO:
+ """Opens a file with the correct encoding interface.
+
+ Use this method if you need to have a file handle.
+
+ Parameters
+ ----------
+ fname : str
+ the file name.
+ mode : str
+ the mode, either 'r', 'w', or 'a'.
+
+ Returns
+ -------
+ file_obj : TextIO
+ a file objects that reads/writes string with the BAG system encoding.
+ """
+ if mode != 'r' and mode != 'w' and mode != 'a':
+ raise ValueError("Only supports 'r', 'w', or 'a' mode.")
+ return open(fname, mode, encoding=bag_encoding, errors=bag_codec_error)
+
+
+def read_file(fname: Union[str, Path]) -> str:
+ """Read the given file and return content as string.
+
+ Parameters
+ ----------
+ fname : Union[str, Path]
+ the file name.
+
+ Returns
+ -------
+ content : str
+ the content as a unicode string.
+ """
+ with open_file(fname, 'r') as f:
+ content = f.read()
+ return content
+
+
+def readlines_iter(fname: Union[str, Path]) -> Iterable[str]:
+ """Iterate over lines in a file.
+
+ Parameters
+ ----------
+ fname : str
+ the file name.
+
+ Yields
+ ------
+ line : str
+ a line in the file.
+ """
+ with open_file(fname, 'r') as f:
+ for line in f:
+ yield line
+
+
+def read_yaml(fname: Union[str, Path]) -> Any:
+ """Read the given file using YAML.
+
+ Parameters
+ ----------
+ fname : str
+ the file name.
+
+ Returns
+ -------
+ content : Any
+ the object returned by YAML.
+ """
+ with open_file(fname, 'r') as f:
+ content = yaml.load(f)
+
+ return content
+
+
+def read_yaml_env(fname: str) -> Any:
+ """Parse YAML file with environment variable substitution.
+
+ Parameters
+ ----------
+ fname : str
+ yaml file name.
+
+ Returns
+ -------
+ table : Any
+ the object returned by YAML.
+ """
+ content = read_file(fname)
+ # substitute environment variables
+ content = string.Template(content).substitute(os.environ)
+ return yaml.load(content)
+
+
+def read_resource(package: str, fname: str) -> str:
+ """Read the given resource file and return content as string.
+
+ Parameters
+ ----------
+ package : str
+ the package name.
+ fname : str
+ the resource file name.
+
+ Returns
+ -------
+ content : str
+ the content as a unicode string.
+ """
+ raw_content = pkg_resources.resource_string(package, fname)
+ return raw_content.decode(encoding=bag_encoding, errors=bag_codec_error)
+
+
+def write_file(fname: Union[str, Path], content: str, append: bool = False,
+ mkdir: bool = True) -> None:
+ """Writes the given content to file.
+
+ Parameters
+ ----------
+ fname : Union[str, Path]
+ the file name.
+ content : str
+ the unicode string to write to file.
+ append : bool
+ True to append instead of overwrite.
+ mkdir : bool
+ If True, will create parent directories if they don't exist.
+ """
+ if isinstance(fname, str):
+ fpath = Path(fname)
+ else:
+ fpath = fname
+
+ if mkdir:
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+
+ with open_file(fpath, 'a' if append else 'w') as f:
+ f.write(content)
+
+
+def write_yaml(fname: Union[str, Path], obj: object, mkdir: bool = True) -> None:
+ """Writes the given object to a file using YAML format.
+
+ Parameters
+ ----------
+ fname : Union[str, Path]
+ the file name.
+ obj : object
+ the object to write.
+ mkdir : bool
+ If True, will create parent directories if they don't exist.
+
+ Returns
+ -------
+ content : Any
+ the object returned by YAML.
+ """
+ if isinstance(fname, str):
+ fpath = Path(fname)
+ else:
+ fpath = fname
+
+ if mkdir:
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+
+ with open_file(fpath, 'w') as f:
+ yaml.dump(obj, f)
+
+
+def make_temp_dir(prefix: str, parent_dir: str = '') -> str:
+ """Create a new temporary directory.
+
+ Parameters
+ ----------
+ prefix : str
+ the directory prefix.
+ parent_dir : str
+ the parent directory.
+
+ Returns
+ -------
+ dir_name : str
+ the temporary directory name.
+ """
+ prefix += time.strftime("_%Y%m%d_%H%M%S")
+ parent_dir = parent_dir or tempfile.gettempdir()
+ Path(parent_dir).mkdir(parents=True, exist_ok=True)
+ return tempfile.mkdtemp(prefix=prefix, dir=parent_dir)
+
+
+def open_temp(**kwargs: Any) -> TextIO:
+ """Opens a new temporary file for writing with unicode interface.
+
+ Parameters
+ ----------
+ **kwargs : Any
+ the tempfile keyword arguments. See documentation for
+ :func:`tempfile.NamedTemporaryFile`.
+
+ Returns
+ -------
+ file : TextIO
+ the opened file that accepts unicode input.
+ """
+ timestr = time.strftime("_%Y%m%d_%H%M%S")
+ if 'prefix' in kwargs:
+ kwargs['prefix'] += timestr
+ else:
+ kwargs['prefix'] = timestr
+ temp = tempfile.NamedTemporaryFile(**kwargs)
+ return codecs.getwriter(bag_encoding)(temp, errors=bag_codec_error)
diff --git a/src/bag/io/gui.py b/src/bag/io/gui.py
new file mode 100644
index 0000000..b9d2739
--- /dev/null
+++ b/src/bag/io/gui.py
@@ -0,0 +1,276 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+import subprocess
+import json
+import select
+
+import PyQt5.QtWidgets as QtWidgets
+import PyQt5.QtCore as QtCore
+
+from .file import write_file, open_file
+from .common import to_bytes
+
# this module relies on select() over sys.stdin and os.setpgrp, both of
# which are POSIX-only features.
if os.name != 'posix':
    raise Exception('bag.io.gui module current only works for POSIX systems.')
+
+
class StdinThread(QtCore.QThread):
    """A QT worker thread that reads commands from stdin.

    Polls stdin with select() using a 50 ms timeout so the thread notices
    the ``stop`` flag promptly even when no input arrives.  Each line read
    is forwarded to the GUI thread through the ``update`` signal; the
    thread exits after emitting the 'exit' command.
    """
    update = QtCore.pyqtSignal('QString')

    def __init__(self, parent):
        QtCore.QThread.__init__(self, parent=parent)
        # set to True (by this thread or by the GUI thread) to stop polling.
        self.stop = False

    def run(self):
        while not self.stop:
            try:
                stdin, _, _ = select.select([sys.stdin], [], [], 0.05)
                if stdin:
                    cmd = sys.stdin.readline().strip()
                else:
                    cmd = None
            except Exception:
                # stdin closed or select() failed: treat it as a quit request.
                # (was a bare ``except:``, which also swallowed SystemExit and
                # KeyboardInterrupt)
                cmd = 'exit'

            if cmd is not None:
                self.stop = (cmd == 'exit')
                self.update.emit(cmd)
+
+
class LogWidget(QtWidgets.QFrame):
    """A Logger window widget.

    Note: due to QPlainTextEdit always adding an extra newline when calling
    appendPlainText(), we keep track of internal buffer and only print output
    one line at a time.  This may cause some message to not display immediately.
    """

    def __init__(self, parent=None):
        QtWidgets.QFrame.__init__(self, parent=parent)

        self.logger = QtWidgets.QPlainTextEdit(parent=self)
        self.logger.setReadOnly(True)
        self.logger.setLineWrapMode(QtWidgets.QPlainTextEdit.NoWrap)
        self.logger.setMinimumWidth(1100)
        # holds the trailing partial line (no newline yet) not displayed yet.
        self.buffer = ''

        self.clear_button = QtWidgets.QPushButton('Clear Log', parent=self)
        self.clear_button.clicked.connect(self.clear_log)
        self.save_button = QtWidgets.QPushButton('Save Log As...', parent=self)
        self.save_button.clicked.connect(self.save_log)

        self.lay = QtWidgets.QVBoxLayout(self)
        self.lay.addWidget(self.logger)
        self.lay.addWidget(self.clear_button)
        self.lay.addWidget(self.save_button)

    def clear_log(self):
        """Clear the log display and the pending-line buffer."""
        self.logger.setPlainText('')
        self.buffer = ''

    def save_log(self):
        """Prompt for a file name and save the displayed log text to it."""
        root_dir = os.getcwd()
        fname, _ = QtWidgets.QFileDialog.getSaveFileName(self, 'Save File', root_dir)
        if fname:
            write_file(fname, self.logger.toPlainText() + '\n')

    def print_file(self, file_obj):
        """Append all complete lines available from ``file_obj`` to the display.

        A trailing partial line is stashed in ``self.buffer`` and shown once
        the rest of the line arrives on a later call.
        """
        # this code converts all types of newlines (such as '\r\n') to '\n',
        # and make sure any ending newlines are preserved.
        for line in file_obj:
            if self.buffer:
                line = self.buffer + line
                self.buffer = ''
            if line.endswith('\n'):
                self.logger.appendPlainText(line[:-1])
            else:
                self.buffer = line
+
+
class LogViewer(QtWidgets.QWidget):
    """A simple window to view process log files in real time.

    Log files are registered and removed through JSON commands received on
    stdin (read by :class:`StdinThread`); the currently selected file is
    tailed using a ``QFileSystemWatcher``.
    """

    def __init__(self):
        QtWidgets.QWidget.__init__(self)

        # combo box label
        self.label = QtWidgets.QLabel('Log File: ', parent=self)
        # populate log selection combo box.
        self.combo_box = QtWidgets.QComboBox(parent=self)
        self.log_files = []
        self.reader = None

        self.logger = LogWidget(parent=self)

        # setup GUI
        self.setWindowTitle('BAG Simulation Log Viewer')
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)

        self.layout = QtWidgets.QGridLayout(self)
        self.layout.addWidget(self.label, 0, 0, alignment=QtCore.Qt.AlignRight)
        self.layout.addWidget(self.combo_box, 0, 1, alignment=QtCore.Qt.AlignLeft)
        self.layout.addWidget(self.logger, 1, 0, -1, -1)
        # stretch factors must be ints; PyQt5 raises TypeError for floats
        # (the original code passed 0.0/1.0).
        self.layout.setRowStretch(0, 0)
        self.layout.setRowStretch(1, 1)
        self.layout.setColumnStretch(0, 0)
        self.layout.setColumnStretch(1, 0)

        # setup file watcher
        self.cur_paths = None
        self.watcher = QtCore.QFileSystemWatcher(parent=self)
        # setup signals
        self.watcher.fileChanged.connect(self.update_logfile)
        self.combo_box.currentIndexChanged.connect(self.change_log)

        # start thread
        self.thread = StdinThread(self)
        self.thread.update.connect(self.parse_cmd)
        self.thread.start()

    def closeEvent(self, evt):
        """Stop the stdin polling thread before the window closes."""
        if not self.thread.stop:
            self.thread.stop = True
            self.thread.wait()
        QtWidgets.QWidget.closeEvent(self, evt)

    @QtCore.pyqtSlot('QString')
    def parse_cmd(self, cmd):
        """Dispatch one command line received from stdin."""
        if cmd == 'exit':
            self.close()
        else:
            try:
                cmd = json.loads(cmd)
                if cmd[0] == 'add':
                    self.add_log(cmd[1], cmd[2])
                elif cmd[0] == 'remove':
                    self.remove_log(cmd[1])
            except (ValueError, TypeError, IndexError, KeyError):
                # malformed command; ignore it.  (Previously a bare except,
                # which also swallowed SystemExit/KeyboardInterrupt.)
                pass

    @QtCore.pyqtSlot('int')
    def change_log(self, new_idx):
        """Switch the display to the log file at the given combo-box index."""
        # print('log change called, switching to index %d' % new_idx)
        if self.cur_paths is not None:
            self.watcher.removePaths(self.cur_paths)
        self.logger.clear_log()
        if self.reader is not None:
            self.reader.close()
            self.reader = None

        if new_idx >= 0:
            fname = os.path.abspath(self.log_files[new_idx])
            dname = os.path.dirname(fname)
            self.reader = open_file(fname, 'r')
            self.logger.print_file(self.reader)
            # NOTE(review): the parent directory is watched in addition to the
            # file itself -- presumably to detect the file being replaced;
            # confirm before relying on this.
            self.cur_paths = [dname, fname]
            self.watcher.addPaths(self.cur_paths)

    @QtCore.pyqtSlot('QString')
    def update_logfile(self, fname):
        """Display newly appended content when the watched file changes."""
        # print('filechanged called, fname = %s' % fname)
        if self.reader is not None:
            self.logger.print_file(self.reader)

    def remove_log(self, log_tag):
        """Remove the log file registered under the given tag, if present."""
        idx = self.combo_box.findText(log_tag)
        if idx >= 0:
            del self.log_files[idx]
            self.combo_box.removeItem(idx)

    def add_log(self, log_tag, log_file):
        """Register a log file under the given tag, replacing any old entry."""
        self.remove_log(log_tag)
        if os.path.isfile(log_file):
            self.log_files.append(log_file)
            self.combo_box.addItem(log_tag)
+
+
def app_start():
    """Create the Qt application and run the log viewer event loop.

    Blocks until the viewer window is closed.
    """
    app = QtWidgets.QApplication([])

    window = LogViewer()
    # keep a reference on the application object so the window is not
    # garbage collected while the event loop runs.
    app.window_reference = window
    window.show()
    app.exec_()
+
+
def start_viewer():
    """Launch the log viewer GUI in a separate Python process.

    Returns
    -------
    proc : subprocess.Popen
        handle to the viewer process; pass it to :func:`add_log`,
        :func:`remove_log`, and :func:`close`.
    """
    cmd = [sys.executable, '-m', 'bag.io.gui']
    # subprocess.DEVNULL replaces the manually opened os.devnull file object,
    # which was never closed (resource leak).
    # os.setpgrp detaches the viewer from this process group so a Ctrl-C in
    # the parent terminal does not kill it.
    proc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.STDOUT,
                            preexec_fn=os.setpgrp)
    return proc
+
+
def add_log(proc, tag, fname):
    """Register a log file with the viewer process.

    Parameters
    ----------
    proc : subprocess.Popen or None
        the viewer process returned by :func:`start_viewer`.
    tag : str
        display name for the log file.
    fname : str
        path to the log file.

    Returns
    -------
    success : bool or None
        False if the viewer already exited, True if the command was sent,
        None if ``proc`` is None.
    """
    if proc is None:
        return None
    if proc.poll() is not None or proc.stdin.closed:
        # process finished
        return False
    proc.stdin.write(to_bytes(json.dumps(['add', tag, fname]) + '\n'))
    proc.stdin.flush()
    return True
+
+
def remove_log(proc, tag):
    """Ask the viewer process to remove a registered log file.

    Parameters
    ----------
    proc : subprocess.Popen or None
        the viewer process returned by :func:`start_viewer`.
    tag : str
        display name of the log file to remove.

    Returns
    -------
    success : bool or None
        False if the viewer already exited, True if the command was sent,
        None if ``proc`` is None.
    """
    if proc is None:
        return None
    if proc.poll() is not None or proc.stdin.closed:
        # process finished
        return False
    proc.stdin.write(to_bytes(json.dumps(['remove', tag]) + '\n'))
    proc.stdin.flush()
    return True
+
+
def close(proc):
    """Shut down the viewer process by closing its stdin pipe.

    Parameters
    ----------
    proc : subprocess.Popen or None
        the viewer process returned by :func:`start_viewer`.

    Notes
    -----
    Closing stdin presumably makes the viewer's stdin thread stop and the
    window exit -- TODO confirm the viewer's EOF handling.
    """
    if proc is not None and proc.poll() is None:
        proc.stdin.close()

if __name__ == '__main__':
    app_start()
diff --git a/src/bag/io/process.py b/src/bag/io/process.py
new file mode 100644
index 0000000..26f13b7
--- /dev/null
+++ b/src/bag/io/process.py
@@ -0,0 +1,398 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides functions to help you run external processes.
+"""
+
+import os
+import sys
+
+from .common import bag_encoding, bag_codec_error
+from .file import write_file
+
+import multiprocessing
+# noinspection PyCompatibility
+import concurrent.futures
+
# on Python 2 the stdlib subprocess module lacks the ``timeout`` parameter,
# so fall back to the subprocess32 backport (POSIX only).
if sys.version_info[0] < 3:
    # use subprocess32 for timeout feature.
    if os.name != 'posix':
        raise Exception('bag.io.process module current only works for POSIX systems.')
    # noinspection PyUnresolvedReferences,PyPackageRequirements
    import subprocess32 as subprocess
else:
    import subprocess
+
+
def run_proc_with_quit(proc_id, quit_dict, args, logfile=None, append=False, env=None, cwd=None):
    """Run a subprocess that can be cancelled through ``quit_dict``.

    Polls the subprocess and ``quit_dict``; when ``proc_id`` appears in
    ``quit_dict``, the process is terminated, then killed if it does not
    exit within the timeout stored in ``quit_dict[proc_id]``.

    Parameters
    ----------
    proc_id : string
        unique ID for this subprocess.
    quit_dict : dict
        maps process IDs to cancellation timeouts (in seconds); shared with
        the controller that requests cancellation.
    args : string or list[string]
        the command to run; see the subprocess documentation.
    logfile : string or None
        if given, stdout and stderr are written to this file; otherwise
        they are discarded.
    append : bool
        True to append to ``logfile`` instead of overwriting it.
    env : dict[string, string] or None
        environment variables for the subprocess.
    cwd : string or None
        working directory for the subprocess.

    Returns
    -------
    retcode : int or None
        the process return code, or None if it was cancelled before start
        or did not terminate before polling stopped.
    """
    if logfile is None:
        logfile = os.devnull

    mode = 'ab' if append else 'wb'
    with open(logfile, mode) as logf:
        if proc_id in quit_dict:
            # cancelled before the process even started.
            return None
        proc = subprocess.Popen(args, stdout=logf, stderr=subprocess.STDOUT,
                                env=env, cwd=cwd)
        retcode = None
        num_kill = 0
        timeout = 0.05
        # num_kill escalation: 0 = not signalled, 1 = terminate() sent,
        # 2 = kill() sent; give up one wait cycle after kill().
        while retcode is None and num_kill <= 2:
            try:
                retcode = proc.wait(timeout=timeout)
            except subprocess.TimeoutExpired:
                if proc_id in quit_dict:
                    if num_kill == 0:
                        proc.terminate()
                        # wait up to the caller-requested grace period
                        # before escalating to kill().
                        timeout = quit_dict[proc_id]
                    elif num_kill == 1:
                        proc.kill()
                    num_kill += 1

    return proc.returncode
+
+
def run_and_wait(args, timeout=None, logfile=None, append=False,
                 env=None, cwd=None):
    """Run a command in a subprocess and block until it finishes.

    Parameters
    ----------
    args : string or list[string]
        the command to run, as a command string or a list of the command and
        its arguments.  A list is preferred; see the Python subprocess
        documentation.
    timeout : float or None
        maximum time in seconds to wait for the command.  None waits
        indefinitely.
    logfile : string or None
        If given, stdout and stderr will be written to this file.
    append : bool
        True to append to the logfile instead of overwriting.  Defaults to
        False.
    env : dict[string, any]
        If not None, the subprocess environment is taken from this dictionary
        instead of being inherited from the current process.
    cwd : string or None
        The current working directory of the subprocess.

    Returns
    -------
    output : string
        the combined standard output and standard error of the command.

    Raises
    ------
    subprocess.CalledProcessError
        if any error occurred in the subprocess.
    """
    raw = subprocess.check_output(args, stderr=subprocess.STDOUT,
                                  timeout=timeout, env=env, cwd=cwd)
    decoded = raw.decode(encoding=bag_encoding, errors=bag_codec_error)

    if logfile is not None:
        write_file(logfile, decoded, append=append)

    return decoded
+
+
class ProcessManager(object):
    """A class that manages subprocesses.

    This class is for starting processes that you do not need to wait on,
    and allows you to query for their status or terminate/kill them if needed.

    Parameters
    ----------
    max_workers : int or None
        number of maximum allowed subprocesses.  If None, defaults to system
        CPU count.
    cancel_timeout : float or None
        Number of seconds to wait for a process to terminate once SIGTERM or
        SIGKILL is issued.  Defaults to 10 seconds.
    """
    def __init__(self, max_workers=None, cancel_timeout=10.0):
        if max_workers is None:
            max_workers = multiprocessing.cpu_count()
        if cancel_timeout is None:
            cancel_timeout = 10.0
        self._exec = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers)
        self._cancel_timeout = cancel_timeout
        # proc_id -> Future for every submitted task.
        self._future_dict = {}
        # proc_id -> cancellation timeout; shared with worker functions.
        self._quit_dict = {}

    def close(self, timeout=10.0):
        """Cancel all processes.

        Parameters
        ----------
        timeout : float
            time to wait in seconds for each process to terminate.
        """
        # iterate over a snapshot of the keys: cancel() deletes entries from
        # _future_dict, and mutating a dict while iterating its live key view
        # raises RuntimeError ("dictionary changed size during iteration").
        for proc_id in list(self._future_dict.keys()):
            self.cancel(proc_id, timeout=timeout)
        self._exec.shutdown()
        self._quit_dict.clear()
        self._future_dict.clear()

    def new_thread(self, fun, basename=None, callback=None):
        """Put a new custom task in queue.

        Execute the given function in a thread asynchronously.  The given function
        must take two arguments.  The first argument is a unique string that represents
        this task, and the second argument is a dictionary.  The dictionary will
        map the unique string to a timeout (in seconds) if this task is being cancelled.
        The function should periodically check the dictionary and terminate gracefully.

        Before the function returns, it should also delete the unique string from the
        dictionary if it exists.

        Parameters
        ----------
        fun : callable
            the function to execute in a thread, as described above.
        basename : string or None
            If given, this will be used as the basis for generating the unique
            process ID.
        callback : callable
            If given, this function will automatically be executed when the
            process finished.  This function should take a single argument,
            which is a Future object that returns the return code of the
            process.

        Returns
        -------
        proc_id : string
            a unique string representing this process.  Can be used later
            to query process status or cancel process.
        """
        # find unique process ID
        proc_id = basename or 'proc'
        cur_idx = 1
        while proc_id in self._future_dict:
            proc_id = '%s_%d' % (proc_id, cur_idx)
            cur_idx += 1

        future = self._exec.submit(fun, proc_id, self._quit_dict)
        if callback is not None:
            future.add_done_callback(callback)

        self._future_dict[proc_id] = future
        return proc_id

    def new_process(self, args, basename=None, logfile=None, append=False,
                    env=None, cwd=None, callback=None):
        """Put a new process in queue.

        When the process is done, its return code will be returned.

        Parameters
        ----------
        args : string or list[string]
            the command to run as a string or list of string arguments.  See
            Python subprocess documentation.  list of string format is preferred.
        basename : string or None
            If given, this will be used as the basis for generating the unique
            process ID.
        logfile : string or None
            If given, stdout and stderr will be written to this file.  Otherwise,
            they will be redirected to `os.devnull`.
        append : bool
            True to append to ``logfile`` instead of overwriting it.
        env : dict[string, string] or None
            If given, environment variables of the process will be set according
            to this dictionary.
        cwd : string or None
            current working directory of the process.
        callback : callable
            If given, this function will automatically be executed when the
            process finished.  This function should take a single argument,
            which is a Future object that returns the return code of the
            process.

        Returns
        -------
        proc_id : string
            a unique string representing this process.  Can be used later
            to query process status or cancel process.
        """
        # find unique process ID
        proc_id = basename or 'proc'
        cur_idx = 1
        while proc_id in self._future_dict:
            proc_id = '%s_%d' % (proc_id, cur_idx)
            cur_idx += 1

        future = self._exec.submit(self._start_cmd, args, proc_id,
                                   logfile=logfile, append=append, env=env, cwd=cwd)
        if callback is not None:
            future.add_done_callback(callback)

        self._future_dict[proc_id] = future
        return proc_id

    @staticmethod
    def _get_output(future, timeout=None):
        """Get output from future.  Return None on exception or cancellation."""
        try:
            if future.exception(timeout=timeout) is None:
                return future.result()
            else:
                return None
        except concurrent.futures.CancelledError:
            return None

    def cancel(self, proc_id, timeout=None):
        """Cancel the given process.

        If the process hasn't started, this method prevents it from starting.
        Otherwise, we first send a SIGTERM signal to kill the process.  If
        after ``timeout`` seconds the process is still alive, we will send a
        SIGKILL signal.  If after another ``timeout`` seconds the process is
        still alive, an Exception will be raised.

        Parameters
        ----------
        proc_id : string
            the process ID to cancel.
        timeout : float or None
            number of seconds to wait for cancellation.  If None, use default
            timeout.

        Returns
        -------
        output :
            output of the thread if it successfully terminates.
            Otherwise, return None.
        """
        if timeout is None:
            timeout = self._cancel_timeout

        future = self._future_dict.get(proc_id, None)
        if future is None:
            return None
        if future.done():
            # process already done, return status.
            del self._future_dict[proc_id]
            return self._get_output(future)
        if future.cancel():
            # we cancelled process before it made into the thread pool.
            del self._future_dict[proc_id]
            return None
        else:
            # inform thread it should try to quit.
            self._quit_dict[proc_id] = timeout
            try:
                # allow extra margin over the terminate/kill escalation.
                output = self._get_output(future, timeout=4 * timeout)
                del self._future_dict[proc_id]
                return output
            except concurrent.futures.TimeoutError:
                # shouldn't get here, but we did
                print("*WARNING* worker thread refuse to die...")
                del self._future_dict[proc_id]
                return None

    def done(self, proc_id):
        """Returns True if the given process finished or is cancelled successfully.

        Parameters
        ----------
        proc_id : string
            the process ID.

        Returns
        -------
        done : bool
            True if the process is cancelled or completed.
        """
        return self._future_dict[proc_id].done()

    def wait(self, proc_id, timeout=None, cancel_timeout=None):
        """Wait for the given process to finish, then return its return code.

        If ``timeout`` is None, waits indefinitely.  Otherwise, if after
        ``timeout`` seconds the process is still running, a
        :class:`concurrent.futures.TimeoutError` will be raised.
        However, it is safe to catch this error and call wait again.

        If Ctrl-C is pressed before the process finishes or before timeout
        is reached, the process will be cancelled.

        Parameters
        ----------
        proc_id : string
            the process ID.
        timeout : float or None
            number of seconds to wait.  If None, waits indefinitely.
        cancel_timeout : float or None
            number of seconds to wait for process cancellation.  If None,
            use default timeout.

        Returns
        -------
        output :
            output of the thread if it successfully terminates.  Otherwise return None.
        """
        if cancel_timeout is None:
            cancel_timeout = self._cancel_timeout

        future = self._future_dict[proc_id]
        try:
            output = future.result(timeout=timeout)
            # remove future from dictionary.
            del self._future_dict[proc_id]
            return output
        except KeyboardInterrupt:
            # cancel the process
            print('KeyboardInterrupt received, cancelling %s...' % proc_id)
            return self.cancel(proc_id, timeout=cancel_timeout)

    def _start_cmd(self, args, proc_id, logfile=None, append=False, env=None, cwd=None):
        """The function that actually starts the subprocess.  Executed by thread."""

        retcode = run_proc_with_quit(proc_id, self._quit_dict, args, logfile=logfile,
                                     append=append, env=env, cwd=cwd)
        if proc_id in self._quit_dict:
            del self._quit_dict[proc_id]

        return retcode
diff --git a/src/bag/io/sim_data.py b/src/bag/io/sim_data.py
new file mode 100644
index 0000000..5fbf0de
--- /dev/null
+++ b/src/bag/io/sim_data.py
@@ -0,0 +1,300 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module handles simulation data related IO.
+
+Note : when reading data files, we use Numpy to handle the encodings,
+so BAG encoding settings will not apply.
+"""
+
+import os
+import glob
+
+import numpy as np
+import h5py
+
+from .common import bag_encoding, bag_codec_error
+
# output names that would collide with reserved keys of the results dictionary.
illegal_var_name = ['sweep_params']
+
+
class SweepArray(np.ndarray):
    """Numpy array subclass that carries a ``sweep_params`` attribute.

    Follows the standard ndarray-subclassing recipe: the extra attribute is
    attached in ``__new__``, propagated to views/slices in
    ``__array_finalize__``, and round-tripped through pickling via
    ``__reduce__``/``__setstate__``.
    """

    def __new__(cls, data, sweep_params=None):
        # view the input data as this class, then attach the attribute.
        arr = np.asarray(data).view(cls)
        arr.sweep_params = sweep_params
        return arr

    def __array_finalize__(self, obj):
        # called for views/slices/templates; copy the attribute over.
        if obj is not None:
            self.sweep_params = getattr(obj, 'sweep_params', None)

    def __reduce__(self):
        # append sweep_params to the parent's pickle state.
        parent = super(SweepArray, self).__reduce__()
        aug_state = parent[2] + (self.sweep_params,)
        return parent[0], parent[1], aug_state

    # noinspection PyMethodOverriding
    def __setstate__(self, state):
        # peel our attribute off the end, then restore the ndarray state.
        self.sweep_params = state[-1]
        # noinspection PyArgumentList
        super(SweepArray, self).__setstate__(state[:-1])
+
+
+def _get_sweep_params(fname):
+ """Parse the sweep information file and reverse engineer sweep parameters.
+
+ Parameters
+ ----------
+ fname : str
+ the sweep information file name.
+
+ Returns
+ -------
+ swp_list : list[str]
+ list of sweep parameter names. index 0 is the outer-most loop.
+ values_list : list[list[float or str]]
+ list of values list for each sweep parameter.
+ """
+ mat = np.genfromtxt(fname, dtype=np.unicode_)
+ header = mat[0, :]
+ data = mat[1:, :]
+
+ # eliminate same data
+ idx_list = []
+ for idx in range(len(header)):
+ bool_vec = data[:, idx] == data[0, idx] # type: np.ndarray
+ if not np.all(bool_vec):
+ idx_list.append(idx)
+
+ header = header[idx_list]
+ data = data[:, idx_list]
+ # find the first index of last element of each column.
+ last_first_idx = [np.where(data[:, idx] == data[-1, idx])[0][0] for idx in range(len(header))]
+ # sort by first index of last element; the column where the last element
+ # appears the earliest is the inner most loop.
+ order_list = np.argsort(last_first_idx) # type: np.ndarray
+
+ # get list of values
+ values_list = []
+ skip_len = 1
+ for idx in order_list:
+ end_idx = last_first_idx[idx] + 1
+ values = data[0:end_idx:skip_len, idx]
+ if header[idx] != 'corner':
+ values = values.astype(np.float)
+ skip_len *= len(values)
+ values_list.append(values)
+
+ swp_list = header[order_list][::-1].tolist()
+ values_list.reverse()
+ return swp_list, values_list
+
+
def load_sim_results(save_dir):
    """Load exported simulation results from the given directory.

    Parameters
    ----------
    save_dir : str
        the save directory path.

    Returns
    -------
    results : dict[str, any]
        the simulation data dictionary.

        most keys in result is either a sweep parameter or an output signal.
        the values are the corresponding data as a numpy array.  In addition,
        results has a key called 'sweep_params', which contains a dictionary from
        output signal name to a list of sweep parameters of that output.
    """
    if not save_dir:
        return None

    results = {}
    sweep_params = {}

    # load sweep parameter values
    top_swp_list, values_list = _get_sweep_params(os.path.join(save_dir, 'sweep.info'))
    top_shape = []
    for swp, values in zip(top_swp_list, values_list):
        results[swp] = values
        top_shape.append(len(values))

    # each '<output>.sweep' file lists the per-output sweep variables; the
    # matching '<output>.data' file holds the flattened output values.
    for swp_name in glob.glob(os.path.join(save_dir, '*.sweep')):
        base_name = os.path.basename(swp_name).split('.')[0]
        data_name = os.path.join(save_dir, '%s.data' % base_name)
        try:
            data_arr = np.loadtxt(data_name)
        except ValueError:
            # try loading complex
            data_arr = np.loadtxt(data_name, dtype=complex)

        # get sweep parameter names
        with open(swp_name, 'r', encoding='utf-8') as f:
            swp_list = [str(line.strip()) for line in f]

        # make a copy of master sweep list and sweep shape
        cur_swp_list = list(top_swp_list)
        cur_shape = list(top_shape)

        for swp in swp_list:
            if swp not in results:
                # per-output sweep values live in '<swp>.info' files.
                fname = os.path.join(save_dir, '%s.info' % swp)
                results[swp] = np.loadtxt(fname)

            # if sweep has more than one element.
            if results[swp].shape:
                cur_swp_list.append(swp)
                cur_shape.append(results[swp].shape[0])

        # sanity check
        if base_name in results:
            raise Exception('Error: output named %s already in results' % base_name)

        # reshape data array
        data_arr = data_arr.reshape(cur_shape)
        results[base_name] = SweepArray(data_arr, cur_swp_list)
        # record sweep parameters for this data
        sweep_params[base_name] = cur_swp_list

    if 'sweep_params' in results:
        raise Exception('illegal output name: sweep_params')

    results['sweep_params'] = sweep_params

    return results
+
+
def save_sim_results(results, fname, compression='gzip'):
    """Saves the given simulation results dictionary as a HDF5 file.

    Parameters
    ----------
    results : dict[string, any]
        the results dictionary.
    fname : str
        the file to save results to.
    compression : str
        HDF5 compression method.  Defaults to 'gzip'.
    """
    # create directory if it didn't exist.  exist_ok avoids the race between
    # the original os.path.exists() check and os.makedirs().
    fname = os.path.abspath(fname)
    dir_name = os.path.dirname(fname)
    os.makedirs(dir_name, exist_ok=True)

    sweep_info = results['sweep_params']
    with h5py.File(fname, 'w') as f:
        for name, swp_vars in sweep_info.items():
            # store data; scalar (shape-less) datasets cannot be compressed.
            data = np.asarray(results[name])
            if not data.shape:
                dset = f.create_dataset(name, data=data)
            else:
                dset = f.create_dataset(name, data=data, compression=compression)
            # h5py workaround: need to explicitly store unicode
            dset.attrs['sweep_params'] = [swp.encode(encoding=bag_encoding, errors=bag_codec_error)
                                          for swp in swp_vars]

            # store sweep parameter values
            for var in swp_vars:
                if var not in f:
                    swp_data = results[var]
                    # np.unicode_ was removed from numpy; np.str_ is the
                    # supported alias for the unicode scalar type.
                    if np.issubdtype(swp_data.dtype, np.str_):
                        # we need to explicitly encode unicode strings to bytes
                        swp_data = [v.encode(encoding=bag_encoding, errors=bag_codec_error) for v in swp_data]

                    f.create_dataset(var, data=swp_data, compression=compression)
+
+
def load_sim_file(fname):
    """Read simulation results from HDF5 file.

    Parameters
    ----------
    fname : str
        the file to read.

    Returns
    -------
    results : dict[str, any]
        the result dictionary.
    """
    if not os.path.isfile(fname):
        raise ValueError('%s is not a file.' % fname)

    results = {}
    sweep_params = {}
    with h5py.File(fname, 'r') as f:
        for name in f:
            dset = f[name]
            # dset[()] reads the entire dataset into memory as a numpy value.
            dset_data = dset[()]
            if np.issubdtype(dset.dtype, np.bytes_):
                # decode byte values to unicode arrays
                dset_data = np.array([v.decode(encoding=bag_encoding, errors=bag_codec_error) for v in dset_data])

            if 'sweep_params' in dset.attrs:
                # datasets written by save_sim_results carry their sweep
                # variable names (stored as bytes) in this attribute.
                cur_swp = [swp.decode(encoding=bag_encoding, errors=bag_codec_error)
                           for swp in dset.attrs['sweep_params']]
                results[name] = SweepArray(dset_data, cur_swp)
                sweep_params[name] = cur_swp
            else:
                results[name] = dset_data

    results['sweep_params'] = sweep_params
    return results
diff --git a/src/bag/io/string.py b/src/bag/io/string.py
new file mode 100644
index 0000000..001598f
--- /dev/null
+++ b/src/bag/io/string.py
@@ -0,0 +1,71 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module handles string related IO.
+"""
+from typing import Iterable
+
+from io import StringIO
+from textwrap import fill
+
+from ruamel.yaml import YAML
+
# NOTE(review): typ='unsafe' lets ruamel construct arbitrary python objects
# during load -- never feed untrusted input to read_yaml_str (security risk).
yaml = YAML(typ='unsafe')
+
+
def read_yaml_str(content: str) -> object:
    """Deserialize the given YAML document string into a python object."""
    parsed = yaml.load(content)
    return parsed
+
+
def to_yaml_str(obj: object) -> str:
    """Serialize the given python object into a YAML document string."""
    buf = StringIO()
    yaml.dump(obj, buf)
    return buf.getvalue()
+
+
def wrap_string(str_list: Iterable[str], wrap_length: int = 80, indent_char: str = '+') -> str:
    """Join the given strings with spaces and word-wrap the result.

    Continuation lines are prefixed with ``indent_char``; the returned
    string is always newline-terminated.
    """
    joined = ' '.join(str_list)
    return fill(joined, width=wrap_length, subsequent_indent=indent_char) + '\n'
diff --git a/src/bag/io/template.py b/src/bag/io/template.py
new file mode 100644
index 0000000..8bd8015
--- /dev/null
+++ b/src/bag/io/template.py
@@ -0,0 +1,89 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines methods to create files from templates.
+"""
+
+import os
+
+from jinja2 import (
+ Environment, PackageLoader, select_autoescape, BaseLoader, TemplateNotFound, StrictUndefined
+)
+
+
class FileLoader(BaseLoader):
    """A jinja template loader that treats the template name as a filesystem path."""

    def __init__(self) -> None:
        super().__init__()

    def get_source(self, environment: Environment, template: str):
        """Load the template source from disk.

        Returns the source text, the file path, and an up-to-date callable
        that compares the file's current modification time against the
        time recorded at load.
        """
        if not os.path.isfile(template):
            raise TemplateNotFound(template)

        load_mtime = os.path.getmtime(template)
        with open(template, 'r') as fobj:
            text = fobj.read()

        def _uptodate() -> bool:
            return load_mtime == os.path.getmtime(template)

        return text, template, _uptodate
+
+
def new_template_env(parent_package: str, tmp_folder: str) -> Environment:
    """Create a jinja Environment that loads templates from a python package folder."""
    pkg_loader = PackageLoader(parent_package, package_path=tmp_folder)
    return Environment(
        loader=pkg_loader,
        undefined=StrictUndefined,
        autoescape=select_autoescape(default_for_string=False),
        trim_blocks=True,
        lstrip_blocks=True,
        keep_trailing_newline=True,
        enable_async=False,
    )
+
+
def new_template_env_fs() -> Environment:
    """Create a jinja Environment that loads templates directly from the filesystem."""
    return Environment(
        loader=FileLoader(),
        undefined=StrictUndefined,
        autoescape=select_autoescape(default_for_string=False),
        trim_blocks=True,
        lstrip_blocks=True,
        keep_trailing_newline=True,
        enable_async=False,
    )
diff --git a/src/bag/layout/__init__.py b/src/bag/layout/__init__.py
new file mode 100644
index 0000000..7d4a42e
--- /dev/null
+++ b/src/bag/layout/__init__.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package contains code for template-based layout.
+"""
diff --git a/src/bag/layout/core.py b/src/bag/layout/core.py
new file mode 100644
index 0000000..3087c69
--- /dev/null
+++ b/src/bag/layout/core.py
@@ -0,0 +1,394 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines some core layout classes
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Tuple, TypeVar, Union, Iterable, List, Callable
+
+from pybag.core import PyLayInstRef, BBox, Transform, BBoxArray
+
+from .routing.base import Port, WireArray
+
+if TYPE_CHECKING:
+ from .template import TemplateBase
+
+T = TypeVar('T')
+
+
class PyLayInstance:
    """A (possibly arrayed) layout instance of a template master.

    Wraps a PyLayInstRef (the underlying layout instance handle) together
    with the python TemplateBase master it instantiates, providing array
    geometry, transformation, and port lookup methods.
    """

    def __init__(self, parent: TemplateBase, master: TemplateBase, ref: PyLayInstRef) -> None:
        # parent: the template this instance lives in.
        # master: the template this instance instantiates.
        # ref: handle to the underlying layout instance object.
        self._parent = parent
        self._master = master
        self._ref = ref

    @property
    def name(self) -> str:
        """str: The name of this instance."""
        return self._ref.inst_name

    @property
    def committed(self) -> bool:
        """bool: True if this instance has been committed."""
        return self._ref.committed

    @property
    def nx(self) -> int:
        """int: Number of columns."""
        return self._ref.nx

    @property
    def ny(self) -> int:
        """int: Number of rows."""
        return self._ref.ny

    @property
    def spx(self) -> int:
        """int: The column pitch."""
        return self._ref.spx

    @property
    def spy(self) -> int:
        """int: The row pitch."""
        return self._ref.spy

    @property
    def master(self) -> TemplateBase:
        """TemplateBase: The master of this instance."""
        return self._master

    @property
    def transformation(self) -> Transform:
        """Transform: The instance transformation object."""
        return self._ref.xform

    @property
    def bound_box(self) -> BBox:
        """BBox: Returns the overall bounding box of this instance."""
        barr = BBoxArray(self._master.bound_box, nx=self.nx, ny=self.ny, spx=self.spx, spy=self.spy)
        return barr.transform(self.transformation).bound_box

    @property
    def array_box(self) -> BBox:
        """Returns the array box of this instance."""
        # getattr returns None when the master never defined array_box
        master_box: Union[BBox, None] = getattr(self._master, 'array_box', None)
        if master_box is None:
            raise ValueError('Master template array box is not defined.')

        barr = BBoxArray(master_box, nx=self.nx, ny=self.ny, spx=self.spx, spy=self.spy)
        return barr.transform(self.transformation).bound_box

    @property
    def fill_box(self) -> BBox:
        """Returns the fill box of this instance."""
        # getattr returns None when the master never defined fill_box
        master_box: Union[BBox, None] = getattr(self._master, 'fill_box', None)
        if master_box is None:
            raise ValueError('Master template fill box is not defined.')

        barr = BBoxArray(master_box, nx=self.nx, ny=self.ny, spx=self.spx, spy=self.spy)
        return barr.transform(self.transformation).bound_box

    # array geometry setters; these mutate the underlying instance reference
    @nx.setter
    def nx(self, val: int) -> None:
        """Sets the number of columns."""
        self._ref.nx = val

    @ny.setter
    def ny(self, val: int) -> None:
        """Sets the number of rows."""
        self._ref.ny = val

    @spx.setter
    def spx(self, val: int) -> None:
        """Sets the column pitch."""
        self._ref.spx = val

    @spy.setter
    def spy(self, val: int) -> None:
        """Sets the row pitch."""
        self._ref.spy = val

    def get_item_location(self, row: int = 0, col: int = 0) -> Tuple[int, int]:
        """Returns the location of the given item in the array.

        Parameters
        ----------
        row : int
            the item row index. 0 is the bottom-most row.
        col : int
            the item column index. 0 is the left-most column.

        Returns
        -------
        xo : int
            the item X coordinate.
        yo : int
            the item Y coordinate.
        """
        if row < 0 or row >= self.ny or col < 0 or col >= self.nx:
            raise ValueError('Invalid row/col index: row=%d, col=%d' % (row, col))

        # offsets are relative to the instance origin, before transformation
        return col * self.spx, row * self.spy

    def get_bound_box_of(self, row: int = 0, col: int = 0) -> BBox:
        """Returns the bounding box of an instance in this mosaic.

        Parameters
        ----------
        row : int
            the item row index. 0 is the bottom-most row.
        col : int
            the item column index. 0 is the left-most column.

        Returns
        -------
        bbox : BBox
            the bounding box.
        """
        dx, dy = self.get_item_location(row=row, col=col)
        box = self._master.bound_box.get_transform(self.transformation)
        return box.move_by(dx, dy)

    def move_by(self, dx: int = 0, dy: int = 0) -> None:
        """Moves this instance by the given amount.

        Parameters
        ----------
        dx : int
            the X shift.
        dy : int
            the Y shift.
        """
        self._ref.move_by(dx, dy)

    def transform(self, xform: Transform) -> None:
        """Transform the location of this instance.

        Parameters
        ----------
        xform : Transform
            the transformation to apply to this instance.
        """
        self._ref.transform(xform)

    def new_master_with(self, **kwargs: Any) -> None:
        """Change the master template of this instance.

        This method will get the old master template layout parameters, update
        the parameter values with the given dictionary, then create a new master
        template with those parameters and associate it with this instance.

        Parameters
        ----------
        **kwargs : Any
            a dictionary of new parameter values.
        """
        self._master = self._master.new_template_with(**kwargs)
        self._ref.set_master(self._master.layout_cellview)

    def transform_master_object(self, obj: T, row: int = 0, col: int = 0) -> T:
        """Transforms the given object in instance master w.r.t. this instance's Transform object.

        Parameters
        ----------
        obj : T
            the object to transform. Must have get_transform() method defined.
        row : int
            the instance row index. Index 0 is the bottom-most row.
        col : int
            the instance column index. Index 0 is the left-most column.

        Returns
        -------
        ans : T
            the transformed object.
        """
        dx, dy = self.get_item_location(row=row, col=col)
        xform = self.transformation.get_move_by(dx, dy)
        # Transform objects compose via get_transform_by; other objects
        # (ports, boxes, ...) expose get_transform instead.
        if isinstance(obj, Transform):
            return obj.get_transform_by(xform)
        return obj.get_transform(xform)

    def get_port(self, name: str = '', row: int = 0, col: int = 0) -> Port:
        """Returns the port object of the given instance in the array.

        Parameters
        ----------
        name : str
            the port terminal name. If empty, check if this
            instance has only one port, then return it.
        row : int
            the instance row index. Index 0 is the bottom-most row.
        col : int
            the instance column index. Index 0 is the left-most column.

        Returns
        -------
        port : Port
            the port object.
        """
        return self.transform_master_object(self._master.get_port(name), row, col)

    def get_primitive_port(self, name: str, check_fun: Callable[[int], bool],
                           row: int = 0, col: int = 0) -> Port:
        """Returns the port object of the given instance in the array.

        Parameters
        ----------
        name : str
            the port terminal name. If empty, check if this
            instance has only one port, then return it.
        check_fun : Callable[[int], bool]
            a function that returns true if a layer is considered "private".
        row : int
            the instance row index. Index 0 is the bottom-most row.
        col : int
            the instance column index. Index 0 is the left-most column.

        Returns
        -------
        port : Port
            the port object.
        """
        port = self._master.get_port(name).to_primitive(self._master.tr_colors, check_fun)
        return self.transform_master_object(port, row, col)

    def get_pin(self, name: str = '', row: int = 0, col: int = 0, layer: int = Port.default_layer
                ) -> Union[WireArray, BBox]:
        """Returns the first pin with the given name.

        This is an efficient method if you know this instance has exactly one pin.

        Parameters
        ----------
        name : str
            the port terminal name. If None or empty, check if this
            instance has only one port, then return it.
        row : int
            the instance row index. Index 0 is the bottom-most row.
        col : int
            the instance column index. Index 0 is the left-most column.
        layer : int
            the pin layer. If negative, check to see if the given port has only one layer.
            If so then use that layer.

        Returns
        -------
        pin : Union[WireArray, BBox]
            the first pin associated with the port of given name.
        """
        return self.get_port(name, row, col).get_pins(layer)[0]

    def port_pins_iter(self, name: str = '', layer: Union[int, str] = Port.default_layer
                       ) -> Iterable[Union[WireArray, BBox]]:
        """Iterate through all pins of all ports with the given name in this instance array.

        Parameters
        ----------
        name : str
            the port terminal name. If None or empty, check if this
            instance has only one port, then return it.
        layer : Union[int, str]
            the pin layer ID or primitive pin layer name. If negative, check to see if the
            given port has only one layer. If so then use that layer.

        Yields
        ------
        pin : Union[WireArray, BBox]
            the pin as WireArray or bounding box.
        """
        for col in range(self.nx):
            for row in range(self.ny):
                try:
                    port = self.get_port(name, row, col)
                except KeyError:
                    # every array element shares the same master, so a missing
                    # port means no element has it; stop the whole iteration
                    return
                for obj in port.get_pins(layer):
                    yield obj

    def get_all_port_pins(self, name: str = '', layer: Union[int, str] = Port.default_layer
                          ) -> Union[List[WireArray], List[BBox]]:
        """Returns a list of all pins of all ports with the given name in this instance array.

        This method gathers ports from all instances in this array with the given name,
        then find all pins of those ports on the given layer, then return as list of WireArrays.

        Parameters
        ----------
        name : str
            the port terminal name. If None or empty, check if this
            instance has only one port, then return it.
        layer : Union[int, str]
            the pin layer ID or primitive pin layer name. If negative, check to see if the
            given port has only one layer. If so then use that layer.

        Returns
        -------
        pin_list : Union[List[WireArray], List[BBox]]
            the list of pins as WireArrays or bounding boxes.
        """
        return list(self.port_pins_iter(name=name, layer=layer))

    def port_names_iter(self) -> Iterable[str]:
        """Iterates over port names in this instance.

        Yields
        ------
        port_name : str
            name of a port in this instance.
        """
        return self._master.port_names_iter()

    def has_port(self, port_name: str) -> bool:
        """Returns True if this instance has the given port."""
        return self._master.has_port(port_name)

    def commit(self) -> None:
        """Commit this instance to the parent layout.

        If the technology uses track coloring and the master is colored,
        recompute the coloring under this instance's transform and swap in
        a re-parameterized master if the coloring changed, then register
        the master with the parent and commit the underlying reference.
        """
        parent_grid = self._parent.grid
        old_master = self._master
        if parent_grid.tech_info.use_track_coloring and old_master.use_color:
            # update track parity
            tr_colors = parent_grid.get_track_coloring_at(self._parent.tr_colors,
                                                          self.transformation, old_master.grid,
                                                          old_master.top_layer)
            if tr_colors != old_master.tr_colors:
                self.new_master_with(tr_colors=tr_colors)

        # use self._master (not old_master): new_master_with may have replaced it
        self._parent.add_child_key(self._master.key)
        self._ref.commit()
diff --git a/src/bag/layout/data.py b/src/bag/layout/data.py
new file mode 100644
index 0000000..055ec37
--- /dev/null
+++ b/src/bag/layout/data.py
@@ -0,0 +1,79 @@
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines various data classes used in layout.
+"""
+
+from __future__ import annotations
+
+from typing import Dict, Any, Tuple, Mapping
+
+from dataclasses import dataclass
+
+from pybag.enum import Orient2D, Direction2D
+
+from ..util.immutable import Param
+
+
class TemplateEdgeInfo:
    """Stores the edge parameters of a template, one Param per edge direction."""

    def __init__(self, west: Param, south: Param, east: Param, north: Param):
        # insertion order matches the original (W, E, S, N)
        keys = (Direction2D.WEST, Direction2D.EAST, Direction2D.SOUTH, Direction2D.NORTH)
        self._info = dict(zip(keys, (west, east, south, north)))

    def get_edge_params(self, direction: Direction2D) -> Param:
        """Return the parameters for the edge in the given direction."""
        return self._info[direction]

    def to_tuple(self) -> Tuple[Param, Param, Param, Param]:
        """Return the edge parameters as a (west, south, east, north) tuple."""
        table = self._info
        return (table[Direction2D.WEST], table[Direction2D.SOUTH],
                table[Direction2D.EAST], table[Direction2D.NORTH])
+
+
class MOMCapInfo:
    """Class providing convenient MOM cap information lookup"""

    def __init__(self, cap_info: Mapping[str, Any],
                 port_widths: Mapping[int, int], port_pleft: Mapping[int, bool]):
        self._bot_dir = Orient2D[cap_info['bot_dir']]
        self._cap_info: Dict[int, Tuple[int, int, int, int, int]] = cap_info['info']
        # layers alternate direction starting from the lowest defined layer
        self._bot_layer = min(self._cap_info.keys())
        self._port_widths = port_widths
        self._port_pleft = port_pleft

    def get_direction(self, layer: int) -> Orient2D:
        """Return the wire direction on the given layer (alternates by layer parity)."""
        if (layer - self._bot_layer) % 2:
            return self._bot_dir.perpendicular()
        return self._bot_dir

    def get_port_tr_w(self, layer: int) -> int:
        """Return the port track width on the given layer, with per-layer override."""
        # evaluate the fallback unconditionally, like dict.get's eager default
        fallback = self._cap_info[layer][4]
        return self._port_widths.get(layer, fallback)

    def get_port_plow(self, layer: int) -> bool:
        """Return True if the port on the given layer is on the lower side."""
        return self._port_pleft.get(layer, False)

    def get_cap_specs(self, layer: int) -> Tuple[int, int, int, int]:
        """Return the cap geometry specs for the given layer (all but the last entry)."""
        entry = self._cap_info[layer]
        return entry[:-1]
+
+
@dataclass(frozen=True)
class MaxSpaceFillInfo:
    """Maximum-space fill specification.

    info packs (space_x, space_y, margin_x, margin_y, <float>) where the
    first two entries are per-orientation spaces and the next two are
    per-orientation margins, indexed by Orient2D.value.  The semantics of
    the trailing float are not established here -- TODO confirm.
    """
    info: Tuple[int, int, int, int, float]

    def get_space(self, orient: Orient2D) -> int:
        """Return the maximum space for the given orientation."""
        idx = orient.value
        return self.info[idx]

    def get_margin(self, orient: Orient2D) -> int:
        """Return the margin for the given orientation."""
        idx = orient.value + 2
        return self.info[idx]
diff --git a/src/bag/layout/routing/__init__.py b/src/bag/layout/routing/__init__.py
new file mode 100644
index 0000000..637189b
--- /dev/null
+++ b/src/bag/layout/routing/__init__.py
@@ -0,0 +1,48 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package provides routing grid related classes and methods.
+"""
+
+from .base import WireArray, TrackID
+from .grid import RoutingGrid
diff --git a/src/bag/layout/routing/base.py b/src/bag/layout/routing/base.py
new file mode 100644
index 0000000..3d584dc
--- /dev/null
+++ b/src/bag/layout/routing/base.py
@@ -0,0 +1,952 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides basic routing classes.
+"""
+
+from __future__ import annotations
+
+from typing import (
+ TYPE_CHECKING, Tuple, Union, Iterable, Iterator, Dict, List, Sequence, Any, Optional, Mapping,
+ cast, Callable
+)
+
+from pybag.enum import RoundMode
+from pybag.core import BBox, Transform, PyTrackID, TrackColoring, get_wire_iterator
+
+from ...typing import TrackType
+from ...util.math import HalfInt
+from ...util.immutable import ImmutableSortedDict, combine_hash
+from ...util.search import BinaryIterator
+
+if TYPE_CHECKING:
+ from .grid import RoutingGrid
+
+WDictType = Mapping[str, Mapping[int, int]]
+SpDictType = Mapping[Tuple[str, str], Mapping[int, TrackType]]
+
+
class TrackID(PyTrackID):
    """A class that represents locations of track(s) on the routing grid.

    Parameters
    ----------
    layer_id : int
        the layer ID.
    track_idx : TrackType
        the smallest middle track index in the array. Multiples of 0.5
    width : int
        width of one track in number of tracks.
    num : int
        number of tracks in this array.
    pitch : TrackType
        pitch between adjacent tracks, in number of track pitches.
    grid: Optional[RoutingGrid]
        the routing grid associated with this TrackID object.
    """

    def __init__(self, layer_id: int, track_idx: TrackType, width: int = 1, num: int = 1,
                 pitch: TrackType = 0, grid: Optional[RoutingGrid] = None) -> None:
        if num < 1:
            raise ValueError('TrackID must have 1 or more tracks.')

        # The base class stores indices in "htr" (half-track) units: values are
        # doubled so half-integer track locations can be represented as integers.
        PyTrackID.__init__(self, layer_id, int(round(2 * track_idx)), width, num,
                           int(round(2 * pitch)))
        self._grid = grid

    def __iter__(self) -> Iterator[HalfInt]:
        """Iterate over all middle track indices in this TrackID."""
        return (HalfInt(self.base_htr + idx * self.htr_pitch) for idx in range(self.num))

    @property
    def base_index(self) -> HalfInt:
        """HalfInt: the base index."""
        return HalfInt(self.base_htr)

    @property
    def pitch(self) -> HalfInt:
        """HalfInt: the track pitch."""
        return HalfInt(self.htr_pitch)

    @property
    def grid(self) -> Optional[RoutingGrid]:
        """Optional[RoutingGrid]: the routing grid of this TrackID object."""
        return self._grid

    def __getitem__(self, idx: Union[int, slice]) -> TrackID:
        """Return a TrackID selecting a single track (int index) or a sub-array (slice).

        Negative indices count from the end, as with Python sequences.
        Raises ValueError on out-of-range indices or an empty slice.
        """
        num = self.num
        pitch = self.pitch
        if isinstance(idx, int):
            if idx < 0:
                idx += num
            if idx < 0 or idx >= num:
                raise ValueError(f'Invalid index {idx} with {num} wires.')
            return TrackID(self.layer_id, self.base_index + idx * pitch, width=self.width,
                           grid=self._grid)
        else:
            start = idx.start
            stop = idx.stop
            step = idx.step
            if step is None:
                step = 1
            elif not isinstance(step, int):
                raise ValueError(f'TrackID slicing step {step} has to be integer')

            if start is None:
                start = 0
            elif start < 0:
                start += num
            if start < 0 or start >= num:
                raise ValueError(f'Invalid start index {start} with {num} wires.')

            if stop is None:
                stop = num
            elif stop < 0:
                stop += num
            if stop <= 0 or stop > num:
                raise ValueError(f'Invalid stop index {stop} with {num} wires.')

            if stop <= start:
                raise ValueError('slice got empty TrackID.')

            # number of selected tracks is ceil((stop - start) / step)
            q, r = divmod(stop - start, step)
            return TrackID(self.layer_id, self.base_index + start * pitch, width=self.width,
                           num=q + (r != 0), pitch=step * pitch, grid=self._grid)

    def transform(self, xform: Transform) -> TrackID:
        """Transform this TrackID in place and return it (requires a RoutingGrid)."""
        if self._grid is None:
            raise ValueError('Cannot transform TrackID without RoutingGrid.')

        lay_id = self.layer_id
        self.base_htr = self._grid.transform_htr(lay_id, self.base_htr, xform)
        # scale along the axis perpendicular to this layer's track direction;
        # presumably +/-1, flipping the pitch sign under mirroring -- TODO confirm.
        axis_scale = xform.axis_scale[1 - self._grid.get_direction(lay_id).value]
        self.htr_pitch = self.htr_pitch * axis_scale
        return self

    def get_transform(self, xform: Transform) -> TrackID:
        """returns a transformed TrackID."""
        return TrackID(self.layer_id, self.base_index, width=self.width,
                       num=self.num, pitch=self.pitch, grid=self._grid).transform(xform)

    def copy_with(self, grid: RoutingGrid) -> TrackID:
        """Return a copy of this TrackID associated with the given RoutingGrid."""
        return TrackID(self.layer_id, self.base_index, width=self.width,
                       num=self.num, pitch=self.pitch, grid=grid)
+
+
class WireArray:
    """An array of wires on the routing grid.

    Parameters
    ----------
    track_id : TrackID
        TrackArray representing the track locations of this wire array.
    lower : int
        the lower coordinate along the track direction.
    upper : int
        the upper coordinate along the track direction.
    """

    def __init__(self, track_id: TrackID, lower: int, upper: int) -> None:
        self._tid = track_id
        self._lower = lower
        self._upper = upper

    @property
    def track_id(self) -> TrackID:
        """TrackID: The TrackID of this WireArray."""
        return self._tid

    @property
    def layer_id(self) -> int:
        """int: the layer ID of this WireArray."""
        return self._tid.layer_id

    @property
    def lower(self) -> int:
        """int: the lower coordinate along the track direction."""
        return self._lower

    @property
    def upper(self) -> int:
        """int: the upper coordinate along the track direction."""
        return self._upper

    @property
    def middle(self) -> int:
        """int: the midpoint coordinate (floor division) along the track direction."""
        return (self._lower + self._upper) // 2

    @property
    def bound_box(self) -> BBox:
        """BBox: the bounding box of this WireArray.

        Raises
        ------
        ValueError
            if the underlying TrackID has no associated RoutingGrid.
        """
        tid = self._tid
        layer_id = tid.layer_id
        grid = tid.grid
        if grid is None:
            # fix: original message read "Cannot computing ..."
            raise ValueError('Cannot compute WireArray bounding box without RoutingGrid.')

        lower, upper = grid.get_wire_bounds_htr(layer_id, tid.base_htr, tid.width)

        # extend the perpendicular bounds to cover all wires in the array;
        # pitch may be negative, in which case the array extends downward.
        delta = (tid.num - 1) * int(tid.pitch * grid.get_track_pitch(layer_id))
        if delta >= 0:
            upper += delta
        else:
            lower += delta

        return BBox(grid.get_direction(layer_id), self._lower, self._upper, lower, upper)

    @classmethod
    def list_to_warr(cls, warr_list: Sequence[WireArray]) -> WireArray:
        """Convert a list of WireArrays to a single WireArray.

        this method assumes all WireArrays have the same layer, width, and lower/upper coordinates.
        Overlapping WireArrays will be compacted.

        Raises
        ------
        ValueError
            if the combined track indices are not evenly pitched.
        """
        if len(warr_list) == 1:
            return warr_list[0]

        tid0 = warr_list[0].track_id
        layer = tid0.layer_id
        width = tid0.width
        lower = warr_list[0].lower
        upper = warr_list[0].upper
        # collect the distinct track indices of all wires, in sorted order
        tid_list = sorted(set((idx for warr in warr_list for idx in warr.track_id)))
        base_idx = tid_list[0]
        if len(tid_list) < 2:
            return WireArray(TrackID(layer, base_idx, width=width, grid=tid0.grid), lower, upper)
        # a single WireArray requires a uniform pitch between adjacent tracks
        diff = tid_list[1] - tid_list[0]
        for idx in range(1, len(tid_list) - 1):
            if tid_list[idx + 1] - tid_list[idx] != diff:
                raise ValueError('pitch mismatch.')

        return WireArray(TrackID(layer, base_idx, width=width, num=len(tid_list), pitch=diff,
                                 grid=tid0.grid), lower, upper)

    @classmethod
    def single_warr_iter(cls, warr: Union[WireArray, Sequence[WireArray]]) -> Iterable[WireArray]:
        """Iterate through single wires in the given WireArray or WireArray list."""
        if isinstance(warr, WireArray):
            yield from warr.warr_iter()
        else:
            for w in warr:
                yield from w.warr_iter()

    @classmethod
    def wire_grp_iter(cls, warr: Union[WireArray, Sequence[WireArray]]) -> Iterable[WireArray]:
        """Iterate through WireArrays in the given WireArray or WireArray list."""
        if isinstance(warr, WireArray):
            yield warr
        else:
            yield from warr

    def __getitem__(self, idx: int) -> WireArray:
        """Return the WireArray for the selected track(s); delegates indexing to TrackID."""
        return WireArray(self._tid[idx], self._lower, self._upper)

    def __repr__(self) -> str:
        return f'WireArray({self._tid}, {self._lower}, {self._upper})'

    def to_warr_list(self) -> List[WireArray]:
        """Convert this WireArray into a list of single wires."""
        return list(self.warr_iter())

    def warr_iter(self) -> Iterable[WireArray]:
        """Iterates through single wires in this WireArray."""
        tid = self._tid
        layer = tid.layer_id
        width = tid.width
        lower = self.lower
        upper = self.upper
        for tr in tid:
            yield WireArray(TrackID(layer, tr, width=width, grid=tid.grid), lower, upper)

    def wire_iter(self, tr_colors: TrackColoring) -> Iterable[Tuple[str, str, BBox]]:
        """Iterate over (layer, purpose, BBox) tuples of the physical wires."""
        return get_wire_iterator(self._tid.grid, tr_colors, self._tid, self._lower, self._upper)

    def transform(self, xform: Transform) -> WireArray:
        """Transform this WireArray.

        Parameters
        ----------
        xform : Transform
            the transformation object.

        Returns
        -------
        warr : WireArray
            a reference to this object.
        """
        # noinspection PyAttributeOutsideInit
        self._tid = self._tid.get_transform(xform)
        layer_id = self._tid.layer_id
        dir_idx = self._tid.grid.get_direction(layer_id).value
        scale = xform.axis_scale[dir_idx]
        delta = xform.location[dir_idx]
        if scale < 0:
            # mirrored along the track direction: lower/upper swap and negate
            tmp = -self._upper + delta
            self._upper = -self._lower + delta
            self._lower = tmp
        else:
            self._lower += delta
            self._upper += delta

        return self

    def get_transform(self, xform: Transform) -> WireArray:
        """Return a new transformed WireArray.

        Parameters
        ----------
        xform : Transform
            the transformation object.

        Returns
        -------
        warr : WireArray
            the new WireArray object.
        """
        return WireArray(self.track_id, self.lower, self.upper).transform(xform)
+
+
class Port:
    """A layout port.

    a port is a group of pins that represent the same net.
    The pins can be on different layers.

    Parameters
    ----------
    term_name : str
        the terminal name of the port.
    pin_dict : Dict[Union[int, str], Union[List[WireArray], List[BBox]]]
        a dictionary from layer ID to pin geometries on that layer.
        Integer keys hold WireArray pins; string keys hold primitive BBox pins.
    label : str
        the label of this port.
    hidden : bool
        True if this is a hidden port.
    """

    # sentinel layer value meaning "use this port's single layer"
    default_layer = -1000

    def __init__(self, term_name: str,
                 pin_dict: Dict[Union[int, str], Union[List[WireArray], List[BBox]]],
                 label: str, hidden: bool) -> None:
        self._term_name = term_name
        self._pin_dict = pin_dict
        self._label = label
        self._hidden = hidden

    def get_single_layer(self) -> Union[int, str]:
        """Returns the layer of this port if it only has a single layer."""
        if len(self._pin_dict) > 1:
            raise ValueError('This port has more than one layer.')
        return next(iter(self._pin_dict))

    def _get_layer(self, layer: Union[int, str]) -> Union[int, str]:
        """Get the layer ID or name, resolving the sentinel/empty-string default."""
        if isinstance(layer, str):
            return self.get_single_layer() if not layer else layer
        else:
            return self.get_single_layer() if layer == Port.default_layer else layer

    @property
    def net_name(self) -> str:
        """str: The net name of this port."""
        return self._term_name

    @property
    def label(self) -> str:
        """str: The label of this port."""
        return self._label

    @property
    def hidden(self) -> bool:
        """bool: True if this is a hidden port."""
        return self._hidden

    def items(self) -> Iterable[Tuple[Union[int, str], Union[List[WireArray], List[BBox]]]]:
        """Iterate over (layer, pin geometry list) pairs of this port."""
        # fix: Iterable[] takes a single type parameter; the original annotation
        # passed two arguments, which is not a valid typing construct.
        return self._pin_dict.items()

    def get_pins(self, layer: Union[int, str] = -1000) -> Union[List[WireArray], List[BBox]]:
        """Returns the pin geometries on the given layer.

        Parameters
        ----------
        layer : Union[int, str]
            the layer ID. If equal to Port.default_layer, check if this port is on a single layer,
            then return the result.

        Returns
        -------
        track_bus_list : Union[List[WireArray], List[BBox]]
            pins on the given layer representing as WireArrays.
            Returns an empty list if there are no pins on the layer.
        """
        layer = self._get_layer(layer)
        return self._pin_dict.get(layer, [])

    def get_bounding_box(self, layer: Union[int, str] = -1000) -> BBox:
        """Calculate the overall bounding box of this port on the given layer.

        Parameters
        ----------
        layer : Union[int, str]
            the layer ID. If Negative, check if this port is on a single layer,
            then return the result.

        Returns
        -------
        bbox : BBox
            the bounding box.

        Raises
        ------
        KeyError
            if this port has no pins on the given layer.
        """
        layer = self._get_layer(layer)
        box = BBox.get_invalid_bbox()
        # NOTE(review): this relies on BBox.merge mutating box in place, since the
        # return value is discarded -- TODO confirm against pybag's BBox API.
        for geo in self._pin_dict[layer]:
            if isinstance(geo, BBox):
                box.merge(geo)
            else:
                box.merge(geo.bound_box)
        return box

    def get_transform(self, xform: Transform) -> Port:
        """Return a new transformed Port.

        Parameters
        ----------
        xform : Transform
            the transform object.
        """
        new_pin_dict = {}
        for lay, geo_list in self._pin_dict.items():
            if isinstance(lay, str):
                # string layers hold primitive BBox pins
                new_geo_list = [cast(BBox, geo).get_transform(xform) for geo in geo_list]
            else:
                new_geo_list = [geo.get_transform(xform) for geo in geo_list]
            new_pin_dict[lay] = new_geo_list

        return Port(self._term_name, new_pin_dict, self._label, self._hidden)

    def to_primitive(self, tr_colors: TrackColoring, check_fun: Callable[[int], bool]) -> Port:
        """Return a new Port with pins on selected layers converted to primitive BBox pins.

        Parameters
        ----------
        tr_colors : TrackColoring
            the track coloring object used to expand WireArrays into physical wires.
        check_fun : Callable[[int], bool]
            predicate on the layer ID; layers for which it returns True are converted.
        """
        new_pin_dict = {}
        for lay, geo_list in self._pin_dict.items():
            if isinstance(lay, int) and check_fun(lay):
                for geo in geo_list:
                    for blay, _, bbox in geo.wire_iter(tr_colors):
                        # fix: annotate as Optional since dict.get may return None
                        box_list: Optional[List[BBox]] = new_pin_dict.get(blay, None)
                        if box_list is None:
                            new_pin_dict[blay] = box_list = []
                        box_list.append(bbox)
            else:
                new_pin_dict[lay] = geo_list

        return Port(self._term_name, new_pin_dict, self._label, self._hidden)
+
+
class TrackManager:
    """A class that makes it easy to compute track locations.

    This class provides many helper methods for computing track locations and spacing when
    each track could have variable width. All methods in this class accepts a "track_type",
    which is either a string in the track dictionary or an integer representing the track
    width.

    Parameters
    ----------
    grid : RoutingGrid
        the RoutingGrid object.
    tr_widths : WDictType
        dictionary from wire types to its width on each layer.
    tr_spaces : SpDictType
        dictionary from wire types to its spaces on each layer.
    **kwargs : Any
        additional options.
    """

    def __init__(self, grid: RoutingGrid, tr_widths: WDictType, tr_spaces: SpDictType,
                 **kwargs: Any) -> None:
        # half_space=True allows half-integer track separations by default.
        half_space = kwargs.get('half_space', True)

        self._grid = grid
        self._tr_widths = ImmutableSortedDict(tr_widths)
        self._tr_spaces = ImmutableSortedDict(tr_spaces)
        self._half_space = half_space

        # compute hash once up front; all members are immutable.
        seed = hash(self._grid)
        seed = combine_hash(seed, hash(self._tr_widths))
        seed = combine_hash(seed, hash(self._tr_spaces))
        seed = combine_hash(seed, hash(self._half_space))
        self._hash = seed

    def __hash__(self) -> int:
        return self._hash

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, TrackManager):
            return (self._grid == other._grid and self._tr_widths == other._tr_widths and
                    self._tr_spaces == other._tr_spaces and self._half_space == other._half_space)
        else:
            return False

    @classmethod
    def _get_space_from_tuple(cls, layer_id: int, ntup: Tuple[str, str],
                              sp_dict: Optional[SpDictType]) -> Optional[TrackType]:
        """Look up the extra space for a wire-type pair on the given layer.

        The pair key is order-insensitive: both (a, b) and (b, a) are tried.
        Returns None if no entry is found.
        """
        if sp_dict is not None:
            test = sp_dict.get(ntup, None)
            if test is not None:
                return test.get(layer_id, None)
            ntup = (ntup[1], ntup[0])
            test = sp_dict.get(ntup, None)
            if test is not None:
                return test.get(layer_id, None)
        return None

    @property
    def grid(self) -> RoutingGrid:
        """RoutingGrid: the RoutingGrid object."""
        return self._grid

    @property
    def half_space(self) -> bool:
        """bool: True if half-integer track separations are allowed by default."""
        return self._half_space

    @property
    def tr_widths(self) -> ImmutableSortedDict[str, ImmutableSortedDict[int, int]]:
        """the wire-type to per-layer width table."""
        return self._tr_widths

    @property
    def tr_spaces(self) -> ImmutableSortedDict[Tuple[str, str],
                                               ImmutableSortedDict[int, TrackType]]:
        """the wire-type-pair to per-layer extra space table."""
        return self._tr_spaces

    def get_width(self, layer_id: int, track_type: Union[str, int]) -> int:
        """Returns the track width.

        Integer track types are returned as-is; unknown string types default to 1.

        Parameters
        ----------
        layer_id : int
            the track layer ID.
        track_type : Union[str, int]
            the track type.
        """
        if isinstance(track_type, int):
            return track_type
        if track_type not in self._tr_widths:
            return 1
        return self._tr_widths[track_type].get(layer_id, 1)

    def get_sep(self, layer_id: int, type_tuple: Tuple[Union[str, int], Union[str, int]],
                **kwargs: Any) -> HalfInt:
        """Returns the track separation.

        Parameters
        ----------
        layer_id : int
            the track layer ID.
        type_tuple : Tuple[Union[str, int], Union[str, int]],
            Tuple of the two types of wire. If a type is an integer instead of a string,
            we use that as the track width.
        **kwargs : Any
            optional parameters.

        Returns
        -------
        tr_sp : HalfInt
            the track separation
        """
        same_color = kwargs.get('same_color', False)
        half_space = kwargs.get('half_space', self._half_space)
        sp_override = kwargs.get('sp_override', None)
        sp_dict = self._tr_spaces if sp_override is None else sp_override

        if isinstance(type_tuple[0], int):
            w1 = type_tuple[0]
            if isinstance(type_tuple[1], int):
                # user specify track width for both wires
                w2 = type_tuple[1]
                extra_sep = 0
            else:
                # integer width vs. named type: use the named type's single-wire space
                w2 = self.get_width(layer_id, type_tuple[1])
                extra_sep = self._get_space_from_tuple(layer_id, (type_tuple[1], ''), sp_dict)
                if extra_sep is None:
                    extra_sep = 0
        else:
            w1 = self.get_width(layer_id, type_tuple[0])
            if isinstance(type_tuple[1], int):
                w2 = type_tuple[1]
                extra_sep = self._get_space_from_tuple(layer_id, (type_tuple[0], ''), sp_dict)
                if extra_sep is None:
                    extra_sep = 0
            else:
                # both named types: look up the pair space; fall back to the larger
                # of the two single-wire spaces if no pair entry exists.
                w2 = self.get_width(layer_id, type_tuple[1])
                extra_sep = self._get_space_from_tuple(layer_id, type_tuple, sp_dict)
                if extra_sep is None:
                    # check single spacing
                    extra_sep1 = self._get_space_from_tuple(layer_id, (type_tuple[0], ''), sp_dict)
                    if extra_sep1 is None:
                        extra_sep1 = 0
                    extra_sep2 = self._get_space_from_tuple(layer_id, (type_tuple[1], ''), sp_dict)
                    if extra_sep2 is None:
                        extra_sep2 = 0
                    extra_sep = max(extra_sep1, extra_sep2)

        # DRC-driven separation from the grid plus the user-specified extra space;
        # round up to a whole number of tracks if half-integer spacing is disallowed.
        ans = self._grid.get_sep_tracks(layer_id, w1, w2, same_color=same_color) + extra_sep
        return ans.up_even(not half_space)

    def get_next_track(self, layer_id: int, cur_idx: TrackType, cur_type: Union[str, int],
                       next_type: Union[str, int], up: Union[bool, int] = True, **kwargs: Any
                       ) -> HalfInt:
        """Compute the track location of a wire next to a given one.

        Parameters
        ----------
        layer_id : int
            the layer ID.
        cur_idx : TrackType
            the current wire track index.
        cur_type : Union[str, int]
            the current wire type.
        next_type : Union[str, int]
            the next wire type.
        up : Union[bool, int]
            True to return the next track index that is larger than cur_idx. Can also be integer
            to count number of tracks.
        **kwargs : Any
            optional parameters.

        Returns
        -------
        next_int : HalfInt
            the next track index.
        """
        # separation from the current wire to the next, and between successive next wires
        sep = self.get_sep(layer_id, (cur_type, next_type), **kwargs)
        sep1 = self.get_sep(layer_id, (next_type, next_type), **kwargs)
        cur_idx = HalfInt.convert(cur_idx)

        if isinstance(up, bool):
            # normalize boolean to a +1/-1 step count
            up: int = 2 * int(up) - 1

        # skipping |up| tracks: one cur-to-next separation plus (|up| - 1) next-to-next
        delta = sep + (abs(up) - 1) * sep1
        sign = up > 0
        return cur_idx + (2 * sign - 1) * delta

    def get_num_wires_between(self, layer_id: int, bot_wire: str, bot_idx: HalfInt,
                              top_wire: str, top_idx: HalfInt, fill_wire: str) -> int:
        """Return how many fill_wire wires fit between the given bottom and top wires."""
        # first/last legal fill track indices inside the gap
        idx0 = self.get_next_track(layer_id, bot_idx, bot_wire, fill_wire, up=True)
        idx1 = self.get_next_track(layer_id, top_idx, top_wire, fill_wire, up=False)
        if idx1 < idx0:
            return 0

        sep = self.get_sep(layer_id, (fill_wire, fill_wire))
        return ((idx1.dbl_value - idx0.dbl_value) // sep.dbl_value) + 1

    def place_wires(self, layer_id: int, type_list: Sequence[Union[str, int]],
                    align_track: Optional[HalfInt] = None, align_idx: int = 0,
                    center_coord: Optional[int] = None, **kwargs: Any
                    ) -> Tuple[HalfInt, List[HalfInt]]:
        """Place the given wires next to each other.

        Parameters
        ----------
        layer_id : int
            the layer of the tracks.
        type_list : Sequence[Union[str, int]]
            list of wire types.
        align_track : Optional[HalfInt]
            If not None, will make sure the track at location align_idx has this value.
        align_idx : Optional[int]
            the align wire index.
        center_coord : Optional[int]
            If not None, will try to center the wires around this coordinate.
            align_track takes precedence over center_coord.
        **kwargs : Any
            optional parameters for get_num_space_tracks() method of RoutingGrid.

        Returns
        -------
        num_tracks : HalfInt
            number of tracks used.
        locations : List[HalfInt]
            the center track index of each wire.
        """
        if not type_list:
            return HalfInt(0), []

        grid = self.grid

        # place the first wire at the first legal track at/above index 0;
        # the whole group is shifted afterwards for alignment/centering.
        w0 = self.get_width(layer_id, type_list[0])
        mid_idx = grid.find_next_track(layer_id, 0, tr_width=w0, half_track=True,
                                       mode=RoundMode.GREATER_EQ)

        ans = [mid_idx]
        num_wires = len(type_list)
        idx_half = num_wires // 2
        for idx in range(1, num_wires):
            cur_idx = self.get_next_track(layer_id, ans[-1], type_list[idx - 1],
                                          type_list[idx], up=True, **kwargs)
            ans.append(cur_idx)

        if align_track is not None:
            # shift so that wire align_idx lands on align_track
            delta = align_track - ans[align_idx]
            for idx in range(num_wires):
                ans[idx] += delta
        elif center_coord is not None:
            # shift so the middle of the group is as close as possible to center_coord
            if num_wires & 1:
                mid_coord = grid.track_to_coord(layer_id, ans[idx_half])
            else:
                coord1 = grid.track_to_coord(layer_id, ans[idx_half - 1])
                coord2 = grid.track_to_coord(layer_id, ans[idx_half])
                mid_coord = (coord1 + coord2) // 2

            coord_delta = center_coord - mid_coord
            delta = grid.coord_to_track(layer_id, coord_delta, mode=RoundMode.NEAREST)
            # subtract the track index of coordinate 0 to get a pure track offset
            delta -= grid.coord_to_track(layer_id, 0)
            for idx in range(num_wires):
                ans[idx] += delta

        # total track usage: span between the track boundaries just outside the
        # first and last wires' physical extents.
        w1 = self.get_width(layer_id, type_list[-1])
        upper = grid.get_wire_bounds(layer_id, ans[-1], width=w1)[1]
        top_idx = grid.coord_to_track(layer_id, upper, mode=RoundMode.GREATER_EQ)
        lower = grid.get_wire_bounds(layer_id, ans[0],
                                     width=self.get_width(layer_id, type_list[0]))[0]
        bot_idx = grid.coord_to_track(layer_id, lower, mode=RoundMode.LESS_EQ)
        ntr = top_idx - bot_idx

        return ntr, ans

    @classmethod
    def _get_align_delta(cls, tot_ntr: TrackType, num_used: TrackType, alignment: int) -> HalfInt:
        """Return the track offset that implements the given alignment code."""
        if alignment == -1 or num_used == tot_ntr:
            # we already aligned to left
            return HalfInt(0)
        elif alignment == 0:
            # center tracks
            return HalfInt.convert(tot_ntr - num_used).div2()
        elif alignment == 1:
            # align to right
            return HalfInt.convert(tot_ntr - num_used)
        else:
            raise ValueError('Unknown alignment code: %d' % alignment)

    def align_wires(self, layer_id: int, type_list: Sequence[Union[str, int]], tot_ntr: TrackType,
                    alignment: int = 0, start_idx: TrackType = 0, **kwargs: Any) -> List[HalfInt]:
        """Place the given wires in the given space with the specified alignment.

        Parameters
        ----------
        layer_id : int
            the layer of the tracks.
        type_list : Sequence[Union[str, int]]
            list of wire types.
        tot_ntr : TrackType
            total available space in number of tracks.
        alignment : int
            If alignment == -1, will "left adjust" the wires (left is the lower index direction).
            If alignment == 0, will center the wires in the middle.
            If alignment == 1, will "right adjust" the wires.
        start_idx : TrackType
            the starting track index.
        **kwargs : Any
            optional parameters for place_wires().

        Returns
        -------
        locations : List[HalfInt]
            the center track index of each wire.
        """
        # NOTE(review): start_idx ends up in place_wires' **kwargs, but place_wires
        # has no start_idx parameter and its kwargs consumers ignore unknown keys,
        # so it appears to have no effect -- TODO confirm intended behavior.
        num_used, idx_list = self.place_wires(layer_id, type_list, start_idx=start_idx, **kwargs)
        if num_used > tot_ntr:
            raise ValueError('Given tracks occupy more space than given.')

        delta = self._get_align_delta(tot_ntr, num_used, alignment)
        return [idx + delta for idx in idx_list]

    def get_next_track_obj(self,
                           warr_tid_obj: Union[TrackID, WireArray],
                           cur_type: Union[str, int],
                           next_type: Union[str, int],
                           count_rel_tracks: int = 1,
                           **kwargs) -> TrackID:
        """Computes next TrackID relative the WireArray or TrackID object, given wire types

        Parameters
        ----------
        warr_tid_obj: Union[TrackID, WireArray]
            the wire array or track id object used as the reference
        cur_type: Union[str, int]
            the wire type of current reference warr/tid
        next_type: Union[str, int]
            the wire type of the returned tid
        count_rel_tracks: int
            the number of spacings to skip
            +1 means the immediate next track id
            -1 means immediate previous track id,
            +2 means the one after the next track id, etc.
            if |count_rel_tracks| > 1, the skipped distance is
            space(cur_type, next_type) + (|count_rel_tracks| - 1) * space(next_type, next_type)

        Returns
        -------
        track_id : TrackID
            the TrackID object of the next track id
            (note: the returned TrackID has no RoutingGrid associated with it).
        """

        layer_id = warr_tid_obj.layer_id
        if isinstance(warr_tid_obj, TrackID):
            cur_idx = warr_tid_obj.base_index
        else:
            cur_idx = warr_tid_obj.track_id.base_index

        sep0 = self.get_sep(layer_id, (cur_type, next_type), **kwargs)
        sep1 = self.get_sep(layer_id, (next_type, next_type), **kwargs)
        cur_idx = HalfInt.convert(cur_idx)

        # same skip arithmetic as get_next_track, with count_rel_tracks as the step count
        sign = count_rel_tracks > 0
        delta = sep0 + (abs(count_rel_tracks) - 1) * sep1
        next_tidx = cur_idx + (2 * sign - 1) * delta

        return TrackID(layer_id, next_tidx, width=self.get_width(layer_id, next_type))

    def get_shield_tracks(self, layer_id: int, tidx_lo: HalfInt, tidx_hi: HalfInt,
                          wtype_lo: Union[str, int], wtype_hi: Union[str, int]) -> List[TrackID]:
        """Fill the given space with shielding tracks

        Try to fill with the widest metal allowed in the PDK
        Respect DRC spacing rules relative to lower and higher wires
        Currently this method just returns a bunch of width 1 wires.

        Parameters
        ----------
        layer_id : int
            the track layer ID.
        tidx_lo : HalfInt
            lower bound track index
        tidx_hi : HalfInt
            upper bound track index
        wtype_lo: Union[str, int]
            type of lower bound wire
        wtype_hi: Union[str, int]
            type of upper bound wire

        Returns
        -------
        idx_list : List[TrackID]
            list of TrackIDs
        """
        tr_width = 1

        # first/last track indices that clear the bounding wires' separation rules
        sh_tr_lower = self.get_next_track(layer_id, tidx_lo, wtype_lo, tr_width, up=True)
        sh_tr_upper = self.get_next_track(layer_id, tidx_hi, wtype_hi, tr_width, up=False)
        # floor-divide by 1: presumably to truncate the HalfInt span to a whole
        # track count -- TODO confirm HalfInt.__floordiv__ semantics.
        num_tracks = (sh_tr_upper - sh_tr_lower + 1) // 1
        tr_locs = [sh_tr_lower + i for i in range(num_tracks)]
        return [TrackID(layer_id, tr_idx, width=tr_width) for tr_idx in tr_locs]

    def spread_wires(self, layer_id: int, type_list: Sequence[Union[str, int]],
                     lower: HalfInt, upper: HalfInt, sp_type: Tuple[str, str],
                     alignment: int = 0, max_iter: int = 1000) -> List[HalfInt]:
        """Spread out the given wires in the given space.

        This method tries to spread out wires by increasing the space around the given
        wire/combination of wires.

        Parameters
        ----------
        layer_id : int
            the layer of the tracks.
        type_list : Sequence[Union[str, int]]
            list of wire types.
        lower : HalfInt
            the lower bound track index, inclusive.
        upper : HalfInt
            the upper bound track index, inclusive.
        sp_type : Tuple[str, str]
            The space to increase.
        alignment : int
            If alignment == -1, will "left adjust" the wires (left is the lower index direction).
            If alignment == 0, will center the wires in the middle.
            If alignment == 1, will "right adjust" the wires.
        max_iter : int
            maximum number of iterations.

        Returns
        -------
        locations : List[HalfInt]
            the center track index of each wire.
        """
        # starting space for sp_type: the currently-configured value, if any
        test_dict = self._tr_spaces.get(sp_type, None)
        if test_dict is not None:
            cur_sp = test_dict.get(layer_id, 0)
        else:
            cur_sp = 0
        cur_sp = HalfInt.convert(cur_sp)

        # mutable copy of the space table; cur_dict is updated in the search loop below
        sp_override = self._tr_spaces.to_dict()
        sp_override[sp_type] = cur_dict = {layer_id: cur_sp}

        # translate alignment into place_wires anchoring arguments
        if alignment < 0:
            align_track = lower
            align_idx = 0
            center_coord = None
        elif alignment > 0:
            align_track = upper
            align_idx = len(type_list) - 1
            center_coord = None
        else:
            grid = self.grid
            align_track = None
            align_idx = 0
            c0 = grid.track_to_coord(layer_id, lower)
            c1 = grid.track_to_coord(layer_id, upper)
            center_coord = (c0 + c1) // 2

        # binary search (on the doubled space value) for the largest sp_type space
        # that still fits all wires within [lower, upper].
        bin_iter = BinaryIterator(cur_sp.dbl_value, None)
        for cnt in range(max_iter):
            if not bin_iter.has_next():
                break
            new_sp_dbl = bin_iter.get_next()
            cur_dict[layer_id] = HalfInt(new_sp_dbl)
            result = self.place_wires(layer_id, type_list, align_track=align_track,
                                      align_idx=align_idx, center_coord=center_coord,
                                      sp_override=sp_override)[1]
            if result[0] < lower or result[-1] > upper:
                bin_iter.down()
            else:
                bin_iter.save_info(result)
                bin_iter.up()

        if bin_iter.get_last_save_info() is None:
            raise ValueError(f'Unable to place specified wires in range [{lower}, {upper}].')

        return bin_iter.get_last_save_info()
diff --git a/src/bag/layout/routing/fill.py b/src/bag/layout/routing/fill.py
new file mode 100644
index 0000000..c4d3841
--- /dev/null
+++ b/src/bag/layout/routing/fill.py
@@ -0,0 +1,905 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines classes that provides automatic fill utility on a grid.
+"""
+
+from typing import Optional, List, Tuple
+
+import dataclasses
+
+from bag.util.search import BinaryIterator, minimize_cost_golden
+
+
@dataclasses.dataclass(eq=True)
class FillInfo:
    """Describes a symmetric 1-D fill solution without materializing the intervals.

    All lengths are in the same (integer) units as the total area.  When
    ``invert`` is True the stored block lengths describe *space* intervals and
    the nominal space describes fill, i.e. the roles of fill and space are
    swapped when interpreting this object.
    """
    tot_area: int        # total 1-D area being filled
    sp_nominal: int      # nominal space between blocks
    sp_edge: int         # space used at the area edge (cyclic case may differ)
    sp_mid: int          # middle space length, or -1 if there is no middle space
    blk0: int            # first of the two block lengths used
    blk1: int            # second block length (edge-most block in cyclic mode)
    blkm: int            # middle block length, or -1 if there is no middle block
    num_half: int        # number of blocks in one half of the symmetric solution
    num_diff_sp: int     # number of spaces that differ from sp_nominal
    num_blk1_half: int   # number of blk1-length blocks in one half
    inc_sp: bool         # True if differing spaces are sp + 1, False if sp - 1
    fill_on_edge: bool   # True if a fill block abuts the area boundary
    cyclic: bool         # True if the area wraps around
    invert: bool         # True if fill/space roles are swapped

    @property
    def num_fill(self) -> int:
        """Total number of fill intervals this solution produces."""
        has_mid = 1 if self.blkm >= 0 else 0
        if not self.invert:
            return 2 * self.num_half + has_mid
        edge = 1 if self.fill_on_edge else 0
        return 2 * (self.num_half - edge) + has_mid + 1

    @property
    def sp_max(self) -> int:
        """Largest space between fill intervals."""
        if self.invert:
            # inverted: stored blocks are the spaces
            return self.blk1 if self.blk1 > self.blk0 else self.blk0
        extra = 1 if (self.num_diff_sp > 0 and self.inc_sp) else 0
        return self.sp_nominal + extra

    @property
    def blk_min(self) -> int:
        """Smallest fill interval length."""
        if not self.invert:
            return self.blk0 if self.blk0 < self.blk1 else self.blk1
        # inverted: the nominal space is the fill length
        dec = 1 if (self.num_diff_sp > 0 and not self.inc_sp) else 0
        return self.sp_nominal - dec

    @property
    def blk_max(self) -> int:
        """Largest fill interval length."""
        if not self.invert:
            return self.blk0 if self.blk0 > self.blk1 else self.blk1
        inc = 1 if (self.num_diff_sp > 0 and self.inc_sp) else 0
        return self.sp_nominal + inc

    def get_fill_area(self, scale: int, extend: int) -> int:
        """Return scale * (total filled length) + extend * (number of fill intervals)."""
        nb1 = self.num_blk1_half
        filled = 2 * (nb1 * self.blk1 + (self.num_half - nb1) * self.blk0)
        if self.blkm >= 0:
            filled += self.blkm
        if self.cyclic and self.fill_on_edge:
            # the edge block is double counted across the two halves
            filled -= self.blk1
        if self.invert:
            # stored blocks are spaces; fill is everything else
            filled = self.tot_area - filled
        return scale * filled + extend * self.num_fill

    def meet_area_specs(self, area_specs: List[Tuple[int, int, int]]) -> bool:
        """Return True if every (target, scale, extend) area spec is satisfied."""
        return all(self.get_fill_area(scale, extend) >= target
                   for target, scale, extend in area_specs)

    def get_area_fom(self, area_specs: List[Tuple[int, int, int]]) -> int:
        """Figure of merit: sum of negative area deficits (0 when all specs pass)."""
        return sum(min(0, self.get_fill_area(scale, extend) - target)
                   for target, scale, extend in area_specs)
+
+
def fill_symmetric_max_density(area: int, n_min: int, n_max: int, sp_min: int,
                               area_specs: List[Tuple[int, int, int]],
                               sp_max: Optional[int] = None, fill_on_edge: bool = True,
                               cyclic: bool = False) -> List[Tuple[int, int]]:
    """Fill a 1-D area for maximum density, then return the fill intervals.

    The solution is symmetric about the center and as uniform as possible,
    every block length lies in [n_min, n_max], and adjacent blocks are at
    least sp_min apart.  See fill_symmetric_max_density_info() for the
    detailed constraint description.

    Parameters
    ----------
    area : int
        total number of space we need to fill.
    n_min : int
        minimum length of the fill block.  Must be less than or equal to n_max.
    n_max : int
        maximum length of the fill block.
    sp_min : int
        minimum space between each fill block.
    area_specs : List[Tuple[int, int, int]]
        list of area specifications, in (target, scale, extension) format.
    sp_max : Optional[int]
        if given, make sure space between blocks does not exceed this value.
        Must be greater than sp_min.
    fill_on_edge : bool
        If True, fill blocks are placed on the area boundary; otherwise space
        blocks are.
    cyclic : bool
        If True, the area is treated as cyclic (it wraps around).

    Returns
    -------
    fill_interval : List[Tuple[int, int]]
        a list of [start, stop) intervals that needs to be filled.
    """
    fill_info = fill_symmetric_max_density_info(
        area, n_min, n_max, sp_min, area_specs,
        sp_max=sp_max, fill_on_edge=fill_on_edge, cyclic=cyclic)
    return fill_symmetric_interval(fill_info)
+
+
def fill_symmetric_min_density(area: int, n_min: int, n_max: int, sp_min: int,
                               area_specs: List[Tuple[int, int, int]],
                               sp_max: Optional[int] = None, fill_on_edge: bool = True,
                               cyclic: bool = False) -> List[Tuple[int, int]]:
    """Fill a 1-D area with the least fill that still satisfies the area specs.

    Convenience wrapper around fill_symmetric_min_density_info() that converts
    the resulting FillInfo into a list of [start, stop) fill intervals.  See
    that function for the full parameter description.
    """
    fill_info = fill_symmetric_min_density_info(
        area, n_min, n_max, sp_min, area_specs,
        sp_max=sp_max, fill_on_edge=fill_on_edge, cyclic=cyclic)
    return fill_symmetric_interval(fill_info)
+
+
def fill_symmetric_min_density_info(area: int, n_min: int, n_max: int, sp_min: int,
                                    area_specs: List[Tuple[int, int, int]],
                                    sp_max: Optional[int] = None, fill_on_edge: bool = True,
                                    cyclic: bool = False) -> FillInfo:
    """Fill the given 1-D area to satisfy minimum density constraint

    Compute fill location such that the given area is filled with the following properties:

    1. the area is as uniform as possible.
    2. the area is symmetric with respect to the center
    3. all fill blocks have lengths between n_min and n_max.
    4. all fill blocks are at least sp_min apart.

    Parameters
    ----------
    area : int
        total number of space we need to fill.
    n_min : int
        minimum length of the fill block. Must be less than or equal to n_max.
    n_max : int
        maximum length of the fill block.
    sp_min : int
        minimum space between each fill block.
    area_specs : List[Tuple[int, int, int]]
        list of area specifications, in (target, scale, extension) format.
    sp_max : Optional[int]
        if given, make sure space between blocks does not exceed this value.
        Must be greater than sp_min
    fill_on_edge : bool
        If True, we put fill blocks on area boundary. Otherwise, we put space block on
        area boundary.
    cyclic : bool
        If True, we assume we're filling in a cyclic area (it wraps around).

    Returns
    -------
    info : FillInfo
        the fill information object.
    """
    # first, fill as much as possible using scale/extension of the first area spec.
    max_result = fill_symmetric_max_density_info(area, n_min, n_max, sp_min, area_specs,
                                                 sp_max=sp_max, fill_on_edge=fill_on_edge,
                                                 cyclic=cyclic)

    if not max_result.meet_area_specs(area_specs):
        # we cannot meet area spec; return max result
        return max_result

    # now, reduce fill by doing binary search on n_max
    # (smaller n_max -> smaller blocks -> less fill area; we keep the smallest
    # n_max whose solution still passes all specs)
    nfill_opt = max_result.num_fill
    n_max_iter = BinaryIterator(n_min, n_max)
    while n_max_iter.has_next():
        n_max_cur = n_max_iter.get_next()
        try:
            info = fill_symmetric_max_num_info(area, nfill_opt, n_min, n_max_cur, sp_min,
                                               fill_on_edge=fill_on_edge, cyclic=cyclic)
            if info.meet_area_specs(area_specs) and (sp_max is None or info.sp_max <= sp_max):
                # both specs passed
                n_max_iter.save_info(info)
                n_max_iter.down()
            else:
                # reduce n_max too much
                n_max_iter.up()

        except ValueError:
            # get here if n_min == n_max and there's no solution.
            n_max_iter.up()

    last_save = n_max_iter.get_last_save_info()
    if last_save is None:
        # no solution, return max result
        return max_result
    else:
        max_result = last_save

    # see if we can further reduce fill by doing binary search on nfill_opt
    # (fewer blocks at the best n_max found above -> even less fill)
    nfill_iter = BinaryIterator(1, nfill_opt)
    n_max = n_max_iter.get_last_save()
    while nfill_iter.has_next():
        nfill_cur = nfill_iter.get_next()
        try:
            info = fill_symmetric_max_num_info(area, nfill_cur, n_min, n_max, sp_min,
                                               fill_on_edge=fill_on_edge, cyclic=cyclic)
            if info.meet_area_specs(area_specs) and (sp_max is None or info.sp_max <= sp_max):
                # both specs passed
                nfill_iter.save_info(info)
                nfill_iter.down()
            else:
                # reduce nfill too much
                nfill_iter.up()

        except ValueError:
            nfill_iter.up()

    last_save = nfill_iter.get_last_save_info()
    if last_save is None:
        return max_result
    # return new minimum solution
    return last_save
+
+
def fill_symmetric_max_density_info(area: int, n_min: int, n_max: int, sp_min: int,
                                    area_specs: List[Tuple[int, int, int]],
                                    sp_max: Optional[int] = None, fill_on_edge: bool = True,
                                    cyclic: bool = False) -> FillInfo:
    """Fill the given 1-D area with density constraints, using largest blocks possible.

    Compute fill location such that the given area is filled with the following properties:

    1. the area is as uniform as possible.
    2. the area is symmetric with respect to the center
    3. all fill blocks have lengths between n_min and n_max.
    4. all fill blocks are at least sp_min apart.
    5. we do the best to meet area specs by using the largest blocks possible.

    Parameters
    ----------
    area : int
        total number of space we need to fill.
    n_min : int
        minimum length of the fill block. Must be less than or equal to n_max.
    n_max : int
        maximum length of the fill block.
    sp_min : int
        minimum space between each fill block.
    area_specs : List[Tuple[int, int, int]]
        list of area specifications, in (target, scale, extension) format.
    sp_max : Optional[int]
        if given, make sure space between blocks does not exceed this value.
        Must be greater than sp_min
    fill_on_edge : bool
        If True, we put fill blocks on area boundary. Otherwise, we put space block on
        area boundary.
    cyclic : bool
        If True, we assume we're filling in a cyclic area (it wraps around).

    Returns
    -------
    info : FillInfo
        the fill information object.

    Raises
    ------
    MaxSpaceTooStrictError
        if sp_max is given but no fill count can satisfy it.
    """

    # min area test
    nfill_min = 1
    try:
        try:
            fill_symmetric_max_num_info(area, nfill_min, n_min, n_max, sp_min,
                                        fill_on_edge=fill_on_edge, cyclic=cyclic)
        except (NoFillAbutEdgeError, NoFillChoiceError):
            # we need at least 2 fills
            nfill_min = 2
            fill_symmetric_max_num_info(area, nfill_min, n_min, n_max, sp_min,
                                        fill_on_edge=fill_on_edge, cyclic=cyclic)
    except InsufficientAreaError:
        # cannot fill at all
        return _fill_symmetric_info(area, 0, area, inc_sp=False,
                                    fill_on_edge=fill_on_edge, cyclic=cyclic)

    if sp_max is not None:
        # find minimum nfill that meets sp_max spec
        if sp_max <= sp_min:
            raise ValueError(f'Cannot have sp_max = {sp_max} <= {sp_min} = sp_min')

        def sp_max_fun(nfill):
            # cost is negated sp_max so that minimizing drives sp_max below the target
            try:
                info2 = fill_symmetric_max_num_info(area, nfill, n_min, n_max, sp_min,
                                                    fill_on_edge=fill_on_edge, cyclic=cyclic)
                return -info2.sp_max
            except ValueError:
                return -sp_max - 1

        min_result = minimize_cost_golden(sp_max_fun, -sp_max, offset=nfill_min, maxiter=None)
        if min_result.x is None:
            # try even steps
            min_result = minimize_cost_golden(sp_max_fun, -sp_max, offset=nfill_min,
                                              step=2, maxiter=None)
            nfill_min = min_result.x
            if nfill_min is None:
                raise MaxSpaceTooStrictError(f'No solution for sp_max = {sp_max}')
        else:
            nfill_min = min_result.x

    # fill area first monotonically increases with number of fill blocks, then monotonically
    # decreases (as we start adding more space than fill). Therefore, a golden section search
    # can be done on the number of fill blocks to determine the optimum.
    worst_fom = -sum((spec[0] for spec in area_specs))

    def area_fun(nfill):
        try:
            info2 = fill_symmetric_max_num_info(area, nfill, n_min, n_max, sp_min,
                                                fill_on_edge=fill_on_edge, cyclic=cyclic)
            return info2.get_area_fom(area_specs)
        except ValueError:
            return worst_fom

    min_result = minimize_cost_golden(area_fun, area, offset=nfill_min, maxiter=None)
    nfill_opt = min_result.x
    if nfill_opt is None:
        nfill_opt = min_result.xmax
    info = fill_symmetric_max_num_info(area, nfill_opt, n_min, n_max, sp_min,
                                       fill_on_edge=fill_on_edge, cyclic=cyclic)
    return info
+
+
class MaxSpaceTooStrictError(ValueError):
    """Raised when no fill count can satisfy the given maximum-space constraint."""
    pass


class InsufficientAreaError(ValueError):
    """Raised when the area is too small to draw the requested fill blocks."""
    pass


class FillTooSmallError(ValueError):
    """Raised when the required fill blocks would be shorter than the minimum length."""
    pass


class NoFillAbutEdgeError(ValueError):
    """Raised when a single fill block cannot abut both area edges."""
    pass


class NoFillChoiceError(ValueError):
    """Raised when no solution exists with one fixed fill length (n_min == n_max)."""
    pass


class EmptyRegionError(ValueError):
    """Raised when a zero-fill region is inconsistent with the given space value."""
    pass
+
+
def fill_symmetric_max_num_info(tot_area: int, nfill: int, n_min: int, n_max: int, sp_min: int,
                                fill_on_edge: bool = True, cyclic: bool = False) -> FillInfo:
    """Fill the given 1-D area as much as possible with given number of fill blocks.

    Compute fill location such that the given area is filled with the following properties:

    1. the area is as uniform as possible.
    2. the area is symmetric with respect to the center
    3. the area is filled as much as possible with exactly nfill blocks,
       with lengths between n_min and n_max.
    4. all fill blocks are at least sp_min apart.

    Parameters
    ----------
    tot_area : int
        total number of space we need to fill.
    nfill : int
        number of fill blocks to draw.
    n_min : int
        minimum length of the fill block. Must be less than or equal to n_max.
    n_max : int
        maximum length of the fill block.
    sp_min : int
        minimum space between each fill block.
    fill_on_edge : bool
        If True, we put fill blocks on area boundary. Otherwise, we put space block on
        area boundary.
    cyclic : bool
        If True, we assume we're filling in a cyclic area (it wraps around).

    Returns
    -------
    info : FillInfo
        the fill information object.

    Raises
    ------
    InsufficientAreaError
        if tot_area cannot hold nfill blocks of length n_min spaced sp_min apart.
    FillTooSmallError
        if the uniform solution forces a block shorter than n_min.
    NoFillAbutEdgeError
        if nfill == 1 with fill on both edges but the block cannot span the area.
    NoFillChoiceError
        if n_min == n_max but no solution with a single fill length exists.
    """
    # error checking
    if nfill < 0:
        raise ValueError(f'nfill = {nfill} < 0')
    if n_min > n_max:
        raise ValueError(f'n_min = {n_min} > {n_max} = n_max')
    if n_min <= 0:
        raise ValueError(f'n_min = {n_min} <= 0')

    if nfill == 0:
        # no fill at all
        return _fill_symmetric_info(tot_area, 0, tot_area, inc_sp=False,
                                    fill_on_edge=False, cyclic=False)

    # check no solution
    # number of spaces relative to number of blocks depends on edge configuration
    sp_delta = 0 if cyclic else (-1 if fill_on_edge else 1)
    nsp = nfill + sp_delta
    if n_min * nfill + nsp * sp_min > tot_area:
        raise InsufficientAreaError(f'Cannot draw {nfill} fill blocks with n_min = {n_min}')

    # first, try drawing nfill blocks without block length constraint.
    # may throw exception if no solution
    info = _fill_symmetric_info(tot_area, nfill, sp_min, inc_sp=True,
                                fill_on_edge=fill_on_edge, cyclic=cyclic)
    if info.blk_min < n_min:
        # could get here if cyclic = True, fill_on_edge = True, n_min is odd
        # in this case actually no solution
        raise FillTooSmallError(f'Cannot draw {nfill} fill blocks with n_min = {n_min}')
    if info.blk_max <= n_max:
        # we satisfy block length constraint, just return
        return info

    # we broke maximum block length constraint, so we flip
    # space and fill to have better control on fill length
    if nsp == 0 and n_max != tot_area and n_max - 1 != tot_area:
        # we get here only if nfill = 1 and fill_on_edge is True.
        # In this case there's no way to draw only one fill and abut both edges
        raise NoFillAbutEdgeError('Cannot draw only one fill abutting both edges.')
    info = _fill_symmetric_info(tot_area, nsp, n_max, inc_sp=False,
                                fill_on_edge=not fill_on_edge, cyclic=cyclic)
    if info.num_diff_sp > 0 and n_min == n_max:
        # no solution with same fill length, but we must have same fill length everywhere.
        raise NoFillChoiceError(f'Cannot draw {nfill} fill blocks with n_min = n_max = {n_min}')
    info.invert = True
    return info
+
+
def fill_symmetric_const_space(area: int, sp_max: int, n_min: int, n_max: int
                               ) -> List[Tuple[int, int]]:
    """Fill the given 1-D area given maximum space spec alone.

    The method draws the minimum number of fill blocks needed to satisfy maximum spacing spec.
    The given area is filled with the following properties:

    1. all spaces are as close to the given space as possible (differ by at most 1),
       without exceeding it.
    2. the filled area is as uniform as possible.
    3. the filled area is symmetric about the center.
    4. fill is drawn as much as possible given the above constraints.

    fill is drawn such that space blocks abuts both area boundaries.

    Parameters
    ----------
    area : int
        the 1-D area to fill.
    sp_max : int
        the maximum space.
    n_min : int
        minimum fill length.
    n_max : int
        maximum fill length

    Returns
    -------
    fill_intv : List[Tuple[int, int]]
        list of fill intervals.
    """
    if n_min > n_max:
        raise ValueError(f'min fill length = {n_min} > {n_max} = max fill length')

    # suppose we draw N fill blocks, then the filled area is A - (N + 1) * sp.
    # therefore, to maximize fill, with A and sp given, we need to minimize N.
    # since N = (A - sp) / (f + sp), where f is length of the fill, this tells
    # us we want to try filling with max block.
    # so we calculate the maximum number of fill blocks we'll use if we use
    # largest fill block.
    num_fill = -(-(area - sp_max) // (n_max + sp_max))
    if num_fill == 0:
        # we don't need fill; total area is less than sp_max.
        return []

    # at this point, using (num_fill - 1) max blocks is not enough, but num_fill
    # max blocks either fits perfectly or exceeds area.

    # calculate the fill block length if we use num_fill fill blocks, and sp_max
    # between blocks.
    blk_len = (area - (num_fill + 1) * sp_max) // num_fill
    if blk_len >= n_min:
        # we can draw fill using num_fill fill blocks.
        return fill_symmetric_helper(area, num_fill, sp_max, inc_sp=False,
                                     invert=False, fill_on_edge=False, cyclic=False)

    # trying to draw num_fill fill blocks with sp_max between them results in fill blocks
    # that are too small. This means we need to reduce the space between fill blocks.
    sp_max, remainder = divmod(area - num_fill * n_min, num_fill + 1)
    # we can achieve the new sp_max using fill with length n_min or n_min + 1.
    if n_max > n_min or remainder == 0:
        # if everything divides evenly or we can use two different fill lengths,
        # then we're done.
        return fill_symmetric_helper(area, num_fill, sp_max, inc_sp=False,
                                     invert=False, fill_on_edge=False, cyclic=False)

    # If we're here, then we must use only one fill length.
    # fill by inverting fill/space to try to get only one fill length.
    # BUG FIX: fill_symmetric_helper() returns only the interval list, not a
    # (list, num_diff_sp) tuple, so the original tuple-unpacking here raised
    # ValueError at runtime.  Query _fill_symmetric_info() directly instead,
    # which exposes num_diff_sp on the FillInfo object.
    info = _fill_symmetric_info(area, num_fill + 1, n_max, inc_sp=False,
                                fill_on_edge=True, cyclic=False)
    if info.num_diff_sp == 0:
        # we manage to fill using only one fill length
        info.invert = True
        return fill_symmetric_interval(info)

    # If we're here, that means num_fill + 1 is even. So using num_fill + 2 will
    # guarantee solution.
    return fill_symmetric_helper(area, num_fill + 2, n_max, inc_sp=False,
                                 invert=True, fill_on_edge=True, cyclic=False)
+
+
def fill_symmetric_helper(tot_area: int, num_blk_tot: int, sp: int,
                          inc_sp: bool = True, invert: bool = False,
                          fill_on_edge: bool = True, cyclic: bool = False) -> List[Tuple[int, int]]:
    """Helper method for all fill symmetric methods.

    Fills an area with the given number of fill blocks such that the space
    between blocks equals the given space.  The other fill_symmetric methods
    transpose their constraints into this problem with the proper options.

    The solution has the following properties:

    1. it is symmetric about the center.
    2. it is as uniform as possible.
    3. it uses at most 3 consecutive values of fill lengths.
    4. it uses at most 2 consecutive values of space lengths.  If inc_sp is True,
       we use sp and sp + 1.  If inc_sp is False, we use sp - 1 and sp.  In addition,
       at most two space blocks have length different than sp.

    Scenarios that affect the number of different fill/space lengths:

    1. All spaces will be equal to sp under any of the following conditions:

       i. cyclic is False, and num_blk_tot is odd.
       ii. cyclic is True, fill_on_edge is True, and num_blk_tot is even.
       iii. cyclic is True, fill_on_edge is False, sp is even, and num_blk_tot is odd.

       In particular, this means if you must have the same space between fill blocks,
       you can change num_blk_tot by 1.
    2. The only case where at most 2 space blocks have length different than sp is
       when cyclic is True, fill_on_edge is False, sp is odd, and num_blk_tot is even.
    3. In all other cases, at most 1 space block has length different than sp.
    4. The only case where at most 3 fill lengths are used is when cyclic is True,
       fill_on_edge is True, and num_blk_tot is even.

    Parameters
    ----------
    tot_area : int
        the fill area length.
    num_blk_tot : int
        total number of fill blocks to use.
    sp : int
        space between blocks.  We will try our best to keep this spacing constant.
    inc_sp : bool
        If True, then we use sp + 1 if necessary.  Otherwise, we use sp - 1
        if necessary.
    invert : bool
        If True, we return space intervals instead of fill intervals.
    fill_on_edge : bool
        If True, we put fill blocks on area boundary.  Otherwise, we put space
        block on area boundary.
    cyclic : bool
        If True, we assume we're filling in a cyclic area (it wraps around).

    Returns
    -------
    ans : List[(int, int)]
        list of fill or space intervals.
    """
    info = _fill_symmetric_info(tot_area, num_blk_tot, sp, inc_sp=inc_sp,
                                fill_on_edge=fill_on_edge, cyclic=cyclic)
    info.invert = invert
    return fill_symmetric_interval(info)
+
+
def _fill_symmetric_info(tot_area: int, num_blk_tot: int, sp: int, inc_sp: bool = True,
                         fill_on_edge: bool = True, cyclic: bool = False) -> FillInfo:
    """Calculate symmetric fill information.

    This method computes fill information without generating fill interval list. This makes
    it fast to explore various fill settings. See fill_symmetric_helper() to see a description
    of the fill algorithm.

    Parameters
    ----------
    tot_area : int
        the fill area length.
    num_blk_tot : int
        total number of fill blocks to use.
    sp : int
        space between blocks. We will try our best to keep this spacing constant.
    inc_sp : bool
        If True, then we use sp + 1 if necessary. Otherwise, we use sp - 1
        if necessary.
    fill_on_edge : bool
        If True, we put fill blocks on area boundary. Otherwise, we put space block on
        area boundary.
    cyclic : bool
        If True, we assume we're filling in a cyclic area (it wraps around).

    Returns
    -------
    info : FillInfo
        the fill information object.

    Raises
    ------
    EmptyRegionError
        if num_blk_tot == 0 but sp is inconsistent with tot_area.
    InsufficientAreaError
        if the computed fill block length is not positive.
    """
    # error checking
    if num_blk_tot < 0:
        raise ValueError(f'num_blk_tot = {num_blk_tot} < 0')

    # +1 when we are allowed to lengthen spaces, -1 when we shorten them
    adj_sp_sgn = 1 if inc_sp else -1
    if num_blk_tot == 0:
        # special case, no fill at all
        if sp == tot_area:
            return FillInfo(tot_area, tot_area, tot_area, tot_area, 0, 0, -1, 0, 0, 0,
                            inc_sp, False, cyclic, False)
        elif sp == tot_area - adj_sp_sgn:
            return FillInfo(tot_area, tot_area, tot_area, tot_area, 0, 0, -1, 0, 1, 0,
                            inc_sp, False, cyclic, False)
        else:
            raise EmptyRegionError(f'Cannot have empty region = {tot_area} with sp = {sp}')

    # determine the number of space blocks
    if cyclic:
        num_sp_tot = num_blk_tot
    else:
        if fill_on_edge:
            num_sp_tot = num_blk_tot - 1
        else:
            num_sp_tot = num_blk_tot + 1

    # compute total fill area
    fill_area = tot_area - num_sp_tot * sp

    # find minimum fill length; num_blk1 is the leftover fill units to distribute
    blk_len, num_blk1 = divmod(fill_area, num_blk_tot)
    # find number of fill intervals
    if cyclic and fill_on_edge:
        # if cyclic and fill on edge, number of intervals = number of blocks + 1,
        # because the interval on the edge double counts.
        num_blk_interval = num_blk_tot + 1
    else:
        num_blk_interval = num_blk_tot

    # find space length on edge, if applicable
    num_diff_sp = 0
    sp_edge = sp
    if cyclic and not fill_on_edge and sp_edge % 2 == 1:
        # edge space must be even. To fix, we convert space to fill
        num_diff_sp += 1
        sp_edge += adj_sp_sgn
        num_blk1 += -adj_sp_sgn
        fill_area += -adj_sp_sgn
        # renormalize so 0 <= num_blk1 < num_blk_tot after the adjustment
        if num_blk1 == num_blk_tot:
            blk_len += 1
            num_blk1 = 0
        elif num_blk1 < 0:
            blk_len -= 1
            num_blk1 += num_blk_tot

    blk_m = sp_mid = -1
    # now we have num_blk_tot blocks with length blk0. We have num_blk1 fill units
    # remaining that we need to distribute to the fill blocks
    if num_blk_interval % 2 == 0:
        # we have even number of fill intervals, so we have a space block in the middle
        sp_mid = sp
        # test condition for cyclic and fill_on_edge is different than other cases
        test_val = num_blk1 + blk_len if cyclic and fill_on_edge else num_blk1
        if test_val % 2 == 1:
            # we cannot distribute remaining fill units evenly, have to convert to space
            num_diff_sp += 1
            sp_mid += adj_sp_sgn
            num_blk1 += -adj_sp_sgn
            fill_area += -adj_sp_sgn
            if num_blk1 == num_blk_tot:
                blk_len += 1
                num_blk1 = 0
            elif num_blk1 < 0:
                blk_len -= 1
                num_blk1 += num_blk_tot
        if num_blk1 % 2 == 1:
            # the only way we get here is if cyclic and fill_on_edge is True.
            # in this case, we need to add one to fill unit to account
            # for edge fill double counting.
            num_blk1 += 1

        # get number of half fill intervals
        num_half = num_blk_interval // 2
    else:
        # we have odd number of fill intervals, so we have a fill block in the middle
        blk_m = blk_len
        if cyclic and fill_on_edge:
            # special handling for this case, because edge fill block must be even
            if blk_len % 2 == 0 and num_blk1 % 2 == 1:
                # assign one fill unit to middle block
                blk_m += 1
                num_blk1 -= 1
            elif blk_len % 2 == 1:
                # edge fill block is odd; we need odd number of fill units so we can
                # correct this.
                if num_blk1 % 2 == 0:
                    # we increment middle fill block to get odd number of fill units
                    blk_m += 1
                    num_blk1 -= 1
                    if num_blk1 < 0:
                        # we get here only if num_blk1 == 0. This means middle blk
                        # borrow one unit from edge block. So we set num_blk1 to
                        # num_blk_tot - 2 to make sure rest of the blocks are one
                        # larger than edge block.
                        blk_len -= 1
                        num_blk1 = num_blk_tot - 2
                    else:
                        # Add one to account for edge fill double counting.
                        num_blk1 += 1
                else:
                    # Add one to account for edge fill double counting.
                    num_blk1 += 1
        elif num_blk1 % 2 == 1:
            # assign one fill unit to middle block
            blk_m += 1
            num_blk1 -= 1

        num_half = (num_blk_interval - 1) // 2

    if blk_len <= 0:
        raise InsufficientAreaError('Insufficient area; cannot draw fill with length <= 0.')

    # now we need to distribute the fill units evenly. We do so using cumulative modding
    num_large = num_blk1 // 2
    num_small = num_half - num_large
    if cyclic and fill_on_edge:
        # if cyclic and fill is on the edge, we need to make sure left-most block is even length
        if blk_len % 2 == 0:
            blk1, blk0 = blk_len, blk_len + 1
            num_blk1_half = num_small
        else:
            blk0, blk1 = blk_len, blk_len + 1
            num_blk1_half = num_large
    else:
        # make left-most fill interval be the most frequent fill length
        if num_large >= num_small:
            blk0, blk1 = blk_len, blk_len + 1
            num_blk1_half = num_large
        else:
            blk1, blk0 = blk_len, blk_len + 1
            num_blk1_half = num_small

    return FillInfo(tot_area, sp, sp_edge, sp_mid, blk0, blk1, blk_m, num_half, num_diff_sp,
                    num_blk1_half, inc_sp, fill_on_edge, cyclic, False)
+
+
def fill_symmetric_interval(info: FillInfo, d0: int = 0, d1: int = 0, scale: int = 1
                            ) -> List[Tuple[int, int]]:
    """Construct interval list from FillInfo object.

    Parameters
    ----------
    info : FillInfo
        the FillInfo object.
    d0 : int
        offset of the starting coordinate.
    d1 : int
        offset of the stopping coordinate.
    scale : int
        the scale factor.

    Returns
    -------
    ans : List[Tuple[int, int]]
        list of fill intervals (space intervals if info.invert is True), each
        scaled by `scale` and offset by d0/d1.
    """
    tot_area = info.tot_area
    sp_nominal = info.sp_nominal
    sp_edge = info.sp_edge
    sp_mid = info.sp_mid
    blk0 = info.blk0
    blk1 = info.blk1
    blkm = info.blkm
    num_half = info.num_half
    num_blk1_half = info.num_blk1_half
    fill_on_edge = info.fill_on_edge
    cyclic = info.cyclic
    invert = info.invert

    ans: List[Tuple[int, int]] = []
    # in cyclic mode the first edge block/space straddles the origin,
    # so the marker starts at a negative coordinate
    if cyclic:
        if fill_on_edge:
            marker = -(blk1 // 2)
        else:
            marker = -(sp_edge // 2)
    else:
        marker = 0
    cur_sum = 0
    prev_sum = 1
    for fill_idx in range(num_half):
        # determine current fill length from cumulative modding result
        if cur_sum <= prev_sum:
            cur_len = blk1
        else:
            cur_len = blk0

        cur_sp = sp_edge if fill_idx == 0 else sp_nominal
        # record fill/space interval
        if invert:
            if fill_on_edge:
                ans.append((scale * (marker + cur_len) + d0,
                            scale * (marker + cur_sp + cur_len) + d1))
            else:
                ans.append((scale * marker + d0,
                            scale * (marker + cur_sp) + d1))
        else:
            if fill_on_edge:
                ans.append((scale * marker + d0,
                            scale * (marker + cur_len) + d1))
            else:
                ans.append((scale * (marker + cur_sp) + d0,
                            scale * (marker + cur_sp + cur_len) + d1))

        marker += cur_len + cur_sp
        # cumulative modding spreads the blk1-length blocks evenly over the half
        prev_sum = cur_sum
        cur_sum = (cur_sum + num_blk1_half) % num_half

    # add middle fill or space
    if blkm >= 0:
        # fill in middle
        if invert:
            if not fill_on_edge:
                # we have one more space block before reaching middle block
                cur_sp = sp_edge if num_half == 0 else sp_nominal
                ans.append((scale * marker + d0, scale * (marker + cur_sp) + d1))
            half_len = len(ans)
        else:
            # we don't want to replicate middle fill, so get half length now
            half_len = len(ans)
            if fill_on_edge:
                ans.append((scale * marker + d0, scale * (marker + blkm) + d1))
            else:
                cur_sp = sp_edge if num_half == 0 else sp_nominal
                ans.append((scale * (marker + cur_sp) + d0, scale * (marker + cur_sp + blkm) + d1))
    else:
        # space in middle
        if invert:
            if fill_on_edge:
                # the last space we added is wrong, we need to remove
                del ans[-1]
                marker -= sp_nominal
            # we don't want to replicate middle space, so get half length now
            half_len = len(ans)
            ans.append((scale * marker + d0, scale * (marker + sp_mid) + d1))
        else:
            # don't need to do anything if we're recording blocks
            half_len = len(ans)

    # now add the second half of the list by mirroring the first half about the center
    shift = scale * tot_area + d0 + d1
    for idx in range(half_len - 1, -1, -1):
        start, stop = ans[idx]
        ans.append((shift - stop, shift - start))

    return ans
diff --git a/src/bag/layout/routing/grid.py b/src/bag/layout/routing/grid.py
new file mode 100644
index 0000000..080bf1d
--- /dev/null
+++ b/src/bag/layout/routing/grid.py
@@ -0,0 +1,758 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines the RoutingGrid class.
+"""
+
+from __future__ import annotations
+
+from typing import Tuple, List, Optional, Dict, Any, Union
+
+from warnings import warn
+from dataclasses import dataclass
+
+from pybag.core import PyRoutingGrid, Transform, coord_to_custom_htr
+from pybag.enum import Orient2D, Direction, RoundMode
+
+from bag.util.search import BinaryIterator
+from bag.math import lcm
+from bag.layout.tech import TechInfo
+
+from ...util.math import HalfInt
+from ...typing import TrackType
+
+SizeType = Tuple[int, HalfInt, HalfInt]
+FillConfigType = Dict[int, Tuple[int, int, int, int]]
+OptHalfIntType = Optional[HalfInt]
+
+
@dataclass(frozen=True, eq=True)
class TrackSpec:
    """Immutable specification of the routing tracks on a single layer.

    Used to override track settings when deriving a new :class:`RoutingGrid`.
    """
    # layer ID this specification applies to
    layer: int
    # routing direction of the tracks on this layer
    direction: Orient2D
    # wire width (presumably in resolution units -- confirm against grid config)
    width: int
    # wire spacing (presumably in resolution units -- confirm against grid config)
    space: int
    # track offset (presumably in resolution units -- confirm against grid config)
    offset: int
+
+
+class RoutingGrid(PyRoutingGrid):
+ """A class that represents the routing grid.
+
+ This class provides various methods to convert between Cartesian coordinates and
+ routing tracks. This class assumes the lower-left coordinate is (0, 0)
+
+    The track numbers are at half-track pitch. That is, even track numbers correspond
+    to physical tracks, and odd track numbers correspond to the midpoint between two tracks.
+ This convention is chosen so it is easy to locate a via for 2-track wide wires, for
+ example.
+
+ Assumptions:
+
+ 1. the pitch of all layers evenly divides the largest pitch.
+
+ Parameters
+ ----------
+ tech_info : TechInfo
+ the TechInfo instance used to create metals and vias.
+ config_fname : str
+ the routing grid configuration file.
+ copy : Optional[PyRoutingGrid]
+        if not None, create a new routing grid identical to the given one
+ """
+
+ def __init__(self, tech_info: TechInfo, config_fname: str,
+ copy: Optional[PyRoutingGrid] = None) -> None:
+ if copy is None:
+ PyRoutingGrid.__init__(self, tech_info, config_fname)
+ else:
+ PyRoutingGrid.__init__(self, copy)
+ self._tech_info = tech_info
+
+ @classmethod
+ def get_middle_track(cls, tr1: TrackType, tr2: TrackType, round_up: bool = False) -> HalfInt:
+ """Get the track between the two given tracks."""
+ tmp = HalfInt.convert(tr1)
+ return (tmp + tr2).div2(round_up=round_up)
+
+ @property
+ def tech_info(self) -> TechInfo:
+ """TechInfo: The TechInfo technology object."""
+ return self._tech_info
+
+ def is_horizontal(self, layer_id: int) -> bool:
+ """Returns true if the given layer is horizontal."""
+ return self.get_direction(layer_id) is Orient2D.x
+
+ def get_num_tracks(self, size: SizeType, layer_id: int) -> HalfInt:
+ """Returns the number of tracks on the given layer for a block with the given size.
+
+ Parameters
+ ----------
+ size : SizeType
+ the block size tuple.
+ layer_id : int
+ the layer ID.
+
+ Returns
+ -------
+ num_tracks : HalfInt
+ number of tracks on that given layer.
+ """
+ blk_dim = self.get_size_dimension(size)[self.get_direction(layer_id).value]
+ tr_half_pitch = self.get_track_pitch(layer_id) // 2
+ return HalfInt(blk_dim // tr_half_pitch)
+
+ def dim_to_num_tracks(self, layer_id: int, dim: int, round_mode: RoundMode = RoundMode.NONE
+ ) -> HalfInt:
+ """Returns how many track pitches are in the given dimension."""
+ tr_pitch2 = self.get_track_pitch(layer_id) // 2
+ q, r = divmod(dim, tr_pitch2)
+ if round_mode is RoundMode.NONE:
+ if r != 0:
+ raise ValueError(f'Dimension {dim} is not divisible by half-pitch {tr_pitch2}')
+ elif round_mode is RoundMode.LESS:
+ q -= (r == 0)
+ elif round_mode is RoundMode.GREATER_EQ:
+ q += (r != 0)
+ elif round_mode is RoundMode.GREATER:
+ q += 1
+
+ return HalfInt(q)
+
+ def get_sep_tracks(self, layer: int, ntr1: int = 1, ntr2: int = 1,
+ same_color: bool = False, half_space: bool = True) -> HalfInt:
+ """Returns the track separations needed between two adjacent wires.
+
+ Parameters
+ ----------
+ layer : int
+ wire layer ID.
+ ntr1 : int
+ width (in number of tracks) of the first wire.
+ ntr2 : int
+ width (in number of tracks) of the second wire.
+ same_color : bool
+ True to assume they have the same color.
+ half_space : bool
+ True to allow half-track spacing.
+
+ Returns
+ -------
+ sep_index : HalfInt
+ minimum track index difference of the adjacent wires
+ """
+ htr = self.get_sep_htr(layer, ntr1, ntr2, same_color)
+ return HalfInt(htr + (htr & (not half_space)))
+
+ def get_line_end_sep_tracks(self, layer_dir: Direction, le_layer: int, le_ntr: int = 1,
+ adj_ntr: int = 1, half_space: bool = True) -> HalfInt:
+ """Returns the track separations needed to satisfy via extension + line-end constraints.
+
+ When you have two separate wires on the same track and need to connect them to adjacent
+ layers, if the adjacent wires are too close, the via extensions could violate
+ line-end spacing constraints. This method computes the minimum track index difference
+ those two wires must have to avoid this error.
+
+ Parameters
+ ----------
+ layer_dir : Direction
+ the direction of the specified layer. LOWER if the layer is the
+ bottom layer, UPPER if the layer is the top layer.
+ le_layer : int
+ line-end wire layer ID.
+ le_ntr : int
+ width (in number of tracks) of the line-end wire.
+ adj_ntr : int
+ width (in number of tracks) of the wire on the adjacent layer.
+ half_space : bool
+ True to allow half-track spacing.
+
+ Returns
+ -------
+ sep_index : HalfInt
+ minimum track index difference of the adjacent wires
+ """
+ htr = self.get_line_end_sep_htr(layer_dir.value, le_layer, le_ntr, adj_ntr)
+ return HalfInt(htr + (htr & (not half_space)))
+
+ def get_max_track_width(self, layer_id: int, num_tracks: int, tot_space: int,
+ half_end_space: bool = False) -> int:
+ """Compute maximum track width and space that satisfies DRC rule.
+
+ Given available number of tracks and numbers of tracks needed, returns
+ the maximum possible track width.
+
+ Parameters
+ ----------
+ layer_id : int
+ the track layer ID.
+ num_tracks : int
+ number of tracks to draw.
+ tot_space : int
+ available number of tracks.
+ half_end_space : bool
+ True if end spaces can be half of minimum spacing. This is true if you're
+ these tracks will be repeated, or there are no adjacent tracks.
+
+ Returns
+ -------
+ tr_w : int
+ track width.
+ """
+ bin_iter = BinaryIterator(1, None)
+ while bin_iter.has_next():
+ tr_w = bin_iter.get_next()
+ tr_sep = self.get_sep_tracks(layer_id, tr_w, tr_w)
+ if half_end_space:
+ used_tracks = tr_sep * num_tracks
+ else:
+ used_tracks = tr_sep * (num_tracks - 1) + 2 * self.get_sep_tracks(layer_id, tr_w, 1)
+ if used_tracks > tot_space:
+ bin_iter.down()
+ else:
+ bin_iter.save()
+ bin_iter.up()
+
+ opt_w = bin_iter.get_last_save()
+ return opt_w
+
+ @staticmethod
+ def get_evenly_spaced_tracks(num_tracks: int, tot_space: int, track_width: int,
+ half_end_space: bool = False) -> List[HalfInt]:
+ """Evenly space given number of tracks in the available space.
+
+ Currently this method may return half-integer tracks.
+
+ Parameters
+ ----------
+ num_tracks : int
+ number of tracks to draw.
+ tot_space : int
+ avilable number of tracks.
+ track_width : int
+ track width in number of tracks.
+ half_end_space : bool
+ True if end spaces can be half of minimum spacing. This is true if you're
+ these tracks will be repeated, or there are no adjacent tracks.
+
+ Returns
+ -------
+ idx_list : List[HalfInt]
+ list of track indices. 0 is the left-most track.
+ """
+ if half_end_space:
+ tot_space_htr = 2 * tot_space
+ scale = 2 * tot_space_htr
+ offset = tot_space_htr + num_tracks
+ den = 2 * num_tracks
+ else:
+ tot_space_htr = 2 * tot_space
+ width_htr = 2 * track_width - 2
+ # magic math. You can work it out
+ scale = 2 * (tot_space_htr + width_htr)
+ offset = 2 * tot_space_htr - width_htr * (num_tracks - 1) + (num_tracks + 1)
+ den = 2 * (num_tracks + 1)
+
+ return [HalfInt((scale * idx + offset) // den - 1) for idx in range(num_tracks)]
+
+ def get_fill_size(self, top_layer: int, fill_config: FillConfigType, *,
+ include_private: bool = False, half_blk_x: bool = True,
+ half_blk_y: bool = True) -> Tuple[int, int]:
+ """Returns unit block size given the top routing layer and power fill configuration.
+
+ Parameters
+ ----------
+ top_layer : int
+ the top layer ID.
+ fill_config : Dict[int, Tuple[int, int, int, int]]
+ the fill configuration dictionary.
+ include_private : bool
+ True to include private layers in block size calculation.
+ half_blk_x : bool
+ True to allow half-block widths.
+ half_blk_y : bool
+ True to allow half-block heights.
+
+ Returns
+ -------
+ block_width : int
+ the block width in resolution units.
+ block_height : int
+ the block height in resolution units.
+ """
+ blk_w, blk_h = self.get_block_size(top_layer, include_private=include_private,
+ half_blk_x=half_blk_x, half_blk_y=half_blk_y)
+
+ dim_list = [[blk_w], [blk_h]]
+ for lay, (tr_w, tr_sp, _, _) in fill_config.items():
+ if lay <= top_layer:
+ cur_pitch = self.get_track_pitch(lay)
+ cur_dim = (tr_w + tr_sp) * cur_pitch * 2
+ dim_list[1 - self.get_direction(lay).value].append(cur_dim)
+
+ blk_w = lcm(dim_list[0])
+ blk_h = lcm(dim_list[1])
+ return blk_w, blk_h
+
+ def get_size_tuple(self, layer_id: int, width: int, height: int, *, round_up: bool = False,
+ half_blk_x: bool = False, half_blk_y: bool = False) -> SizeType:
+ """Compute the size tuple corresponding to the given width and height from block pitch.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer ID.
+ width : int
+ width of the block, in resolution units.
+ height : int
+ height of the block, in resolution units.
+ round_up : bool
+ True to round up instead of raising an error if the given width and height
+ are not on pitch.
+ half_blk_x : bool
+ True to allow half-block widths.
+ half_blk_y : bool
+ True to allow half-block heights.
+
+ Returns
+ -------
+ size : SizeType
+ the size tuple. the first element is the top layer ID, second element is the width in
+ number of vertical tracks, and third element is the height in number of
+ horizontal tracks.
+ """
+ w_pitch, h_pitch = self.get_size_pitch(layer_id)
+
+ wblk, hblk = self.get_block_size(layer_id, half_blk_x=half_blk_x, half_blk_y=half_blk_y)
+ if width % wblk != 0:
+ if round_up:
+ width = -(-width // wblk) * wblk
+ else:
+ raise ValueError('width = %d not on block pitch (%d)' % (width, wblk))
+ if height % hblk != 0:
+ if round_up:
+ height = -(-height // hblk) * hblk
+ else:
+ raise ValueError('height = %d not on block pitch (%d)' % (height, hblk))
+
+ return layer_id, HalfInt(2 * width // w_pitch), HalfInt(2 * height // h_pitch)
+
+ def get_size_dimension(self, size: SizeType) -> Tuple[int, int]:
+ """Compute width and height from given size.
+
+ Parameters
+ ----------
+ size : SizeType
+ size of a block.
+
+ Returns
+ -------
+ width : int
+ the width in resolution units.
+ height : int
+ the height in resolution units.
+ """
+ w_pitch, h_pitch = self.get_size_pitch(size[0])
+ return int(size[1] * w_pitch), int(size[2] * h_pitch)
+
+ def convert_size(self, size: SizeType, new_top_layer: int) -> SizeType:
+ """Convert the given size to a new top layer.
+
+ Parameters
+ ----------
+ size : SizeType
+ size of a block.
+ new_top_layer : int
+ the new top level layer ID.
+
+ Returns
+ -------
+ new_size : SizeType
+ the new size tuple.
+ """
+ wblk, hblk = self.get_size_dimension(size)
+ return self.get_size_tuple(new_top_layer, wblk, hblk)
+
+ def get_wire_bounds(self, layer_id: int, tr_idx: TrackType, width: int = 1) -> Tuple[int, int]:
+ """Calculate the wire bounds coordinate.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer ID.
+ tr_idx : TrackType
+ the center track index.
+ width : int
+ width of wire in number of tracks.
+
+ Returns
+ -------
+ lower : int
+ the lower bound coordinate perpendicular to wire direction.
+ upper : int
+ the upper bound coordinate perpendicular to wire direction.
+ """
+ return self.get_wire_bounds_htr(layer_id, int(round(2 * tr_idx)), width)
+
    def coord_to_track(self, layer_id: int, coord: int, mode: RoundMode = RoundMode.NONE,
                       even: bool = False) -> HalfInt:
        """Convert given coordinate to track number.

        Parameters
        ----------
        layer_id : int
            the layer number.
        coord : int
            the coordinate perpendicular to the track direction.
        mode : RoundMode
            the rounding mode.

            If mode == NEAREST, return the nearest track.

            If mode == LESS_EQ, return the nearest track with coordinate less
            than or equal to coord.

            If mode == LESS, return the nearest track with coordinate less
            than coord.

            If mode == GREATER, return the nearest track with coordinate greater
            than coord.

            If mode == GREATER_EQ, return the nearest track with coordinate greater
            than or equal to coord.

            If mode == NONE, raise error if coordinate is not on track (default).

        even : bool
            True to round coordinate to integer tracks.

        Returns
        -------
        track : HalfInt
            the track number
        """
        return HalfInt(self.coord_to_htr(layer_id, coord, mode, even))
+
    def coord_to_fill_track(self, layer_id: int, coord: int, fill_config: Dict[int, Any],
                            mode: RoundMode = RoundMode.NEAREST) -> HalfInt:
        """Returns the fill track number closest to the given coordinate.

        Fill tracks repeat with a pitch of (fill width + fill space) tracks;
        rounding is done on that coarser fill grid.

        Parameters
        ----------
        layer_id : int
            the layer number.
        coord : int
            the coordinate perpendicular to the track direction.
        fill_config : Dict[int, Any]
            the fill configuration dictionary.
        mode : RoundMode
            the rounding mode.

            If mode == NEAREST, return the nearest track (default).

            If mode == LESS_EQ, return the nearest track with coordinate less
            than or equal to coord.

            If mode == LESS, return the nearest track with coordinate less
            than coord.

            If mode == GREATER, return the nearest track with coordinate greater
            than coord.

            If mode == GREATER_EQ, return the nearest track with coordinate greater
            than or equal to coord.

            If mode == NONE, raise error if coordinate is not on track.

        Returns
        -------
        track : HalfInt
            the track number
        """
        ntr_w, ntr_sp, _, _ = fill_config[layer_id]

        # fill pitch in resolution units (fill repeats every ntr_w + ntr_sp tracks)
        num_htr = round(2 * (ntr_w + ntr_sp))
        fill_pitch = num_htr * self.get_track_pitch(layer_id) // 2
        return HalfInt(coord_to_custom_htr(coord, fill_pitch, fill_pitch // 2, mode, False))
+
    def coord_to_nearest_track(self, layer_id: int, coord: int, *,
                               half_track: bool = True,
                               mode: Union[RoundMode, int] = RoundMode.NEAREST) -> HalfInt:
        """Returns the track number closest to the given coordinate.

        .. deprecated::
            Use :meth:`coord_to_track` with the optional flags instead.

        Parameters
        ----------
        layer_id : int
            the layer number.
        coord : int
            the coordinate perpendicular to the track direction.
        half_track : bool
            if True, allow half integer track numbers.
        mode : Union[RoundMode, int]
            the rounding mode.

            If mode == NEAREST, return the nearest track (default).

            If mode == LESS_EQ, return the nearest track with coordinate less
            than or equal to coord.

            If mode == LESS, return the nearest track with coordinate less
            than coord.

            If mode == GREATER, return the nearest track with coordinate greater
            than coord.

            If mode == GREATER_EQ, return the nearest track with coordinate greater
            than or equal to coord.

        Returns
        -------
        track : HalfInt
            the track number
        """
        warn('coord_to_nearest_track is deprecated, use coord_to_track with optional flags instead',
             DeprecationWarning)
        return HalfInt(self.coord_to_htr(layer_id, coord, mode, not half_track))
+
+ def find_next_track(self, layer_id: int, coord: int, *, tr_width: int = 1,
+ half_track: bool = True,
+ mode: Union[RoundMode, int] = RoundMode.GREATER_EQ) -> HalfInt:
+ """Find the track such that its edges are on the same side w.r.t. the given coordinate.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer number.
+ coord : int
+ the coordinate perpendicular to the track direction.
+ tr_width : int
+ the track width, in number of tracks.
+ half_track : bool
+ True to allow half integer track center numbers.
+ mode : Union[RoundMode, int]
+ the rounding mode. NEAREST and NONE are not supported.
+
+ If mode == LESS_EQ, return the track with both edges less
+ than or equal to coord.
+
+ If mode == LESS, return the nearest track with both edges less
+ than coord.
+
+ If mode == GREATER, return the nearest track with both edges greater
+ than coord.
+
+ If mode == GREATER_EQ, return the nearest track with both edges greater
+ than or equal to coord.
+
+ Returns
+ -------
+ tr_idx : HalfInt
+ the center track index.
+ """
+ return HalfInt(self.find_next_htr(layer_id, coord, tr_width, mode, not half_track))
+
+ def transform_track(self, layer_id: int, track_idx: TrackType, xform: Transform) -> HalfInt:
+ """Transform the given track index.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer ID.
+ track_idx : TrackType
+ the track index.
+ xform : Transform
+ the transformation object.
+
+ Returns
+ -------
+ tidx : HalfInt
+ the transformed track index.
+ """
+ return HalfInt(self.transform_htr(layer_id, int(round(2 * track_idx)), xform))
+
+ def get_track_index_range(self, layer_id: int, lower: int, upper: int, *,
+ num_space: TrackType = 0, edge_margin: int = 0,
+ half_track: bool = True) -> Tuple[OptHalfIntType, OptHalfIntType]:
+ """ Returns the first and last track index strictly in the given range.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer ID.
+ lower : int
+ the lower coordinate.
+ upper : int
+ the upper coordinate.
+ num_space : TrackType
+ number of space tracks to the tracks right outside of the given range.
+ edge_margin : int
+ minimum space from outer tracks to given range.
+ half_track : bool
+ True to allow half-integer tracks.
+
+ Returns
+ -------
+ start_track : OptHalfIntType
+ the first track index. None if no solution.
+ end_track : OptHalfIntType
+ the last track index. None if no solution.
+ """
+ even = not half_track
+ # get start track half index
+ lower_bnd = self.find_next_track(layer_id, lower, mode=RoundMode.LESS_EQ)
+ start_track = self.find_next_track(layer_id, lower + edge_margin, mode=RoundMode.GREATER_EQ)
+ start_track = max(start_track, lower_bnd + num_space).up_even(even)
+
+ # get end track half index
+ upper_bnd = self.find_next_track(layer_id, upper, mode=RoundMode.GREATER_EQ)
+ end_track = self.find_next_track(layer_id, upper - edge_margin, mode=RoundMode.LESS_EQ)
+ end_track = min(end_track, upper_bnd - num_space).down_even(even)
+
+ if end_track < start_track:
+ # no solution
+ return None, None
+ return start_track, end_track
+
+ def get_overlap_tracks(self, layer_id: int, lower: int, upper: int,
+ half_track: bool = True) -> Tuple[OptHalfIntType, OptHalfIntType]:
+ """ Returns the first and last track index that overlaps with the given range.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer ID.
+ lower : int
+ the lower coordinate.
+ upper : int
+ the upper coordinate.
+ half_track : bool
+ True to allow half-integer tracks.
+
+ Returns
+ -------
+ start_track : OptHalfIntType
+ the first track index. None if no solution.
+ end_track : OptHalfIntType
+ the last track index. None if no solution.
+ """
+ even = not half_track
+ lower_tr = self.find_next_track(layer_id, lower, mode=RoundMode.LESS_EQ)
+ lower_tr = lower_tr.up().up_even(even)
+ upper_tr = self.find_next_track(layer_id, upper, mode=RoundMode.GREATER_EQ)
+ upper_tr = upper_tr.down().down_even(even)
+
+ if upper_tr < lower_tr:
+ return None, None
+ return lower_tr, upper_tr
+
+ def track_to_coord(self, layer_id: int, track_idx: TrackType) -> int:
+ """Convert given track number to coordinate.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer number.
+ track_idx : TrackType
+ the track number.
+
+ Returns
+ -------
+ coord : int
+ the coordinate perpendicular to track direction.
+ """
+ return self.htr_to_coord(layer_id, int(round(2 * track_idx)))
+
+ def interval_to_track(self, layer_id: int, intv: Tuple[int, int]) -> Tuple[HalfInt, int]:
+ """Convert given coordinates to track number and width.
+
+ Parameters
+ ----------
+ layer_id : int
+ the layer number.
+ intv : Tuple[int, int]
+ lower and upper coordinates perpendicular to the track direction.
+
+ Returns
+ -------
+ track : HalfInt
+ the track number
+ width : int
+ the track width, in number of tracks.
+ """
+ start, stop = intv
+ htr = self.coord_to_htr(layer_id, (start + stop) // 2, RoundMode.NONE, False)
+ width = stop - start
+
+ # binary search to take width override into account
+ bin_iter = BinaryIterator(1, None)
+ while bin_iter.has_next():
+ cur_ntr = bin_iter.get_next()
+ wire_width = self.get_wire_total_width(layer_id, cur_ntr)
+ if wire_width == width:
+ return HalfInt(htr), cur_ntr
+ elif wire_width > width:
+ bin_iter.down()
+ else:
+ bin_iter.up()
+
+ # never found solution; width is not quantized.
+ raise ValueError('Interval {} on layer {} width not quantized'.format(intv, layer_id))
+
+ def get_copy_with(self, top_ignore_lay: Optional[int] = None,
+ top_private_lay: Optional[int] = None,
+ tr_specs: Optional[List[TrackSpec]] = None
+ ) -> RoutingGrid:
+ if top_ignore_lay is None:
+ top_ignore_lay = self.top_ignore_layer
+ if top_private_lay is None:
+ top_private_lay = self.top_private_layer
+ if tr_specs is None:
+ tr_specs_cpp = []
+ else:
+ tr_specs_cpp = [(spec.layer, spec.direction.value, spec.width, spec.space, spec.offset)
+ for spec in tr_specs]
+
+ new_grid = super(RoutingGrid, self).get_copy_with(top_ignore_lay, top_private_lay,
+ tr_specs_cpp)
+ return RoutingGrid(self._tech_info, '', copy=new_grid)
+
diff --git a/src/bag/layout/tech.py b/src/bag/layout/tech.py
new file mode 100644
index 0000000..659671b
--- /dev/null
+++ b/src/bag/layout/tech.py
@@ -0,0 +1,732 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines BAG's technology related classes"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Dict, List, Tuple, Optional, Any, Sequence
+
+import math
+from itertools import chain
+
+# try to import cython classes
+# noinspection PyUnresolvedReferences
+from pybag.core import BBox, PyTech, Transform
+from pybag.enum import Orient2D, Orientation, Direction
+
+from ..util.search import BinaryIterator
+from ..util.immutable import ImmutableSortedDict, Param
+
+from .data import MaxSpaceFillInfo
+
+if TYPE_CHECKING:
+ from .core import PyLayInstance
+ from .template import TemplateBase
+
+
+# Note: pybind11 classes do not work with ABCs, so we raise NotImplementedError as a compromise.
+class TechInfo(PyTech):
+ """The base technology class.
+
+ This class provides various methods for querying technology-specific information.
+
+ Parameters
+ ----------
+ tech_params : Dict[str, Any]
+ process specific parameters.
+ config : Dict[str, Any]
+ the configuration dictionary corresponding to config_fname.
+ config_fname : str
+ the configuration file name.
+
+ Attributes
+ ----------
+ tech_params : Dict[str, Any]
+ technology specific parameters.
+ """
+
    def __init__(self, tech_params: Dict[str, Any], config: Dict[str, Any],
                 config_fname: str) -> None:
        PyTech.__init__(self, config_fname)
        self._tech_params = tech_params
        self._config = config
        # registered device technology classes, keyed by device name
        self._tech_cls_dict: Dict[str, Any] = {}
        # memoized technology class instances, keyed by (device name, frozen kwargs)
        self._tech_cls_cache: Dict[Tuple[str, ImmutableSortedDict], Any] = {}
+
    def get_margin(self, is_vertical: bool, edge1: Param, edge2: Optional[Param]) -> int:
        """Return the margin between the two given edges.

        Abstract method; must be implemented by the process-specific subclass.
        NOTE(review): edge semantics and units are defined by the subclass -- confirm there.
        """
        raise NotImplementedError('Not implemented.')
+
    def add_cell_boundary(self, template: TemplateBase, box: BBox) -> None:
        """Adds a cell boundary object to the given template.

        This is usually the PR boundary.

        Abstract method; must be implemented by the process-specific subclass.

        Parameters
        ----------
        template : TemplateBase
            the template to draw the cell boundary in.
        box : BBox
            the cell boundary bounding box.
        """
        raise NotImplementedError('Not implemented.')
+
    def draw_device_blockage(self, template: TemplateBase) -> None:
        """Draw device blockage layers on the given template.

        Abstract method; must be implemented by the process-specific subclass.

        Parameters
        ----------
        template : TemplateBase
            the template to draw the device block layers on.
        """
        raise NotImplementedError('Not implemented.')
+
    def get_metal_em_specs(self, layer: str, purpose: str, w: int, length: int = -1,
                           vertical: bool = False, dc_temp: int = -1000, rms_dt: int = -1000
                           ) -> Tuple[float, float, float]:
        """Returns a tuple of EM current/resistance specs of the given wire.

        Abstract method; must be implemented by the process-specific subclass.

        Parameters
        ----------
        layer : str
            the layer name.
        purpose : str
            the purpose name.
        w : int
            the width of the metal in resolution units (dimension perpendicular to current flow).
        length : int
            the length of the metal in resolution units (dimension parallel to current flow).
            If negative, disable length enhancement.
        vertical : bool
            True to compute vertical current.
        dc_temp : int
            the temperature (in Celsius) to calculate DC current EM spec with.
            If equal to -1000, use technology default.
        rms_dt : int
            the temperature delta (in Celsius) to target for when computing AC RMS current
            EM spec. If equal to -1000, use technology default.

        Returns
        -------
        idc : float
            maximum DC current, in Amperes.
        iac_rms : float
            maximum AC RMS current, in Amperes.
        iac_peak : float
            maximum AC peak current, in Amperes.
        """
        raise NotImplementedError('Not implemented.')
+
    def get_via_em_specs(self, layer_dir: int, layer: str, purpose: str, adj_layer: str,
                         adj_purpose: str, cut_w: int, cut_h: int, m_w: int = -1, m_l: int = -1,
                         adj_m_w: int = -1, adj_m_l: int = -1, array: bool = False,
                         dc_temp: int = -1000, rms_dt: int = -1000) -> Tuple[float, float, float]:
        """Returns a tuple of EM current/resistance specs of the given via.

        Abstract method; must be implemented by the process-specific subclass.

        Parameters
        ----------
        layer_dir : int
            the direction value of the first specified layer. LOWER if the first layer is the
            bottom layer, UPPER if the first layer is the top layer.
        layer : str
            the first layer name.
        purpose : str
            the first layer purpose name.
        adj_layer : str
            the second layer name.
        adj_purpose : str
            the second layer purpose name.
        cut_w : int
            the via cut width.
        cut_h : int
            the via cut height.
        m_w : int
            the first layer wire width, used for EM enhancement calculations.
            Negative numbers have no effect.
        m_l : int
            the first layer wire length, used for EM enhancement calculations.
            Negative numbers have no effect.
        adj_m_w : int
            the second layer wire width, used for EM enhancement calculations.
            Negative numbers have no effect.
        adj_m_l : int
            the second layer wire length, used for EM enhancement calculations.
            Negative numbers have no effect.
        array : bool
            True if this via is in a via array.
        dc_temp : int
            the temperature (in Celsius) to calculate DC current EM spec with.
            If equal to -1000, use technology default.
        rms_dt : int
            the temperature delta (in Celsius) to target for when computing AC RMS current
            EM spec. If equal to -1000, use technology default.

        Returns
        -------
        idc : float
            maximum DC current per via, in Amperes.
        iac_rms : float
            maximum AC RMS current per via, in Amperes.
        iac_peak : float
            maximum AC peak current per via, in Amperes.
        """
        raise NotImplementedError('Not implemented.')
+
    def get_res_em_specs(self, res_type: str, w: int, *, length: int = -1,
                         dc_temp: int = -1000, rms_dt: int = -1000) -> Tuple[float, float, float]:
        """Returns a tuple of EM current/resistance specs of the given resistor.

        Abstract method; must be implemented by the process-specific subclass.

        Parameters
        ----------
        res_type : str
            the resistor type string.
        w : int
            the width of the metal in resolution units (dimension perpendicular to current flow).
        length : int
            the length of the metal in resolution units (dimension parallel to current flow).
            If negative, disable length enhancement.
        dc_temp : int
            the temperature (in Celsius) to calculate DC current EM spec with.
            If equal to -1000, use technology default.
        rms_dt : int
            the temperature delta (in Celsius) to target for when computing AC RMS current
            EM spec. If equal to -1000, use technology default.

        Returns
        -------
        idc : float
            maximum DC current, in Amperes.
        iac_rms : float
            maximum AC RMS current, in Amperes.
        iac_peak : float
            maximum AC peak current, in Amperes.
        """
        raise NotImplementedError('Not implemented.')
+
    @property
    def tech_params(self) -> Dict[str, Any]:
        """Dict[str, Any]: the technology parameters dictionary."""
        return self._tech_params

    @property
    def config(self) -> Dict[str, Any]:
        """Dict[str, Any]: The configuration dictionary used to compute various DRC rules."""
        return self._config

    def register_device_tech(self, dev_name: str, obj: Any) -> None:
        """Register the technology class for the given device name."""
        self._tech_cls_dict[dev_name] = obj
+
    def get_device_tech(self, dev_name: str, **kwargs: Any) -> Any:
        """Get an instance of the technology class for the given device.

        Instances are cached on (dev_name, kwargs); repeated calls with the
        same arguments return the same object.

        Raises
        ------
        ValueError
            if no technology class was registered under ``dev_name``.
        """
        # freeze kwargs into an immutable sorted dict so the cache key is hashable
        cache_key = (dev_name, ImmutableSortedDict(kwargs))
        ans = self._tech_cls_cache.get(cache_key, None)
        if ans is None:
            # make the technology class instance.
            cls = self._tech_cls_dict.get(dev_name, None)
            if cls is None:
                raise ValueError(
                    'Technology class {} not found. Is it registered?'.format(dev_name))

            ans = self._tech_cls_cache[cache_key] = cls(tech_info=self, **kwargs)
        return ans
+
    def get_dc_temp(self, dc_temp: int = -1000) -> int:
        """Returns the temperature at which to evaluate DC electro-migration rules.

        -1000 is the sentinel meaning "use the technology default".
        """
        if dc_temp == -1000:
            return self._tech_params['em']['dc_temp']
        return dc_temp

    def get_rms_dt(self, rms_dt: int = -1000) -> int:
        """Returns the delta-temperature requirement for RMS electro-migration rules.

        -1000 is the sentinel meaning "use the technology default".
        """
        if rms_dt == -1000:
            return self._tech_params['em']['rms_dt']
        return rms_dt
+
    def get_well_layers(self, mos_type: str) -> Sequence[Tuple[str, str]]:
        """Returns a list of well (layer, purpose) pairs associated with the
        given transistor type."""
        return self._config['well_layers'][mos_type]

    def get_implant_layers(self, mos_type: str, res_type: str = '') -> Sequence[Tuple[str, str]]:
        """Returns a list of implant layers associated with the given device type.

        Parameters
        ----------
        mos_type : str
            one of 'nch', 'pch', 'ntap', or 'ptap'
        res_type : str
            If given, the return layers will be for the substrate of the given resistor type.

        Returns
        -------
        imp_list : List[Tuple[str, str]]
            list of implant layers.
        """
        # resistor substrates use 'res_<type>' entries in the config table
        entry_name = 'res_{}'.format(res_type) if res_type else mos_type
        return self.config['imp_layers'][entry_name]

    def get_threshold_layers(self, mos_type: str, threshold: str,
                             res_type: str = '') -> Sequence[Tuple[str, str]]:
        """Returns a list of threshold (layer, purpose) pairs for the given
        device type and threshold flavor."""
        entry_name = 'res_{}'.format(res_type) if res_type else mos_type
        return self.config['thres_layers'][entry_name][threshold]

    def get_exclude_layer(self, layer_id: int) -> Tuple[str, str]:
        """Returns the metal exclude (layer, purpose) pair for the given metal layer ID."""
        return self.config['metal_exclude_table'][layer_id]
+
    def get_dnw_margin(self, dnw_mode: str) -> int:
        """Returns the required DNW margin given the DNW mode.

        Parameters
        ----------
        dnw_mode : str
            the DNW mode string.

        Returns
        -------
        dnw_margin : int
            the DNW margin in resolution units.
        """
        return self.config['dnw_margins'][dnw_mode]

    def get_dnw_layers(self) -> List[Tuple[str, str]]:
        """Returns a list of layers that defines DNW (deep N-well).

        Returns
        -------
        lay_list : List[Tuple[str, str]]
            list of DNW layers.
        """
        return self.config['dnw_layers']

    def get_res_metal_layers(self, layer_id: int) -> List[Tuple[str, str]]:
        """Returns a list of layers associated with the given metal resistor.

        Parameters
        ----------
        layer_id : int
            the metal layer ID.

        Returns
        -------
        res_list : List[Tuple[str, str]]
            list of resistor layers.
        """
        return self.config['res_metal_layer_table'][layer_id]
+
    def get_res_rsquare(self, res_type: str) -> float:
        """Returns R-square for the given resistor type.

        This is used to do some approximate resistor dimension calculation.

        Parameters
        ----------
        res_type : str
            the resistor type.

        Returns
        -------
        rsquare : float
            resistance in Ohms per unit square of the given resistor type.
        """
        return self.config['resistor']['info'][res_type]['rsq']

    def get_res_width_bounds(self, res_type: str) -> Tuple[int, int]:
        """Returns the maximum and minimum resistor width for the given resistor type.

        Parameters
        ----------
        res_type : str
            the resistor type.

        Returns
        -------
        wmin : int
            minimum resistor width, in layout units.
        wmax : int
            maximum resistor width, in layout units.
        """
        return self.config['resistor']['info'][res_type]['w_bounds']

    def get_res_length_bounds(self, res_type: str) -> Tuple[int, int]:
        """Returns the maximum and minimum resistor length for the given resistor type.

        Parameters
        ----------
        res_type : str
            the resistor type.

        Returns
        -------
        lmin : int
            minimum resistor length, in layout units.
        lmax : int
            maximum resistor length, in layout units.
        """
        return self.config['resistor']['info'][res_type]['l_bounds']

    def get_res_min_nsquare(self, res_type: str) -> float:
        """Returns the minimum allowable number of squares for the given resistor type.

        Parameters
        ----------
        res_type : str
            the resistor type.

        Returns
        -------
        nsq_min : float
            minimum number of squares needed.
        """
        return self.config['resistor']['info'][res_type]['min_nsq']
+
    def get_max_space_fill_info(self, layer_id: int) -> MaxSpaceFillInfo:
        """Returns the maximum-space fill specification for the given layer.

        Wraps the per-layer ``fill`` entry of the technology parameters in a
        MaxSpaceFillInfo object.
        """
        fill_info: Tuple[int, int, int, int, float] = self._tech_params['fill'][layer_id]
        return MaxSpaceFillInfo(fill_info)

    def get_idc_scale_factor(self, layer: str, purpose: str, temp: float,
                             is_res: bool = False) -> float:
        """Return the Idc EM specs temperature scale factor.

        Parameters
        ----------
        layer : str
            the layer name.
        purpose : str
            the purpose name.
        temp : float
            the temperature, in Celsius.
        is_res : bool
            True to get scale factor for resistor.

        Returns
        -------
        scale : float
            the scale factor.
        """
        # resistors share a single 'res' table entry; wires are keyed by
        # (layer, purpose), falling back to the 'default' entry.
        if is_res:
            key = 'res'
        else:
            key = (layer, purpose)
        idc_em_scale = self.config['idc_em_scale']

        idc_params = idc_em_scale.get(key, None)
        if idc_params is None:
            idc_params = idc_em_scale['default']

        temp_list = idc_params['temp']
        scale_list = idc_params['scale']

        # return the scale of the first breakpoint at or above temp; if temp
        # exceeds all breakpoints, use the last scale.
        # assumes temp_list is sorted in ascending order -- TODO confirm in config.
        for temp_test, scale in zip(temp_list, scale_list):
            if temp <= temp_test:
                return scale
        return scale_list[-1]
+
+ def merge_well(self, template: TemplateBase, inst_list: List[PyLayInstance], sub_type: str, *,
+ threshold: str = '', res_type: str = '', merge_imp: bool = False) -> None:
+ """Merge the well of the given instances together."""
+
+ if threshold is not None:
+ lay_iter = chain(self.get_well_layers(sub_type),
+ self.get_threshold_layers(sub_type, threshold, res_type=res_type))
+ else:
+ lay_iter = self.get_well_layers(sub_type)
+ if merge_imp:
+ lay_iter = chain(lay_iter, self.get_implant_layers(sub_type, res_type=res_type))
+
+ for lay_purp in lay_iter:
+ tot_box = BBox.get_invalid_bbox()
+ for inst in inst_list:
+ cur_box = inst.master.get_rect_bbox(lay_purp)
+ tot_box.merge(inst.transform_master_object(cur_box))
+ if tot_box.is_physical():
+ template.add_rect(lay_purp, tot_box)
+
    # noinspection PyMethodMayBeStatic
    def finalize_template(self, template: TemplateBase) -> None:
        """Perform any operations necessary on the given layout template before finalizing it.

        By default we add the cell boundary.  Technology subclasses may override
        to add extra finalization steps.

        Parameters
        ----------
        template : TemplateBase
            the template object.
        """
        template.add_cell_boundary(template.bound_box)
+
    def get_res_info(self, res_type: str, w: int, l: int, **kwargs: Any) -> Dict[str, Any]:
        """Returns a dictionary containing EM information of the given resistor.

        Parameters
        ----------
        res_type : str
            the resistor type.
        w : int
            the resistor width in resolution units (dimension perpendicular to current flow).
        l : int
            the resistor length in resolution units (dimension parallel to current flow).
        **kwargs : Any
            optional parameters for EM rule calculations, such as nominal temperature,
            AC rms delta-T, etc.

        Returns
        -------
        info : Dict[str, Any]
            A dictionary of wire information. Should have the following:

            resistance : float
                The resistance, in Ohms.
            idc : float
                The maximum allowable DC current, in Amperes.
            iac_rms : float
                The maximum allowable AC RMS current, in Amperes.
            iac_peak : float
                The maximum allowable AC peak current, in Amperes.
        """
        rsq = self.get_res_rsquare(res_type)
        # R = (number of squares) * R_square = (L / W) * Rsq
        res = l / w * rsq
        idc, irms, ipeak = self.get_res_em_specs(res_type, w, length=l, **kwargs)

        return dict(
            resistance=res,
            idc=idc,
            iac_rms=irms,
            iac_peak=ipeak,
        )
+
    def get_via_info(self, bbox: BBox, layer_dir: Direction, layer: str, adj_layer: str,
                     ex_dir: Orient2D, *, purpose: str = '', adj_purpose: str = '',
                     wlen: int = -1, adj_wlen: int = -1, extend: bool = True,
                     adj_ex_dir: Optional[Orient2D] = None,
                     **kwargs: Any) -> Optional[Dict[str, Any]]:
        """Create a via on the routing grid given the bounding box.

        Parameters
        ----------
        bbox : BBox
            the bounding box of the via.
        layer_dir : Direction
            the direction of the first specified layer. LOWER if the first layer is the
            bottom layer, UPPER if the first layer is the top layer.
        layer : str
            the first layer name.
        adj_layer : str
            the second layer name.
        ex_dir : Orient2D
            the first layer extension direction.
        purpose : str
            first layer purpose name.
        adj_purpose : str
            second layer purpose name.
        wlen : int
            length of first layer wire connected to this Via, in resolution units.
            Used for length enhancement EM calculation.
        adj_wlen : int
            length of second layer wire connected to this Via, in resolution units.
            Used for length enhancement EM calculation.
        extend : bool
            True if via extension can be drawn outside of bounding box.
        adj_ex_dir : Optional[Orient2D]
            second layer extension direction. Can force to extend in same direction as bottom.
        **kwargs : Any
            optional parameters for EM rule calculations, such as nominal temperature,
            AC rms delta-T, etc.

        Returns
        -------
        info : Optional[Dict[str, Any]]
            A dictionary of via information, or None if no solution. Should have the following:

            resistance : float
                The total via array resistance, in Ohms.
            idc : float
                The total via array maximum allowable DC current, in Amperes.
            iac_rms : float
                The total via array maximum allowable AC RMS current, in Amperes.
            iac_peak : float
                The total via array maximum allowable AC peak current, in Amperes.
            params : Dict[str, Any]
                A dictionary of via parameters.
        """
        # by default the adjacent layer extends perpendicular to the first layer
        if adj_ex_dir is None:
            adj_ex_dir = ex_dir.perpendicular()

        via_id = self.get_via_id(layer_dir, layer, purpose, adj_layer, adj_purpose)
        via_param = self.get_via_param(bbox.w, bbox.h, via_id, layer_dir,
                                       ex_dir, adj_ex_dir, extend)

        if via_param.empty:
            # no solution found
            return None

        # via is placed centered at the bounding box center
        xform = Transform(bbox.xm, bbox.ym, Orientation.R0)
        m_box = via_param.get_box(xform, layer_dir)
        adj_m_box = via_param.get_box(xform, layer_dir.flip())
        # wire widths are measured perpendicular to each layer's extension direction
        w = m_box.get_dim(ex_dir.perpendicular())
        adj_w = adj_m_box.get_dim(adj_ex_dir.perpendicular())
        cut_dim = via_param.cut_dim
        nx = via_param.nx
        ny = via_param.ny
        # per-cut EM specs; array=True when there is more than one cut
        idc, irms, ipeak = self.get_via_em_specs(layer_dir, layer, purpose, adj_layer,
                                                 adj_purpose, cut_dim[0], cut_dim[1],
                                                 m_w=w, m_l=wlen, adj_m_w=adj_w, adj_m_l=adj_wlen,
                                                 array=nx > 1 or ny > 1, **kwargs)

        # NOTE(review): this Transform is constructed identically to `xform`
        # above; presumably the duplicate exists to avoid sharing -- confirm
        # whether ViaParam.get_box() mutates its transform argument.
        params = {'id': via_id,
                  'xform': Transform(bbox.xm, bbox.ym, Orientation.R0),
                  'via_param': via_param,
                  }

        # total currents scale linearly with the number of cuts
        ntot = nx * ny
        # box_list is indexed by the Direction enum value so that
        # box_list[LOWER] is the bottom metal box and box_list[UPPER] the top
        box_list = [None, None]
        box_list[layer_dir] = m_box
        box_list[layer_dir.flip()] = adj_m_box
        return dict(
            # via resistance is not computed here; reported as 0.0
            resistance=0.0,
            idc=idc * ntot,
            iac_rms=irms * ntot,
            iac_peak=ipeak * ntot,
            params=params,
            metal_box=box_list,
        )
+
+ def design_resistor(self, res_type: str, res_targ: float, idc: float = 0.0,
+ iac_rms: float = 0.0, iac_peak: float = 0.0, num_even: bool = True,
+ **kwargs: Any) -> Tuple[int, int, int, int]:
+ """Finds the optimal resistor dimension that meets the given specs.
+
+ Assumes resistor length does not effect EM specs.
+
+ Parameters
+ ----------
+ res_type : str
+ the resistor type.
+ res_targ : float
+ target resistor, in Ohms.
+ idc : float
+ maximum DC current spec, in Amperes.
+ iac_rms : float
+ maximum AC RMS current spec, in Amperes.
+ iac_peak : float
+ maximum AC peak current spec, in Amperes.
+ num_even : int
+ True to return even number of resistors.
+ **kwargs :
+ optional EM spec calculation parameters.
+
+ Returns
+ -------
+ num_par : int
+ number of resistors needed in parallel.
+ num_ser : int
+ number of resistors needed in series.
+ w : int
+ width of a unit resistor, in resolution units.
+ l : int
+ length of a unit resistor, in resolution units.
+ """
+ rsq = self.get_res_rsquare(res_type)
+ wmin_unit, wmax_unit = self.get_res_width_bounds(res_type)
+ lmin_unit, lmax_unit = self.get_res_length_bounds(res_type)
+ min_nsq = self.get_res_min_nsquare(res_type)
+
+ # make sure width is always even
+ wmin_unit = -2 * (-wmin_unit // 2)
+ wmax_unit = 2 * (wmax_unit // 2)
+
+ # step 1: find number of parallel resistors and minimum resistor width.
+ if num_even:
+ npar_iter = BinaryIterator(2, None, step=2)
+ else:
+ npar_iter = BinaryIterator(1, None, step=1)
+ while npar_iter.has_next():
+ npar = npar_iter.get_next()
+ res_targ_par = res_targ * npar
+ idc_par = idc / npar
+ iac_rms_par = iac_rms / npar
+ iac_peak_par = iac_peak / npar
+ res_idc, res_irms, res_ipeak = self.get_res_em_specs(res_type, wmax_unit, **kwargs)
+ if (0.0 < res_idc < idc_par or 0.0 < res_irms < iac_rms_par or
+ 0.0 < res_ipeak < iac_peak_par):
+ npar_iter.up()
+ else:
+ # This could potentially work, find width solution
+ w_iter = BinaryIterator(wmin_unit, wmax_unit + 1, step=2)
+ while w_iter.has_next():
+ wcur_unit = w_iter.get_next()
+ lcur_unit = int(math.ceil(res_targ_par / rsq * wcur_unit))
+ if lcur_unit < max(lmin_unit, int(math.ceil(min_nsq * wcur_unit))):
+ w_iter.down()
+ else:
+ tmp = self.get_res_em_specs(res_type, wcur_unit, length=lcur_unit, **kwargs)
+ res_idc, res_irms, res_ipeak = tmp
+ if (0.0 < res_idc < idc_par or 0.0 < res_irms < iac_rms_par or
+ 0.0 < res_ipeak < iac_peak_par):
+ w_iter.up()
+ else:
+ w_iter.save_info((wcur_unit, lcur_unit))
+ w_iter.down()
+
+ w_info = w_iter.get_last_save_info()
+ if w_info is None:
+ # no solution; we need more parallel resistors
+ npar_iter.up()
+ else:
+ # solution!
+ npar_iter.save_info((npar, w_info[0], w_info[1]))
+ npar_iter.down()
+
+ # step 3: fix maximum length violation by having resistor in series.
+ num_par, wopt_unit, lopt_unit = npar_iter.get_last_save_info()
+ if lopt_unit > lmax_unit:
+ num_ser = -(-lopt_unit // lmax_unit)
+ lopt_unit = -(-lopt_unit // num_ser)
+ else:
+ num_ser = 1
+
+ # step 4: return answer
+ return num_par, num_ser, wopt_unit, lopt_unit
diff --git a/src/bag/layout/template.py b/src/bag/layout/template.py
new file mode 100755
index 0000000..c106822
--- /dev/null
+++ b/src/bag/layout/template.py
@@ -0,0 +1,2762 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines layout template classes.
+"""
+
+from __future__ import annotations
+
+from typing import (
+ TYPE_CHECKING, Union, Dict, Any, List, TypeVar, Type, Optional, Tuple, Iterable, Mapping,
+ Sequence, Set, cast
+)
+from bag.typing import PointType
+
+import abc
+from itertools import product
+
+from pybag.enum import (
+ PathStyle, BlockageType, BoundaryType, DesignOutput, Orient2D, SupplyWrapMode,
+ Orientation, Direction, MinLenMode, RoundMode, PinMode, Direction2D
+)
+from pybag.core import (
+ BBox, BBoxArray, PyLayCellView, Transform, PyLayInstRef, PyPath, PyBlockage, PyBoundary,
+ PyRect, PyVia, PyPolygon, PyPolygon90, PyPolygon45, ViaParam, COORD_MIN, COORD_MAX,
+ RTree, BBoxCollection, TrackColoring, make_tr_colors
+)
+
+from ..util.immutable import ImmutableSortedDict, Param
+from ..util.cache import DesignMaster, MasterDB, format_cell_name
+from ..util.interval import IntervalSet
+from ..util.math import HalfInt
+from ..design.module import Module
+
+from .core import PyLayInstance
+from .tech import TechInfo
+from .routing.base import Port, TrackID, WireArray
+from .routing.grid import RoutingGrid
+from .data import MOMCapInfo, TemplateEdgeInfo
+
+GeoType = Union[PyRect, PyPolygon90, PyPolygon45, PyPolygon]
+TemplateType = TypeVar('TemplateType', bound='TemplateBase')
+DiffWarrType = Tuple[Optional[WireArray], Optional[WireArray]]
+
+if TYPE_CHECKING:
+ from ..core import BagProject
+ from ..typing import TrackType, SizeType
+
+
class TemplateDB(MasterDB):
    """A database of all templates.

    This class is a subclass of MasterDB that defines some extra properties/function
    aliases to make creating layouts easier.

    Parameters
    ----------
    routing_grid : RoutingGrid
        the default RoutingGrid object.
    lib_name : str
        the cadence library to put all generated templates in.
    prj : Optional[BagProject]
        the BagProject instance.
    name_prefix : str
        generated layout name prefix.
    name_suffix : str
        generated layout name suffix.
    """

    def __init__(self, routing_grid: RoutingGrid, lib_name: str, prj: Optional[BagProject] = None,
                 name_prefix: str = '', name_suffix: str = '') -> None:
        MasterDB.__init__(self, lib_name, prj=prj, name_prefix=name_prefix, name_suffix=name_suffix)

        self._grid = routing_grid
        # track coloring derived once from the grid's technology
        self._tr_colors = make_tr_colors(self._grid.tech_info)

    @property
    def grid(self) -> RoutingGrid:
        """RoutingGrid: The global RoutingGrid instance."""
        return self._grid

    @property
    def tech_info(self) -> TechInfo:
        """TechInfo: the technology object of the routing grid."""
        return self._grid.tech_info

    @property
    def tr_colors(self) -> TrackColoring:
        """TrackColoring: the default track coloring object."""
        return self._tr_colors

    def new_template(self, temp_cls: Type[TemplateType], params: Optional[Mapping[str, Any]] = None,
                     **kwargs: Any) -> TemplateType:
        """Alias for new_master() for backwards compatibility.
        """
        return self.new_master(temp_cls, params=params, **kwargs)

    def instantiate_layout(self, template: TemplateBase, top_cell_name: str = '',
                           output: DesignOutput = DesignOutput.LAYOUT, **kwargs: Any) -> None:
        """Alias for instantiate_master(), with default output type of LAYOUT.
        """
        self.instantiate_master(output, template, top_cell_name, **kwargs)

    def batch_layout(self, info_list: Sequence[Tuple[TemplateBase, str]],
                     output: DesignOutput = DesignOutput.LAYOUT, **kwargs: Any) -> None:
        """Alias for batch_output(), with default output type of LAYOUT.
        """
        self.batch_output(output, info_list, **kwargs)
+
+
def get_cap_via_extensions(info: MOMCapInfo, grid: RoutingGrid, bot_layer: int,
                           top_layer: int) -> Dict[int, int]:
    """Compute the worst-case via extension on each layer of a MOM cap.

    For each adjacent layer pair, the extension is the maximum over the four
    via configurations: port-to-port, cap-to-cap, cap-to-port, and port-to-cap.
    Returns a dict mapping layer ID to its required extension.
    """
    via_ext_dict: Dict[int, int] = {lay: 0 for lay in range(bot_layer, top_layer + 1)}
    # get via extensions on each layer
    for lay0 in range(bot_layer, top_layer):
        lay1 = lay0 + 1

        # port-to-port via extension
        bot_tr_w = info.get_port_tr_w(lay0)
        top_tr_w = info.get_port_tr_w(lay1)
        ext_pp = grid.get_via_extensions(Direction.LOWER, lay0, bot_tr_w, top_tr_w)

        w0, sp0, _, _ = info.get_cap_specs(lay0)
        w1, sp1, _, _ = info.get_cap_specs(lay1)
        # cap-to-cap via extension
        ext_cc = grid.get_via_extensions_dim(Direction.LOWER, lay0, w0, w1)
        # cap-to-port via extension
        ext_cp = grid.get_via_extensions_dim_tr(Direction.LOWER, lay0, w0, top_tr_w)
        # port-to-cap via extension
        ext_pc = grid.get_via_extensions_dim_tr(Direction.UPPER, lay1, w1, bot_tr_w)

        # each extension tuple is (bottom-layer extension, top-layer extension)
        via_ext_dict[lay0] = max(via_ext_dict[lay0], ext_pp[0], ext_cc[0], ext_cp[0], ext_pc[0])
        via_ext_dict[lay1] = max(via_ext_dict[lay1], ext_pp[1], ext_cc[1], ext_cp[1], ext_pc[1])

    return via_ext_dict
+
+
+class TemplateBase(DesignMaster):
+ """The base template class.
+
+ Parameters
+ ----------
+ temp_db : TemplateDB
+ the template database.
+ params : Param
+ the parameter values.
+ **kwargs : Any
+ dictionary of the following optional parameters:
+
+ grid : RoutingGrid
+ the routing grid to use for this template.
+ """
+
    def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
        """Initialize this template master; see the class docstring for parameters."""
        # add hidden parameters
        DesignMaster.__init__(self, temp_db, params, **kwargs)

        # private attributes
        self._size: Optional[SizeType] = None
        self._ports: Dict[str, Port] = {}
        self._port_params: Dict[str, Dict[str, Any]] = {}
        self._array_box: Optional[BBox] = None
        self._fill_box: Optional[BBox] = None
        self._sch_params: Optional[Param] = None
        self._cell_boundary_added: bool = False
        self._instances: Dict[str, PyLayInstance] = {}
        self._use_color: bool = False

        # public attributes
        self.prim_top_layer: Optional[int] = None
        self.prim_bound_box: Optional[BBox] = None

        # get private attributes from parameters; fall back to the database
        # defaults when the hidden 'grid'/'tr_colors' parameters are None.
        tmp_grid: RoutingGrid = self.params['grid']
        if tmp_grid is None:
            self._grid: RoutingGrid = temp_db.grid
        else:
            self._grid: RoutingGrid = tmp_grid

        tmp_colors: TrackColoring = self.params['tr_colors']
        if tmp_colors is None:
            self._tr_colors: TrackColoring = temp_db.tr_colors
        else:
            self._tr_colors: TrackColoring = tmp_colors

        self._show_pins: bool = self.params['show_pins']
        self._edge_info: Optional[TemplateEdgeInfo] = None

        # create Cython wrapper object
        self._layout: PyLayCellView = PyLayCellView(self._grid, self._tr_colors, self.cell_name)
+
    @classmethod
    def get_hidden_params(cls) -> Dict[str, Any]:
        """Returns the hidden parameter defaults, adding the layout-specific
        'grid', 'tr_colors', and 'show_pins' entries."""
        ans = DesignMaster.get_hidden_params()
        ans['grid'] = None
        ans['tr_colors'] = None
        ans['show_pins'] = True
        return ans

    @classmethod
    def get_schematic_class(cls) -> Optional[Type[Module]]:
        """Returns the schematic generator class associated with this layout,
        or None if there is none."""
        return None

    @abc.abstractmethod
    def draw_layout(self) -> None:
        """Draw the layout of this template.

        Override this method to create the layout.

        WARNING: you should never call this method yourself.
        """
        pass
+
    def get_schematic_class_inst(self) -> Optional[Type[Module]]:
        """Instance-level hook for the schematic generator class; defaults to
        the class-level get_schematic_class()."""
        return self.get_schematic_class()

    def get_master_basename(self) -> str:
        """Returns the base name to use for this instance.

        Returns
        -------
        basename : str
            the base name for this instance.
        """
        return self.get_layout_basename()

    def get_layout_basename(self) -> str:
        """Returns the base name for this template.

        Returns
        -------
        base_name : str
            the base name of this template.
        """
        return self.__class__.__name__

    def get_content(self, output_type: DesignOutput, rename_dict: Dict[str, str], name_prefix: str,
                    name_suffix: str, shell: bool, exact_cell_names: Set[str],
                    supply_wrap_mode: SupplyWrapMode) -> Tuple[str, Any]:
        """Returns the (formatted cell name, layout cellview) pair for output.

        Raises
        ------
        ValueError
            if the template has not been finalized yet.
        """
        if not self.finalized:
            raise ValueError('This template is not finalized yet')

        cell_name = format_cell_name(self.cell_name, rename_dict, name_prefix, name_suffix,
                                     exact_cell_names, supply_wrap_mode)
        return cell_name, self._layout
+
+ def finalize(self) -> None:
+ """Finalize this master instance.
+ """
+ # create layout
+ self.draw_layout()
+
+ # finalize this template
+ grid = self.grid
+ grid.tech_info.finalize_template(self)
+
+ # construct port objects
+ for net_name, port_params in self._port_params.items():
+ pin_dict = port_params['pins']
+ label = port_params['label']
+ hide = port_params['hide']
+ if port_params['show']:
+ label = port_params['label']
+ for lay, geo_list in pin_dict.items():
+ if isinstance(lay, int):
+ for warr in geo_list:
+ self._layout.add_pin_arr(net_name, label, warr.track_id,
+ warr.lower, warr.upper)
+ else:
+ for box in geo_list:
+ self._layout.add_pin(lay, net_name, label, box)
+ self._ports[net_name] = Port(net_name, pin_dict, label, hide)
+
+ # call super finalize routine
+ DesignMaster.finalize(self)
+
    @property
    def show_pins(self) -> bool:
        """bool: True to show pins."""
        return self._show_pins

    @property
    def sch_params(self) -> Optional[Param]:
        """Optional[Param]: The schematic parameters dictionary, set by draw_layout()."""
        return self._sch_params

    @sch_params.setter
    def sch_params(self, new_params: Dict[str, Any]) -> None:
        # stored as an immutable dict so downstream code cannot mutate it
        self._sch_params = ImmutableSortedDict(new_params)

    @property
    def template_db(self) -> TemplateDB:
        """TemplateDB: The template database object"""
        # noinspection PyTypeChecker
        return self.master_db

    @property
    def is_empty(self) -> bool:
        """bool: True if this template is empty."""
        return self._layout.is_empty

    @property
    def grid(self) -> RoutingGrid:
        """RoutingGrid: The RoutingGrid object"""
        return self._grid

    @grid.setter
    def grid(self, new_grid: RoutingGrid) -> None:
        # keep the underlying cellview in sync with the new grid
        self._layout.set_grid(new_grid)
        self._grid = new_grid

    @property
    def tr_colors(self) -> TrackColoring:
        """TrackColoring: the track coloring object of this template."""
        return self._tr_colors

    @property
    def array_box(self) -> Optional[BBox]:
        """Optional[BBox]: The array/abutment bounding box of this template."""
        return self._array_box

    @array_box.setter
    def array_box(self, new_array_box: BBox) -> None:
        if not self._finalized:
            self._array_box = new_array_box
        else:
            raise RuntimeError('Template already finalized.')

    @property
    def fill_box(self) -> Optional[BBox]:
        """Optional[BBox]: The dummy fill bounding box of this template."""
        return self._fill_box

    @fill_box.setter
    def fill_box(self, new_box: BBox) -> None:
        if not self._finalized:
            self._fill_box = new_box
        else:
            raise RuntimeError('Template already finalized.')
+
+ @property
+ def top_layer(self) -> int:
+ """int: The top layer ID used in this template."""
+ if self.size is None:
+ if self.prim_top_layer is None:
+ raise Exception('Both size and prim_top_layer are unset.')
+ return self.prim_top_layer
+ return self.size[0]
+
    @property
    def size(self) -> Optional[SizeType]:
        """Optional[SizeType]: The size of this template, in (layer, nx_blk, ny_blk) format."""
        return self._size

    @property
    def size_defined(self) -> bool:
        """bool: True if size or bounding box has been set."""
        return self.size is not None or self.prim_bound_box is not None

    @property
    def bound_box(self) -> Optional[BBox]:
        """Optional[BBox]: The template BBox; raises if neither size nor
        prim_bound_box has been set."""
        mysize = self.size
        if mysize is None:
            if self.prim_bound_box is None:
                raise ValueError('Both size and prim_bound_box are unset.')
            return self.prim_bound_box

        wblk, hblk = self.grid.get_size_dimension(mysize)
        return BBox(0, 0, wblk, hblk)

    @size.setter
    def size(self, new_size: SizeType) -> None:
        if not self._finalized:
            self._size = new_size
        else:
            raise RuntimeError('Template already finalized.')

    @property
    def layout_cellview(self) -> PyLayCellView:
        """PyLayCellView: The internal layout object."""
        return self._layout

    @property
    def edge_info(self) -> Optional[TemplateEdgeInfo]:
        """Optional[TemplateEdgeInfo]: the edge parameters of this template, if set."""
        return self._edge_info

    @property
    def use_color(self) -> bool:
        """bool: True if this template uses colored layers."""
        return self._use_color

    @edge_info.setter
    def edge_info(self, new_info: TemplateEdgeInfo) -> None:
        self._edge_info = new_info
+
    def get_margin(self, top_layer: int, edge_dir: Direction2D,
                   half_blk_x: bool = True, half_blk_y: bool = True) -> int:
        """Returns the margin required on the given edge, rounded up to the
        block-size quantum of top_layer.

        Requires ``edge_info`` to be set; raises otherwise.
        """
        grid = self.grid
        tech_info = grid.tech_info

        edge_info = self.edge_info
        if edge_info is None:
            # TODO: implement this. Need to recurse down instance hierarchy
            raise ValueError('Not implemented yet. See developer.')

        my_edge = self.edge_info.get_edge_params(edge_dir)
        is_vertical = edge_dir.is_vertical
        margin = tech_info.get_margin(is_vertical, my_edge, None)

        # the bool is_vertical indexes into the (w, h) block-size tuple --
        # presumably vertical edges quantize along width; TODO confirm.
        blk_size = grid.get_block_size(top_layer, half_blk_x=half_blk_x, half_blk_y=half_blk_y)
        q = blk_size[is_vertical]
        # ceiling division: round margin up to a multiple of q
        return -(-margin // q) * q
+
    def get_rect_bbox(self, lay_purp: Tuple[str, str]) -> BBox:
        """Returns the overall bounding box of all rectangles on the given layer.

        Note: currently this does not check primitive instances or vias.

        Parameters
        ----------
        lay_purp: Tuple[str, str]
            the layer/purpose pair.

        Returns
        -------
        box : BBox
            the overall bounding box of the given layer.
        """
        return self._layout.get_rect_bbox(lay_purp[0], lay_purp[1])

    def new_template_with(self, **kwargs: Any) -> TemplateBase:
        """Create a new template with the given parameters.

        This method will update the parameter values with the given dictionary,
        then create a new template with those parameters and return it.

        Parameters
        ----------
        **kwargs : Any
            a dictionary of new parameter values.

        Returns
        -------
        new_temp : TemplateBase
            A new layout master object.
        """
        # get new parameter dictionary.
        new_params = self.params.copy(append=kwargs)
        return self.template_db.new_template(self.__class__, params=new_params)
+
    def set_size_from_bound_box(self, top_layer_id: int, bbox: BBox, *, round_up: bool = False,
                                half_blk_x: bool = True, half_blk_y: bool = True) -> None:
        """Compute the size from overall bounding box.

        Parameters
        ----------
        top_layer_id : int
            the top level routing layer ID that array box is calculated with.
        bbox : BBox
            the overall bounding box
        round_up: bool
            True to round up bounding box if not quantized properly
        half_blk_x : bool
            True to allow half-block widths.
        half_blk_y : bool
            True to allow half-block heights.

        Raises
        ------
        ValueError
            if the bounding box's lower-left corner is not at the origin.
        """
        grid = self.grid

        if bbox.xl != 0 or bbox.yl != 0:
            raise ValueError('lower-left corner of overall bounding box must be (0, 0).')

        # if the grid defines a block size at this layer, record a quantized
        # size tuple; otherwise fall back to the primitive bounding box.
        if grid.size_defined(top_layer_id):
            self.size = grid.get_size_tuple(top_layer_id, bbox.w, bbox.h, round_up=round_up,
                                            half_blk_x=half_blk_x, half_blk_y=half_blk_y)
        else:
            self.prim_top_layer = top_layer_id
            self.prim_bound_box = bbox

    def set_size_from_array_box(self, top_layer_id: int) -> None:
        """Automatically compute the size from array_box.

        Assumes the array box is exactly in the center of the template.

        Parameters
        ----------
        top_layer_id : int
            the top level routing layer ID that array box is calculated with.
        """
        grid = self.grid

        array_box = self.array_box
        if array_box is None:
            raise ValueError("array_box is not set")

        dx = array_box.xl
        dy = array_box.yl
        if dx < 0 or dy < 0:
            raise ValueError('lower-left corner of array box must be in first quadrant.')

        # total size = array box plus equal margins on both sides
        # noinspection PyAttributeOutsideInit
        self.size = grid.get_size_tuple(top_layer_id, 2 * dx + self.array_box.width_unit,
                                        2 * dy + self.array_box.height_unit)
+
+ def get_pin_name(self, name: str) -> str:
+ """Get the actual name of the given pin from the renaming dictionary.
+
+ Given a pin name, If this Template has a parameter called 'rename_dict',
+ return the actual pin name from the renaming dictionary.
+
+ Parameters
+ ----------
+ name : str
+ the pin name.
+
+ Returns
+ -------
+ actual_name : str
+ the renamed pin name.
+ """
+ rename_dict = self.params.get('rename_dict', {})
+ return rename_dict.get(name, name)
+
+ def get_port(self, name: str = '') -> Port:
+ """Returns the port object with the given name.
+
+ Parameters
+ ----------
+ name : str
+ the port terminal name. If None or empty, check if this template has only one port,
+ then return it.
+
+ Returns
+ -------
+ port : Port
+ the port object.
+ """
+ if not name:
+ if len(self._ports) != 1:
+ raise ValueError('Template has %d ports != 1.' % len(self._ports))
+ name = next(iter(self._ports))
+ return self._ports[name]
+
    def has_port(self, port_name: str) -> bool:
        """Returns True if this template has the given port.

        Parameters
        ----------
        port_name : str
            the port name to look up.

        Returns
        -------
        has_port : bool
            True if a port with this name exists in this template.
        """
        return port_name in self._ports
+
    def port_names_iter(self) -> Iterable[str]:
        """Iterates over port names in this template.

        Yields
        ------
        port_name : str
            name of a port in this template.
        """
        # returns the ports dict keys view, which is a live iterable of names
        return self._ports.keys()
+
+ def new_template(self, temp_cls: Type[TemplateType], *,
+ params: Optional[Mapping[str, Any]] = None,
+ show_pins: bool = False,
+ grid: Optional[RoutingGrid] = None) -> TemplateType:
+ """Create a new template.
+
+ Parameters
+ ----------
+ temp_cls : Type[TemplateType]
+ the template class to instantiate.
+ params : Optional[Mapping[str, Any]]
+ the parameter dictionary.
+ show_pins : bool
+ True to pass show_pins in the generated template, if params does not already have
+ show_pins.
+ grid: Optional[RoutingGrid]
+ routing grid for this cell.
+
+ Returns
+ -------
+ template : TemplateType
+ the new template instance.
+ """
+ if grid is None:
+ grid = self.grid
+ show_pins = params.get('show_pins', show_pins)
+ if isinstance(params, ImmutableSortedDict):
+ params = params.copy(append=dict(grid=grid, show_pins=show_pins))
+ else:
+ params['grid'] = grid
+ params['show_pins'] = show_pins
+ return self.template_db.new_template(params=params, temp_cls=temp_cls)
+
+ def add_instance(self,
+ master: TemplateBase,
+ *,
+ inst_name: str = '',
+ xform: Optional[Transform] = None,
+ nx: int = 1,
+ ny: int = 1,
+ spx: int = 0,
+ spy: int = 0,
+ commit: bool = True,
+ ) -> PyLayInstance:
+ """Adds a new (arrayed) instance to layout.
+
+ Parameters
+ ----------
+ master : TemplateBase
+ the master template object.
+ inst_name : Optional[str]
+ instance name. If None or an instance with this name already exists,
+ a generated unique name is used.
+ xform : Optional[Transform]
+ the transformation object.
+ nx : int
+ number of columns. Must be positive integer.
+ ny : int
+ number of rows. Must be positive integer.
+ spx : CoordType
+ column pitch. Used for arraying given instance.
+ spy : CoordType
+ row pitch. Used for arraying given instance.
+ commit : bool
+ True to commit the object immediately.
+
+ Returns
+ -------
+ inst : PyLayInstance
+ the added instance.
+ """
+ if xform is None:
+ xform = Transform()
+
+ ref = self._layout.add_instance(master.layout_cellview, inst_name, xform, nx, ny,
+ spx, spy, False)
+ ans = PyLayInstance(self, master, ref)
+ if commit:
+ ans.commit()
+
+ self._instances[ans.name] = ans
+ self._use_color = self._use_color or master.use_color
+ return ans
+
+ def add_instance_primitive(self,
+ lib_name: str,
+ cell_name: str,
+ *,
+ xform: Optional[Transform] = None,
+ view_name: str = 'layout',
+ inst_name: str = '',
+ nx: int = 1,
+ ny: int = 1,
+ spx: int = 0,
+ spy: int = 0,
+ params: Optional[Dict[str, Any]] = None,
+ commit: bool = True,
+ **kwargs: Any,
+ ) -> PyLayInstRef:
+ """Adds a new (arrayed) primitive instance to layout.
+
+ Parameters
+ ----------
+ lib_name : str
+ instance library name.
+ cell_name : str
+ instance cell name.
+ xform : Optional[Transform]
+ the transformation object.
+ view_name : str
+ instance view name. Defaults to 'layout'.
+ inst_name : Optional[str]
+ instance name. If None or an instance with this name already exists,
+ a generated unique name is used.
+ nx : int
+ number of columns. Must be positive integer.
+ ny : int
+ number of rows. Must be positive integer.
+ spx : CoordType
+ column pitch. Used for arraying given instance.
+ spy : CoordType
+ row pitch. Used for arraying given instance.
+ params : Optional[Dict[str, Any]]
+ the parameter dictionary. Used for adding pcell instance.
+ commit : bool
+ True to commit the object immediately.
+ **kwargs : Any
+ additional arguments. Usually implementation specific.
+
+ Returns
+ -------
+ ref : PyLayInstRef
+ A reference to the primitive instance.
+ """
+ if not params:
+ params = kwargs
+ else:
+ params.update(kwargs)
+ if xform is None:
+ xform = Transform()
+
+ # TODO: support pcells
+ if params:
+ raise ValueError("layout pcells not supported yet; see developer")
+
+ return self._layout.add_prim_instance(lib_name, cell_name, view_name, inst_name, xform,
+ nx, ny, spx, spy, commit)
+
+ def is_horizontal(self, layer: str) -> bool:
+ """Returns True if the given layer has no direction or is horizontal."""
+ lay_id = self._grid.tech_info.get_layer_id(layer)
+ return (lay_id is None) or self._grid.is_horizontal(lay_id)
+
+ def add_rect(self, lay_purp: Tuple[str, str], bbox: BBox, commit: bool = True) -> PyRect:
+ """Add a new rectangle.
+
+ Parameters
+ ----------
+ lay_purp: Tuple[str, str]
+ the layer/purpose pair.
+ bbox : BBox
+ the rectangle bounding box.
+ commit : bool
+ True to commit the object immediately.
+
+ Returns
+ -------
+ rect : PyRect
+ the added rectangle.
+ """
+ return self._layout.add_rect(lay_purp[0], lay_purp[1], bbox, commit)
+
+ def add_rect_array(self, lay_purp: Tuple[str, str], bbox: BBox,
+ nx: int = 1, ny: int = 1, spx: int = 0, spy: int = 0) -> None:
+ """Add a new rectangle array.
+ """
+ self._layout.add_rect_arr(lay_purp[0], lay_purp[1], bbox, nx, ny, spx, spy)
+
+ def add_bbox_array(self, lay_purp: Tuple[str, str], barr: BBoxArray) -> None:
+ """Add a new rectangle array.
+
+ Parameters
+ ----------
+ lay_purp: Tuple[str, str]
+ the layer/purpose pair.
+ barr : BBoxArray
+ the rectangle bounding box array.
+ """
+ self._layout.add_rect_arr(lay_purp[0], lay_purp[1], barr)
+
    def add_bbox_collection(self, lay_purp: Tuple[str, str], bcol: BBoxCollection) -> None:
        """Add all rectangles in the given BBoxCollection on the layer/purpose pair.

        Parameters
        ----------
        lay_purp : Tuple[str, str]
            the layer/purpose pair.
        bcol : BBoxCollection
            the collection of bounding boxes to add.
        """
        self._layout.add_rect_list(lay_purp[0], lay_purp[1], bcol)
+
+ def add_res_metal(self, layer_id: int, bbox: BBox) -> None:
+ """Add a new metal resistor.
+
+ Parameters
+ ----------
+ layer_id : int
+ the metal layer ID.
+ bbox : BBox
+ the resistor bounding box.
+ """
+ for lay, purp in self._grid.tech_info.get_res_metal_layers(layer_id):
+ self._layout.add_rect(lay, purp, bbox, True)
+
+ def add_path(self, lay_purp: Tuple[str, str], width: int, points: List[PointType],
+ start_style: PathStyle, *, join_style: PathStyle = PathStyle.round,
+ stop_style: Optional[PathStyle] = None, commit: bool = True) -> PyPath:
+ """Add a new path.
+
+ Parameters
+ ----------
+ lay_purp: Tuple[str, str]
+ the layer/purpose pair.
+ width : int
+ the path width.
+ points : List[PointType]
+ points defining this path.
+ start_style : PathStyle
+ the path beginning style.
+ join_style : PathStyle
+ path style for the joints.
+ stop_style : Optional[PathStyle]
+ the path ending style. Defaults to start style.
+ commit : bool
+ True to commit the object immediately.
+
+ Returns
+ -------
+ path : PyPath
+ the added path object.
+ """
+ if stop_style is None:
+ stop_style = start_style
+ half_width = width // 2
+ return self._layout.add_path(lay_purp[0], lay_purp[1], points, half_width, start_style,
+ stop_style, join_style, commit)
+
+ def add_path45_bus(self, lay_purp: Tuple[str, str], points: List[PointType], widths: List[int],
+ spaces: List[int], start_style: PathStyle, *,
+ join_style: PathStyle = PathStyle.round,
+ stop_style: Optional[PathStyle] = None, commit: bool = True) -> PyPath:
+ """Add a path bus that only contains 45 degree turns.
+
+ Parameters
+ ----------
+ lay_purp: Tuple[str, str]
+ the layer/purpose pair.
+ points : List[PointType]
+ points defining this path.
+ widths : List[int]
+ width of each path in the bus.
+ spaces : List[int]
+ space between each path.
+ start_style : PathStyle
+ the path beginning style.
+ join_style : PathStyle
+ path style for the joints.
+ stop_style : Optional[PathStyle]
+ the path ending style. Defaults to start style.
+ commit : bool
+ True to commit the object immediately.
+
+ Returns
+ -------
+ path : PyPath
+ the added path object.
+ """
+ if stop_style is None:
+ stop_style = start_style
+ return self._layout.add_path45_bus(lay_purp[0], lay_purp[1], points, widths, spaces,
+ start_style, stop_style, join_style, commit)
+
+ def add_polygon(self, lay_purp: Tuple[str, str], points: List[PointType],
+ commit: bool = True) -> PyPolygon:
+ """Add a new polygon.
+
+ Parameters
+ ----------
+ lay_purp: Tuple[str, str]
+ the layer/purpose pair.
+ points : List[PointType]
+ vertices of the polygon.
+ commit : bool
+ True to commit the object immediately.
+
+ Returns
+ -------
+ polygon : PyPolygon
+ the added polygon object.
+ """
+ return self._layout.add_poly(lay_purp[0], lay_purp[1], points, commit)
+
    def add_blockage(self, layer: str, blk_type: BlockageType, points: List[PointType],
                     commit: bool = True) -> PyBlockage:
        """Add a new blockage object.

        Parameters
        ----------
        layer : str
            the layer name.
        blk_type : BlockageType
            the blockage type.
        points : List[PointType]
            vertices of the polygon outlining the blockage region.
        commit : bool
            True to commit the object immediately.

        Returns
        -------
        blockage : PyBlockage
            the added blockage object.
        """
        return self._layout.add_blockage(layer, blk_type, points, commit)
+
    def add_boundary(self, bnd_type: BoundaryType, points: List[PointType],
                     commit: bool = True) -> PyBoundary:
        """Add a new boundary.

        Parameters
        ----------
        bnd_type : BoundaryType
            the boundary type.
        points : List[PointType]
            vertices of the boundary object.
        commit : bool
            True to commit the object immediately.

        Returns
        -------
        boundary : PyBoundary
            the added boundary object.
        """
        return self._layout.add_boundary(bnd_type, points, commit)
+
+ def add_cell_boundary(self, bbox: BBox) -> None:
+ """Adds cell boundary in this template.
+
+ By default, this method is called when finalizing a template (although the process
+ implementation may override this behavior) to set the cell boundary, which is generally
+ used for DRC or P&R purposes.
+
+ This method can only be called once from the template. All calls after the first one will
+ be ignored. Therefore, if you need to set the cell boundary to be something other than
+ the template's bounding box, you can call this in the draw_layout() method.
+
+ Parameters
+ ----------
+ bbox : BBox
+ the cell boundary bounding box.
+ """
+ if not self._cell_boundary_added:
+ self._cell_boundary_added = True
+ self.grid.tech_info.add_cell_boundary(self, bbox)
+
    def disable_cell_boundary(self) -> None:
        """Disable cell boundary drawing in this template."""
        # setting the flag makes any later add_cell_boundary() call a no-op
        self._cell_boundary_added = True
+
    def reexport(self, port: Port, *,
                 net_name: str = '', label: str = '', show: Optional[bool] = None,
                 hide: Optional[bool] = None, connect: bool = False) -> None:
        """Re-export the given port object.

        Add all geometries in the given port as pins with optional new name
        and label.

        Parameters
        ----------
        port : Port
            the Port object to re-export.
        net_name : str
            the new net name. If not given, use the port's current net name.
        label : str
            the label. If not given, use net_name.
        show : Optional[bool]
            True to draw the pin in layout. If None, use self.show_pins
        hide: Optional[bool]
            if given, it overrides the hide flag of the port, otherwise the default is used.
        connect : bool
            True to enable connection by name.
        """
        if show is None:
            show = self._show_pins

        net_name = net_name or port.net_name
        if not label:
            # derive the label: keep the port's label unless the net is renamed,
            # in which case preserve only the trailing ':' (connect-by-name marker)
            if net_name != port.net_name:
                if port.label.endswith(':'):
                    # inherit connect setting of the port
                    label = net_name + ':'
                else:
                    label = net_name
            else:
                label = port.label
        if connect and label[-1] != ':':
            label += ':'

        if hide is None:
            hide = port.hidden

        # hidden pins are never drawn
        show = show and not hide
        if net_name not in self._port_params:
            self._port_params[net_name] = dict(label=label, pins={}, show=show, hide=hide)

        port_params = self._port_params[net_name]
        # check labels is consistent.
        # all pins sharing a net must agree on label/show/hide
        if port_params['label'] != label:
            msg = 'Current port label = %s != specified label = %s'
            raise ValueError(msg % (port_params['label'], label))
        if port_params['show'] != show:
            raise ValueError('Conflicting show port specification.')
        if port_params['hide'] != hide:
            raise ValueError('Conflicting hide port specification.')

        # export all port geometries, grouped per layer
        port_pins = port_params['pins']
        for lay, geo_list in port.items():
            cur_geo_list = port_pins.get(lay, None)
            if cur_geo_list is None:
                port_pins[lay] = cur_geo_list = []
            cur_geo_list.extend(geo_list)
+
    def add_pin_primitive(self, net_name: str, layer: str, bbox: BBox, *,
                          label: str = '', show: Optional[bool] = True, hide: bool = False,
                          connect: bool = False):
        """Add a primitive pin to the layout.

        Parameters
        ----------
        net_name : str
            the net name associated with the pin.
        layer : str
            the pin layer name.
        bbox : BBox
            the pin bounding box.
        label : str
            the label of this pin. If None or empty, defaults to be the net_name.
            this argument is used if you need the label to be different than net name
            for LVS purposes. For example, unconnected pins usually need a colon after
            the name to indicate that LVS should short those pins together.
        show : Optional[bool]
            True to draw the pin in layout.  If None, use self.show_pins.
        hide : bool
            True to add a hidden pin.
        connect : bool
            True to enable connection by name.
        """
        if show is None:
            show = self._show_pins
        # hidden pins are never drawn
        show = show and not hide

        label = label or net_name
        # a trailing ':' marks the pin as connect-by-name
        if connect and label[-1] != ':':
            label += ':'

        port_params = self._port_params.get(net_name, None)
        if port_params is None:
            # first pin on this net: create the port entry
            self._port_params[net_name] = port_params = dict(label=label, pins={},
                                                             show=show, hide=hide)
        else:
            # check labels is consistent.
            # all pins sharing a net must agree on label/show/hide
            if port_params['label'] != label:
                msg = 'Current port label = %s != specified label = %s'
                raise ValueError(msg % (port_params['label'], label))
            if port_params['show'] != show:
                raise ValueError('Conflicting show port specification.')
            if port_params['hide'] != hide:
                raise ValueError('Conflicting hide port specification.')

        port_pins = port_params['pins']

        if layer in port_pins:
            port_pins[layer].append(bbox)
        else:
            port_pins[layer] = [bbox]
+
+ def add_label(self, label: str, lay_purp: Tuple[str, str], bbox: BBox) -> None:
+ """Adds a label to the layout.
+
+ This is mainly used to add voltage text labels.
+
+ Parameters
+ ----------
+ label : str
+ the label text.
+ lay_purp: Tuple[str, str]
+ the layer/purpose pair.
+ bbox : BBox
+ the label bounding box.
+ """
+ w = bbox.w
+ text_h = bbox.h
+ if text_h > w:
+ orient = Orientation.R90
+ text_h = w
+ else:
+ orient = Orientation.R0
+ xform = Transform(bbox.xm, bbox.ym, orient)
+ self._layout.add_label(lay_purp[0], lay_purp[1], xform, label, text_h)
+
    def add_pin(self, net_name: str, wire_arr_list: Union[WireArray, List[WireArray]],
                *, label: str = '', show: Optional[bool] = None,
                mode: PinMode = PinMode.ALL, hide: bool = False, connect: bool = False) -> None:
        """Add new pin to the layout.

        If one or more pins with the same net name already exists,
        they'll be grouped under the same port.

        Parameters
        ----------
        net_name : str
            the net name associated with the pin.
        wire_arr_list : Union[WireArray, List[WireArray]]
            WireArrays representing the pin geometry.
        label : str
            the label of this pin. If None or empty, defaults to be the net_name.
            this argument is used if you need the label to be different than net name
            for LVS purposes. For example, unconnected pins usually need a colon after
            the name to indicate that LVS should short those pins together.
        show : Optional[bool]
            if True, draw the pin in layout. If None, use self.show_pins
        mode : PinMode
            location of the pin relative to the WireArray.
        hide : bool
            True if this is a hidden pin.
        connect : bool
            True to enable connection by name.
        """
        if show is None:
            show = self._show_pins
        # hidden pins are never drawn
        show = show and not hide

        label = label or net_name
        # a trailing ':' marks the pin as connect-by-name
        if connect and label[-1] != ':':
            label += ':'

        port_params = self._port_params.get(net_name, None)
        if port_params is None:
            # first pin on this net: create the port entry
            self._port_params[net_name] = port_params = dict(label=label, pins={},
                                                             show=show, hide=hide)
        else:
            # check labels is consistent.
            # all pins sharing a net must agree on label/show/hide
            if port_params['label'] != label:
                msg = 'Current port label = %s != specified label = %s'
                raise ValueError(msg % (port_params['label'], label))
            if port_params['show'] != show:
                raise ValueError('Conflicting show port specification.')
            if port_params['hide'] != hide:
                raise ValueError('Conflicting hide port specification.')

        grid = self._grid
        for warr in WireArray.wire_grp_iter(wire_arr_list):
            # add pin array to port_pins
            tid = warr.track_id.copy_with(grid)
            layer_id = tid.layer_id
            if mode is not PinMode.ALL:
                # create new pin WireArray that's snapped to the edge
                cur_w = grid.get_wire_total_width(layer_id, tid.width)
                wl = warr.lower
                wu = warr.upper
                # pin length: minimum legal length, capped at the wire's own length
                pin_len = min(grid.get_next_length(layer_id, tid.width, cur_w, even=True),
                              wu - wl)
                if mode is PinMode.LOWER:
                    wu = wl + pin_len
                elif mode is PinMode.UPPER:
                    wl = wu - pin_len
                else:
                    # MIDDLE: center the minimum-length pin on the wire
                    wl = (wl + wu - pin_len) // 2
                    wu = wl + pin_len
                warr = WireArray(tid, wl, wu)

            port_pins = port_params['pins']
            if layer_id not in port_pins:
                port_pins[layer_id] = [warr]
            else:
                port_pins[layer_id].append(warr)

        self._use_color = True
+
+ def add_via(self, bbox: BBox, bot_lay_purp: Tuple[str, str], top_lay_purp: Tuple[str, str],
+ bot_dir: Orient2D, *, extend: bool = True, top_dir: Optional[Orient2D] = None,
+ add_layers: bool = False, commit: bool = True) -> PyVia:
+ """Adds an arrayed via object to the layout.
+
+ Parameters
+ ----------
+ bbox : BBox
+ the via bounding box, not including extensions.
+ bot_lay_purp : Tuple[str. str]
+ the bottom layer/purpose pair.
+ top_lay_purp : Tuple[str, str]
+ the top layer/purpose pair.
+ bot_dir : Orient2D
+ the bottom layer extension direction.
+ extend : bool
+ True if via extension can be drawn outside of the box.
+ top_dir : Optional[Orient2D]
+ top layer extension direction. Defaults to be perpendicular to bottom layer direction.
+ add_layers : bool
+ True to add metal rectangles on top and bottom layers.
+ commit : bool
+ True to commit via immediately.
+
+ Returns
+ -------
+ via : PyVia
+ the new via object.
+ """
+ tech_info = self._grid.tech_info
+ via_info = tech_info.get_via_info(bbox, Direction.LOWER, bot_lay_purp[0],
+ top_lay_purp[0],
+ bot_dir, purpose=bot_lay_purp[1],
+ adj_purpose=top_lay_purp[1],
+ extend=extend, adj_ex_dir=top_dir)
+
+ if via_info is None:
+ raise ValueError('Cannot create via between layers {} and {} '
+ 'with BBox: {}'.format(bot_lay_purp, top_lay_purp, bbox))
+
+ table = via_info['params']
+ via_id = table['id']
+ xform = table['xform']
+ via_param = table['via_param']
+
+ return self._layout.add_via(xform, via_id, via_param, add_layers, commit)
+
    def add_via_arr(self, barr: BBoxArray, bot_lay_purp: Tuple[str, str],
                    top_lay_purp: Tuple[str, str], bot_dir: Orient2D, *, extend: bool = True,
                    top_dir: Optional[Orient2D] = None, add_layers: bool = False) -> Dict[str, Any]:
        """Adds an arrayed via object to the layout.

        Parameters
        ----------
        barr : BBoxArray
            the BBoxArray representing the via bounding boxes, not including extensions.
        bot_lay_purp : Tuple[str. str]
            the bottom layer/purpose pair.
        top_lay_purp : Tuple[str, str]
            the top layer/purpose pair.
        bot_dir : Orient2D
            the bottom layer extension direction.
        extend : bool
            True if via extension can be drawn outside of the box.
        top_dir : Optional[Orient2D]
            top layer extension direction. Defaults to be perpendicular to bottom layer direction.
        add_layers : bool
            True to add metal rectangles on top and bottom layers.

        Returns
        -------
        via_info : Dict[str, Any]
            the via information dictionary.
        """
        tech_info = self._grid.tech_info
        # via geometry is computed once from the base box, then arrayed
        base_box = barr.base
        via_info = tech_info.get_via_info(base_box, Direction.LOWER, bot_lay_purp[0],
                                          top_lay_purp[0], bot_dir, purpose=bot_lay_purp[1],
                                          adj_purpose=top_lay_purp[1], extend=extend,
                                          adj_ex_dir=top_dir)

        if via_info is None:
            raise ValueError('Cannot create via between layers {} and {} '
                             'with BBox: {}'.format(bot_lay_purp, top_lay_purp, base_box))

        table = via_info['params']
        via_id = table['id']
        xform = table['xform']
        via_param = table['via_param']

        self._layout.add_via_arr(xform, via_id, via_param, add_layers, barr.nx, barr.ny,
                                 barr.spx, barr.spy)

        return via_info
+
+ def add_via_primitive(self, via_type: str, xform: Transform, cut_width: int, cut_height: int,
+ *, num_rows: int = 1, num_cols: int = 1, sp_rows: int = 0,
+ sp_cols: int = 0, enc1: Tuple[int, int, int, int] = (0, 0, 0, 0),
+ enc2: Tuple[int, int, int, int] = (0, 0, 0, 0), nx: int = 1, ny: int = 1,
+ spx: int = 0, spy: int = 0) -> None:
+ """Adds via(s) by specifying all parameters.
+
+ Parameters
+ ----------
+ via_type : str
+ the via type name.
+ xform: Transform
+ the transformation object.
+ cut_width : CoordType
+ via cut width. This is used to create rectangle via.
+ cut_height : CoordType
+ via cut height. This is used to create rectangle via.
+ num_rows : int
+ number of via cut rows.
+ num_cols : int
+ number of via cut columns.
+ sp_rows : CoordType
+ spacing between via cut rows.
+ sp_cols : CoordType
+ spacing between via cut columns.
+ enc1 : Optional[List[CoordType]]
+ a list of left, right, top, and bottom enclosure values on bottom layer.
+ Defaults to all 0.
+ enc2 : Optional[List[CoordType]]
+ a list of left, right, top, and bottom enclosure values on top layer.
+ Defaults to all 0.
+ nx : int
+ number of columns.
+ ny : int
+ number of rows.
+ spx : int
+ column pitch.
+ spy : int
+ row pitch.
+ """
+ l1, r1, t1, b1 = enc1
+ l2, r2, t2, b2 = enc2
+ param = ViaParam(num_cols, num_rows, cut_width, cut_height, sp_cols, sp_rows,
+ l1, r1, t1, b1, l2, r2, t2, b2)
+ self._layout.add_via_arr(xform, via_type, param, True, nx, ny, spx, spy)
+
    def add_via_on_grid(self, tid1: TrackID, tid2: TrackID, *, extend: bool = True
                        ) -> Tuple[Tuple[int, int], Tuple[int, int]]:
        """Add a via on the routing grid.

        Vias are added at every intersection of the two given tracks
        (the coordinate range is unbounded in both directions).

        Parameters
        ----------
        tid1 : TrackID
            the first TrackID
        tid2 : TrackID
            the second TrackID
        extend : bool
            True to extend outside the via bounding box.

        Returns
        -------
        ext_info : Tuple[Tuple[int, int], Tuple[int, int]]
            two integer pairs returned by the layout backend; presumably the
            resulting (lower, upper) wire extents on each layer -- TODO confirm
            against add_via_on_intersections.
        """
        return self._layout.add_via_on_intersections(tid1, tid2, COORD_MIN, COORD_MAX,
                                                     COORD_MIN, COORD_MAX, extend, False)
+
    def extend_wires(self, warr_list: Union[WireArray, List[Optional[WireArray]]], *,
                     lower: Optional[int] = None, upper: Optional[int] = None,
                     min_len_mode: Optional[int] = None) -> List[Optional[WireArray]]:
        """Extend the given wires to the given coordinates.

        Parameters
        ----------
        warr_list : Union[WireArray, List[Optional[WireArray]]]
            the wires to extend.
        lower : Optional[int]
            the wire lower coordinate.
        upper : Optional[int]
            the wire upper coordinate.
        min_len_mode : Optional[int]
            If not None, will extend track so it satisfy minimum length requirement.
            Use -1 to extend lower bound, 1 to extend upper bound, 0 to extend both equally.

        Returns
        -------
        warr_list : List[Optional[WireArray]]
            list of added wire arrays.
            If any elements in warr_list were None, they will be None in the return.
        """
        grid = self._grid

        new_warr_list = []
        for warr in WireArray.wire_grp_iter(warr_list):
            if warr is None:
                # preserve positions of None entries in the output
                new_warr_list.append(None)
            else:
                tid = warr.track_id.copy_with(grid)
                wlower = warr.lower
                wupper = warr.upper
                # wires only ever grow: the new bounds include the old ones
                if lower is None:
                    cur_lower = wlower
                else:
                    cur_lower = min(lower, wlower)
                if upper is None:
                    cur_upper = wupper
                else:
                    cur_upper = max(upper, wupper)
                if min_len_mode is not None:
                    # extend track to meet minimum length
                    # make sure minimum length is even so that middle coordinate exists
                    tr_len = cur_upper - cur_lower
                    next_len = grid.get_next_length(tid.layer_id, tid.width, tr_len, even=True)
                    if next_len > tr_len:
                        ext = next_len - tr_len
                        if min_len_mode < 0:
                            cur_lower -= ext
                        elif min_len_mode > 0:
                            cur_upper += ext
                        else:
                            # extend both ends, keeping the wire centered
                            cur_lower -= ext // 2
                            cur_upper = cur_lower + next_len

                new_warr = WireArray(tid, cur_lower, cur_upper)
                self._layout.add_warr(tid, cur_lower, cur_upper)
                new_warr_list.append(new_warr)

        self._use_color = True
        return new_warr_list
+
+ def add_wires(self, layer_id: int, track_idx: TrackType, lower: int, upper: int, *,
+ width: int = 1, num: int = 1, pitch: TrackType = 1) -> WireArray:
+ """Add the given wire(s) to this layout.
+
+ Parameters
+ ----------
+ layer_id : int
+ the wire layer ID.
+ track_idx : TrackType
+ the smallest wire track index.
+ lower : CoordType
+ the wire lower coordinate.
+ upper : CoordType
+ the wire upper coordinate.
+ width : int
+ the wire width in number of tracks.
+ num : int
+ number of wires.
+ pitch : TrackType
+ the wire pitch.
+
+ Returns
+ -------
+ warr : WireArray
+ the added WireArray object.
+ """
+ tid = TrackID(layer_id, track_idx, width=width, num=num, pitch=pitch, grid=self._grid)
+ warr = WireArray(tid, lower, upper)
+ self._layout.add_warr(tid, lower, upper)
+ self._use_color = True
+ return warr
+
    def add_matched_wire(self, warr: WireArray, coord: int, layer_id: int) -> WireArray:
        """Adds a wire (without any via), matched to a provided wire array.
        The mirroring takes place with respect to a coordinate and the track direction on the
        layer of the coordinate

        Parameters
        ----------
        warr : WireArray
            the original wire array for which a matched wire should be drawn
        coord : int
            the coordinate which is used for mirroring
        layer_id : int
            the layer_id of the coordinate. this argument is used to figure out the axis around
            which things should be mirrored

        Returns
        -------
        warr : WireArray
            the added WireArray object.
        """
        grid = self._grid
        tid = warr.track_id
        wire_layer = warr.layer_id
        wire_dir = grid.get_direction(wire_layer)
        ref_dir = grid.get_direction(layer_id)

        if wire_dir is not ref_dir:
            # if wire and reference have different orientation
            # mirror the wire endpoints about coord: x -> 2*coord - x
            # (the track index itself stays the same)
            new_lower = 2 * coord - warr.upper
            new_upper = 2 * coord - warr.lower
            self.add_wires(wire_layer, tid.base_index, new_lower, new_upper)
            return WireArray(tid, new_lower, new_upper)

        # same orientation: mirror the track index about coord's track,
        # keeping the wire endpoints unchanged
        coord_tidx = grid.coord_to_track(layer_id, coord)
        new_tidx = 2 * coord_tidx - tid.base_index
        new_tid = TrackID(wire_layer, new_tidx, width=tid.width, num=tid.num,
                          pitch=tid.pitch, grid=grid)
        self.add_wires(wire_layer, new_tidx, warr.lower, warr.upper)
        return WireArray(new_tid, warr.lower, warr.upper)
+
    def connect_to_tracks_with_dummy_wires(self,
                                           wire_arr_list: Union[WireArray, List[WireArray]],
                                           track_id: TrackID,
                                           ref_coord: int,
                                           ref_layer_id: int,
                                           *,
                                           wire_lower: Optional[int] = None,
                                           wire_upper: Optional[int] = None,
                                           track_lower: Optional[int] = None,
                                           track_upper: Optional[int] = None,
                                           min_len_mode: Optional[MinLenMode] = None,
                                           ret_wire_list: Optional[List[WireArray]] = None,
                                           debug: bool = False) -> Optional[WireArray]:
        """Implements connect_to_tracks but with matched wires drawn simultaneously

        Parameters
        ----------
        wire_arr_list : Union[WireArray, List[WireArray]]
            list of WireArrays to connect to track.
        track_id : TrackID
            TrackID that specifies the track(s) to connect the given wires to.
        ref_coord: int
            the coordinate which is used for mirroring
        ref_layer_id: int
            the layer_id of the coordinate. this argument is used to figure out the axis around
            which things should be mirrored
        wire_lower : Optional[CoordType]
            if given, extend wire(s) to this lower coordinate.
        wire_upper : Optional[CoordType]
            if given, extend wire(s) to this upper coordinate.
        track_lower : Optional[CoordType]
            if given, extend track(s) to this lower coordinate.
        track_upper : Optional[CoordType]
            if given, extend track(s) to this upper coordinate.
        min_len_mode : MinLenMode
            the minimum length extension mode.
        ret_wire_list : Optional[List[WireArray]]
            If not none, extended wires that are created will be appended to this list.
        debug : bool
            True to print debug messages.

        Returns
        -------
        wire_arr : Optional[WireArray]
            WireArray representing the tracks created.
        """
        # always collect the extended wires so matched copies can be drawn for them
        if ret_wire_list is None:
            ret_wire_list = []
        track_warr = self.connect_to_tracks(wire_arr_list,
                                            track_id,
                                            wire_lower=wire_lower,
                                            wire_upper=wire_upper,
                                            track_lower=track_lower,
                                            track_upper=track_upper,
                                            min_len_mode=min_len_mode,
                                            ret_wire_list=ret_wire_list,
                                            debug=debug)
        # draw a mirrored copy of the track and of every extended wire
        self.add_matched_wire(track_warr, ref_coord, ref_layer_id)
        for warr in ret_wire_list:
            self.add_matched_wire(warr, ref_coord, ref_layer_id)

        return track_warr
+
+ def connect_wire_to_coord(self, wire: WireArray, layer_id: int, coord: int,
+ min_len_mode: MinLenMode = MinLenMode.NONE,
+ round_mode: RoundMode = RoundMode.NONE) -> WireArray:
+ """ Connects a given wire to a wire on the next/previous layer aligned with a given
+ coordinate.
+
+ Parameters
+ ----------
+ wire : WireArray
+ wire object to be connected.
+ layer_id: int
+ the wire layer ID.
+ coord : CoordType
+ the coordinate to be used for alignment.
+ min_len_mode : MinLenMode
+ the minimum length extension mode used in connect_to_tracks.
+ round_mode: RoundMode
+ the rounding mode used in coord_to_track conversion.
+
+ Returns
+ -------
+ warr : WireArray
+ the added WireArray object.
+ """
+ if layer_id not in [wire.layer_id + 1, wire.layer_id - 1]:
+ raise ValueError(f'cannot connect wire of layer {wire.layer_id} to layer {layer_id}')
+ tidx = self.grid.coord_to_track(layer_id, coord, round_mode)
+ tid = TrackID(layer_id, tidx, width=wire.track_id.width)
+ warr = self.connect_to_tracks(wire, tid, min_len_mode=min_len_mode)
+ return warr
+
+ def add_res_metal_warr(self, layer_id: int, track_idx: TrackType, lower: int, upper: int,
+ **kwargs: Any) -> Param:
+ """Add metal resistor as WireArray to this layout.
+
+ Parameters
+ ----------
+ layer_id : int
+ the wire layer ID.
+ track_idx : TrackType
+ the smallest wire track index.
+ lower : CoordType
+ the wire lower coordinate.
+ upper : CoordType
+ the wire upper coordinate.
+ **kwargs :
+ optional arguments to add_wires()
+
+ Returns
+ -------
+ sch_params : Dict[str, Any]
+ the metal resistor schematic parameters dictionary.
+ """
+ warr = self.add_wires(layer_id, track_idx, lower, upper, **kwargs)
+
+ wdir = self.grid.get_direction(layer_id)
+ npar = 0
+ w = 0
+ for _, _, box in warr.wire_iter(self._tr_colors):
+ self.add_res_metal(layer_id, box)
+ npar += 1
+ if w == 0:
+ w = box.get_dim(wdir.perpendicular().value)
+
+ ans = dict(
+ w=w,
+ l=upper - lower,
+ layer=layer_id,
+ npar=npar,
+ )
+ return ImmutableSortedDict(ans)
+
+    def add_mom_cap(self, cap_box: BBox, bot_layer: int, num_layer: int, *,
+                    port_widths: Optional[Mapping[int, int]] = None,
+                    port_plow: Optional[Mapping[int, bool]] = None,
+                    array: bool = False,
+                    cap_wires_list: Optional[List[Tuple[Tuple[str, str], Tuple[str, str],
+                                                        BBoxArray, BBoxArray]]] = None,
+                    cap_type: str = 'standard'
+                    ) -> Dict[int, Tuple[List[WireArray], List[WireArray]]]:
+        """Draw mom cap in the defined bounding box.
+
+        Parameters
+        ----------
+        cap_box : BBox
+            the MOM cap bounding box.
+        bot_layer : int
+            the bottom cap layer ID.
+        num_layer : int
+            number of cap layers; must be at least 2.
+        port_widths : Optional[Mapping[int, int]]
+            optional map from layer ID to port track width (consumed by MOMCapInfo).
+        port_plow : Optional[Mapping[int, bool]]
+            optional map from layer ID to "positive port on lower side" flag
+            (consumed by MOMCapInfo).
+        array : bool
+            True to align port tracks to cap_box edges directly (for arraying).
+        cap_wires_list : Optional[List]
+            if given, (pos layer/purpose, neg layer/purpose, pos BBoxArray,
+            neg BBoxArray) tuples for each layer are appended to this list.
+        cap_type : str
+            the MOM cap technology flavor key.
+
+        Returns
+        -------
+        port_dict : Dict[int, Tuple[List[WireArray], List[WireArray]]]
+            map from layer ID to (positive port wires, negative port wires).
+        """
+
+        empty_dict = {}
+        if num_layer <= 1:
+            raise ValueError('Must have at least 2 layers for MOM cap.')
+        if port_widths is None:
+            port_widths = empty_dict
+        if port_plow is None:
+            port_plow = empty_dict
+
+        grid = self.grid
+        tech_info = grid.tech_info
+
+        top_layer = bot_layer + num_layer - 1
+        cap_info = MOMCapInfo(tech_info.tech_params['mom_cap'][cap_type], port_widths, port_plow)
+        via_ext_dict = get_cap_via_extensions(cap_info, grid, bot_layer, top_layer)
+
+        # find port locations and cap boundaries.
+        port_tracks: Dict[int, Tuple[List[int], List[int]]] = {}
+        cap_bounds: Dict[int, Tuple[int, int]] = {}
+        cap_exts: Dict[int, Tuple[int, int]] = {}
+        for cur_layer in range(bot_layer, top_layer + 1):
+            cap_w, cap_sp, cap_margin, num_ports = cap_info.get_cap_specs(cur_layer)
+            port_tr_w = cap_info.get_port_tr_w(cur_layer)
+            port_tr_sep = grid.get_sep_tracks(cur_layer, port_tr_w, port_tr_w)
+
+            # ports run along this layer's direction; bounds come from the
+            # perpendicular interval of the cap box
+            dir_idx = grid.get_direction(cur_layer).value
+            coord0, coord1 = cap_box.get_interval(1 - dir_idx)
+            # get max via extension on adjacent layers
+            adj_via_ext = 0
+            if cur_layer != bot_layer:
+                adj_via_ext = via_ext_dict[cur_layer - 1]
+            if cur_layer != top_layer:
+                adj_via_ext = max(adj_via_ext, via_ext_dict[cur_layer + 1])
+            # find track indices
+            if array:
+                tidx0 = grid.coord_to_track(cur_layer, coord0)
+                tidx1 = grid.coord_to_track(cur_layer, coord1)
+            else:
+                # shrink the usable interval by the adjacent-layer via extension
+                # so vias stay inside cap_box
+                tidx0 = grid.find_next_track(cur_layer, coord0 + adj_via_ext, tr_width=port_tr_w,
+                                             mode=RoundMode.GREATER_EQ)
+                tidx1 = grid.find_next_track(cur_layer, coord1 - adj_via_ext, tr_width=port_tr_w,
+                                             mode=RoundMode.LESS_EQ)
+
+            if tidx0 + 2 * num_ports * port_tr_sep >= tidx1:
+                raise ValueError('Cannot draw MOM cap; '
+                                 f'not enough space between ports on layer {cur_layer}.')
+
+            # compute space from MOM cap wires to port wires
+            cap_margin = max(cap_margin, grid.get_min_space(cur_layer, port_tr_w))
+            # num_ports port tracks on each side of the cap area
+            lower_tracks = [tidx0 + idx * port_tr_sep for idx in range(num_ports)]
+            upper_tracks = [tidx1 - idx * port_tr_sep for idx in range(num_ports - 1, -1, -1)]
+
+            tr_ll = grid.get_wire_bounds(cur_layer, lower_tracks[0], width=port_tr_w)[0]
+            tr_lu = grid.get_wire_bounds(cur_layer, lower_tracks[num_ports - 1], width=port_tr_w)[1]
+            tr_ul = grid.get_wire_bounds(cur_layer, upper_tracks[0], width=port_tr_w)[0]
+            tr_uu = grid.get_wire_bounds(cur_layer, upper_tracks[num_ports - 1], width=port_tr_w)[1]
+            port_tracks[cur_layer] = (lower_tracks, upper_tracks)
+            # cap fingers live between the innermost ports, minus margin
+            cap_bounds[cur_layer] = (tr_lu + cap_margin, tr_ul - cap_margin)
+            # outermost port edges; adjacent layers extend wires to cover these
+            cap_exts[cur_layer] = (tr_ll, tr_uu)
+
+        port_dict: Dict[int, Tuple[List[WireArray], List[WireArray]]] = {}
+        cap_wire_dict: Dict[int, Tuple[Tuple[str, str], Tuple[str, str], BBoxArray, BBoxArray]] = {}
+        # draw ports/wires
+        for cur_layer in range(bot_layer, top_layer + 1):
+            # NOTE: rebinds the port_plow parameter name to this layer's flag;
+            # the original mapping was already captured by cap_info above.
+            port_plow = cap_info.get_port_plow(cur_layer)
+            port_tr_w = cap_info.get_port_tr_w(cur_layer)
+            cap_w, cap_sp, cap_margin, num_ports = cap_info.get_cap_specs(cur_layer)
+
+            # find port/cap wires lower/upper coordinates
+            lower = COORD_MAX
+            upper = COORD_MIN
+            if cur_layer != top_layer:
+                lower, upper = cap_exts[cur_layer + 1]
+            if cur_layer != bot_layer:
+                tmpl, tmpu = cap_exts[cur_layer - 1]
+                lower = min(lower, tmpl)
+                upper = max(upper, tmpu)
+
+            via_ext = via_ext_dict[cur_layer]
+            lower -= via_ext
+            upper += via_ext
+
+            # draw ports
+            lower_tracks, upper_tracks = port_tracks[cur_layer]
+            lower_warrs = [self.add_wires(cur_layer, tr_idx, lower, upper, width=port_tr_w)
+                           for tr_idx in lower_tracks]
+            upper_warrs = [self.add_wires(cur_layer, tr_idx, lower, upper, width=port_tr_w)
+                           for tr_idx in upper_tracks]
+
+            # assign port wires to positive/negative terminals
+            num_ports = len(lower_warrs)
+            if port_plow:
+                if num_ports == 1:
+                    plist = lower_warrs
+                    nlist = upper_warrs
+                else:
+                    plist = [lower_warrs[0], upper_warrs[0]]
+                    nlist = [lower_warrs[1], upper_warrs[1]]
+            else:
+                if num_ports == 1:
+                    plist = upper_warrs
+                    nlist = lower_warrs
+                else:
+                    plist = [lower_warrs[1], upper_warrs[1]]
+                    nlist = [lower_warrs[0], upper_warrs[0]]
+
+            # save ports
+            port_dict[cur_layer] = plist, nlist
+
+            # compute cap wires BBoxArray
+            cap_bndl, cap_bndh = cap_bounds[cur_layer]
+            cap_tot_space = cap_bndh - cap_bndl
+            cap_pitch = cap_w + cap_sp
+            num_cap_wires = cap_tot_space // cap_pitch
+            # center the cap fingers within the available space
+            cap_bndl += (cap_tot_space - (num_cap_wires * cap_pitch - cap_sp)) // 2
+
+            cur_dir = grid.get_direction(cur_layer)
+            cap_box0 = BBox(cur_dir, lower, upper, cap_bndl, cap_bndl + cap_w)
+            lay_purp_list = tech_info.get_lay_purp_list(cur_layer)
+            num_lay_purp = len(lay_purp_list)
+            assert num_lay_purp <= 2, 'This method now only works for 1 or 2 colors.'
+            # interleave fingers: even fingers in barr0, odd fingers in barr1,
+            # each array at twice the finger pitch
+            num0 = (num_cap_wires + 1) // 2
+            num1 = num_cap_wires - num0
+            barr_pitch = cap_pitch * 2
+            cap_box1 = cap_box0.get_move_by_orient(cur_dir, dt=0, dp=cap_pitch)
+            barr0 = BBoxArray(cap_box0, cur_dir, np=num0, spp=barr_pitch)
+            barr1 = BBoxArray(cap_box1, cur_dir, np=num1, spp=barr_pitch)
+            if port_plow:
+                capp_barr = barr1
+                capn_barr = barr0
+                capp_lp = lay_purp_list[-1]
+                capn_lp = lay_purp_list[0]
+            else:
+                capp_barr = barr0
+                capn_barr = barr1
+                capp_lp = lay_purp_list[0]
+                capn_lp = lay_purp_list[-1]
+
+            # draw cap wires
+            self.add_bbox_array(capp_lp, capp_barr)
+            self.add_bbox_array(capn_lp, capn_barr)
+            # save caps
+            cap_barr_tuple = (capp_lp, capn_lp, capp_barr, capn_barr)
+            cap_wire_dict[cur_layer] = cap_barr_tuple
+            if cap_wires_list is not None:
+                cap_wires_list.append(cap_barr_tuple)
+
+            # connect port/cap wires to bottom port/cap
+            if cur_layer != bot_layer:
+                # connect ports to layer below
+                bplist, bnlist = port_dict[cur_layer - 1]
+                bcapp_lp, bcapn_lp, bcapp, bcapn = cap_wire_dict[cur_layer - 1]
+                self._add_mom_cap_connect_ports(bplist, plist)
+                self._add_mom_cap_connect_ports(bnlist, nlist)
+                self._add_mom_cap_connect_cap_to_port(Direction.UPPER, capp_lp, capp_barr, bplist)
+                self._add_mom_cap_connect_cap_to_port(Direction.UPPER, capn_lp, capn_barr, bnlist)
+                self._add_mom_cap_connect_cap_to_port(Direction.LOWER, bcapp_lp, bcapp, plist)
+                self._add_mom_cap_connect_cap_to_port(Direction.LOWER, bcapn_lp, bcapn, nlist)
+
+        return port_dict
+
+ def _add_mom_cap_connect_cap_to_port(self, cap_dir: Direction, cap_lp: Tuple[str, str],
+ barr: BBoxArray, ports: List[WireArray]) -> None:
+ num_ports = len(ports)
+ if num_ports == 1:
+ self.connect_bbox_to_tracks(cap_dir, cap_lp, barr, ports[0].track_id)
+ else:
+ port_dir = self.grid.get_direction(ports[0].layer_id)
+ for idx, warr in enumerate(ports):
+ new_barr = barr.get_sub_array(port_dir, num_ports, idx)
+ self.connect_bbox_to_tracks(cap_dir, cap_lp, new_barr, warr.track_id)
+
+ def _add_mom_cap_connect_ports(self, bot_ports: List[WireArray], top_ports: List[WireArray]
+ ) -> None:
+ for bot_warr, top_warr in product(bot_ports, top_ports):
+ self.add_via_on_grid(bot_warr.track_id, top_warr.track_id, extend=True)
+
+    def reserve_tracks(self, layer_id: int, track_idx: TrackType, *,
+                       width: int = 1, num: int = 1, pitch: int = 0) -> None:
+        """Reserve the given routing tracks so that power fill will not fill these tracks.
+
+        Note: the size of this template should be set before calling this method.
+
+        Parameters
+        ----------
+        layer_id : int
+            the wire layer ID.
+        track_idx : TrackType
+            the smallest wire track index.
+        width : int
+            the wire width in number of tracks.
+        num : int
+            number of wires.
+        pitch : TrackType
+            the wire pitch.
+
+        Raises
+        ------
+        ValueError
+            always; this method is not implemented yet.
+        """
+        # TODO: fix this method
+        raise ValueError('Not implemented yet.')
+
+    def get_available_tracks(self, layer_id: int, tid_lo: TrackType, tid_hi: TrackType,
+                             lower: int, upper: int, width: int = 1, sep: HalfInt = HalfInt(1),
+                             include_last: bool = False, sep_margin: Optional[HalfInt] = None
+                             ) -> List[HalfInt]:
+        """Returns a list of available tracks between the given bounds.
+
+        Parameters
+        ----------
+        layer_id : int
+            the layer ID.
+        tid_lo : TrackType
+            the lower track index, inclusive.
+        tid_hi : TrackType
+            the upper track index, exclusive by default.
+        lower : int
+            the lower wire coordinate.
+        upper: int
+            the upper wire coordinate.
+        width : int
+            the track width.
+        sep : HalfInt
+            the track separation
+        include_last : bool
+            True to make "upper" inclusive.
+        sep_margin : Optional[HalfInt]
+            the margin between available tracks and surrounding wires, in number of tracks.
+
+        Returns
+        -------
+        tidx_list : List[HalfInt]
+            list of available tracks.
+        """
+        grid = self.grid
+
+        orient = grid.get_direction(layer_id)
+        tr_info = grid.get_track_info(layer_id)
+        if sep_margin is None:
+            sep_margin = grid.get_sep_tracks(layer_id, width, 1, same_color=False)
+        # half-width of a wire of the given track width
+        bl, bu = grid.get_wire_bounds_htr(layer_id, 0, width)
+        tr_w2 = (bu - bl) // 2
+        # physical clearance (in layout units) corresponding to sep_margin tracks
+        margin = tr_info.pitch * sep_margin - (tr_info.width // 2) - tr_w2
+
+        # the margin only applies perpendicular to the track direction
+        sp_list = [0, 0]
+        sp_list[orient.value ^ 1] = margin
+        spx, spy = sp_list
+
+        # work in half-track units so half-integer track indices stay exact
+        htr0 = HalfInt.convert(tid_lo).dbl_value
+        htr1 = HalfInt.convert(tid_hi).dbl_value
+        if include_last:
+            htr1 += 1
+        htr_sep = HalfInt.convert(sep).dbl_value
+        ans = []
+        cur_htr = htr0
+        while cur_htr < htr1:
+            mid = grid.htr_to_coord(layer_id, cur_htr)
+            box = BBox(orient, lower, upper, mid - tr_w2, mid + tr_w2)
+            if not self._layout.get_intersect(layer_id, box, spx, spy, False):
+                # track is free: take it and skip ahead by the separation
+                ans.append(HalfInt(cur_htr))
+                cur_htr += htr_sep
+            else:
+                # track is blocked: advance one half-track and retry
+                cur_htr += 1
+
+        return ans
+
+    def connect_wires(self, wire_arr_list: Union[WireArray, List[WireArray]], *,
+                      lower: Optional[int] = None,
+                      upper: Optional[int] = None,
+                      debug: bool = False,
+                      ) -> List[WireArray]:
+        """Connect all given WireArrays together.
+
+        all WireArrays must be on the same layer.
+
+        Parameters
+        ----------
+        wire_arr_list : Union[WireArr, List[WireArr]]
+            WireArrays to connect together.
+        lower : Optional[CoordType]
+            if given, extend connection wires to this lower coordinate.
+        upper : Optional[CoordType]
+            if given, extend connection wires to this upper coordinate.
+        debug : bool
+            True to print debug messages.
+
+        Returns
+        -------
+        conn_list : List[WireArray]
+            list of connection wires created.
+        """
+        grid = self._grid
+
+        # COORD_MAX/COORD_MIN sentinels make min()/max() below no-ops when
+        # no explicit bound is given
+        if lower is None:
+            lower = COORD_MAX
+        if upper is None:
+            upper = COORD_MIN
+
+        # record all wire ranges
+        layer_id = None
+        intv_set = IntervalSet()
+        for wire_arr in WireArray.wire_grp_iter(wire_arr_list):
+            # NOTE: no need to copy with new grid, this TrackID is not used to create WireArrays
+            tid = wire_arr.track_id
+            lay_id = tid.layer_id
+            tr_w = tid.width
+            if layer_id is None:
+                layer_id = lay_id
+            elif lay_id != layer_id:
+                raise ValueError('WireArray layer ID != {}'.format(layer_id))
+
+            cur_range = wire_arr.lower, wire_arr.upper
+            for tidx in tid:
+                # key each track by its physical wire interval; wires on the
+                # same track must have identical intervals to be merged
+                intv = grid.get_wire_bounds(lay_id, tidx, width=tr_w)
+                intv_rang_item = intv_set.get_first_overlap_item(intv)
+                if intv_rang_item is None:
+                    range_set = IntervalSet()
+                    range_set.add(cur_range)
+                    intv_set.add(intv, val=(range_set, tidx, tr_w))
+                elif intv_rang_item[0] == intv:
+                    # same track/width: merge the coordinate ranges
+                    tmp_rang_set: IntervalSet = intv_rang_item[1][0]
+                    tmp_rang_set.add(cur_range, merge=True, abut=True)
+                else:
+                    # partial overlap with a different interval is an error
+                    raise ValueError(f'wire on lay={lay_id}, track={tidx} overlap existing wires. '
+                                     f'wire interval={intv}, overlapped wire '
+                                     f'interval={intv_rang_item[0]}')
+
+        # draw wires, group consecutive equal-pitch/equal-size tracks into arrays
+        new_warr_list = []
+        base_start = None  # type: Optional[int]
+        base_end = None  # type: Optional[int]
+        base_tidx = None  # type: Optional[HalfInt]
+        base_width = None  # type: Optional[int]
+        count = 0
+        pitch = 0
+        last_tidx = 0
+        for set_item in intv_set.items():
+            intv = set_item[0]
+            range_set: IntervalSet = set_item[1][0]
+            cur_tidx: HalfInt = set_item[1][1]
+            cur_tr_w: int = set_item[1][2]
+            cur_start = min(lower, range_set.start)
+            cur_end = max(upper, range_set.stop)
+
+            if debug:
+                print('wires intv: %s, range: (%d, %d)' % (intv, cur_start, cur_end))
+            if count == 0:
+                # start a new group
+                base_tidx = cur_tidx
+                base_start = cur_start
+                base_end = cur_end
+                base_width = cur_tr_w
+                count = 1
+                pitch = 0
+            else:
+                assert base_tidx is not None, "count == 0 should have set base_intv"
+                assert base_width is not None, "count == 0 should have set base_width"
+                assert base_start is not None, "count == 0 should have set base_start"
+                assert base_end is not None, "count == 0 should have set base_end"
+                if cur_start == base_start and cur_end == base_end and base_width == cur_tr_w:
+                    # length and width matches
+                    cur_pitch = cur_tidx - last_tidx
+                    if count == 1:
+                        # second wire, set half pitch
+                        pitch = cur_pitch
+                        count += 1
+                    elif pitch == cur_pitch:
+                        # pitch matches
+                        count += 1
+                    else:
+                        # pitch does not match, add current wires and start anew
+                        track_id = TrackID(layer_id, base_tidx, width=base_width,
+                                           num=count, pitch=pitch, grid=grid)
+                        warr = WireArray(track_id, base_start, base_end)
+                        new_warr_list.append(warr)
+                        self._layout.add_warr(track_id, base_start, base_end)
+                        base_tidx = cur_tidx
+                        count = 1
+                        pitch = 0
+                else:
+                    # length/width does not match, add cumulated wires and start anew
+                    track_id = TrackID(layer_id, base_tidx, width=base_width,
+                                       num=count, pitch=pitch, grid=grid)
+                    warr = WireArray(track_id, base_start, base_end)
+                    new_warr_list.append(warr)
+                    self._layout.add_warr(track_id, base_start, base_end)
+                    base_start = cur_start
+                    base_end = cur_end
+                    base_tidx = cur_tidx
+                    base_width = cur_tr_w
+                    count = 1
+                    pitch = 0
+
+            # update last lower coordinate
+            last_tidx = cur_tidx
+
+        if base_tidx is None:
+            # no wires given at all
+            return []
+
+        assert base_tidx is not None, "count == 0 should have set base_intv"
+        assert base_start is not None, "count == 0 should have set base_start"
+        assert base_end is not None, "count == 0 should have set base_end"
+
+        # add last wires
+        track_id = TrackID(layer_id, base_tidx, base_width, num=count, pitch=pitch, grid=grid)
+        warr = WireArray(track_id, base_start, base_end)
+        self._layout.add_warr(track_id, base_start, base_end)
+        new_warr_list.append(warr)
+        self._use_color = True
+        return new_warr_list
+
+ def connect_bbox_to_tracks(self, layer_dir: Direction, lay_purp: Tuple[str, str],
+ box_arr: Union[BBox, BBoxArray], track_id: TrackID, *,
+ track_lower: Optional[int] = None,
+ track_upper: Optional[int] = None,
+ min_len_mode: MinLenMode = MinLenMode.NONE,
+ wire_lower: Optional[int] = None,
+ wire_upper: Optional[int] = None,
+ ret_bnds: Optional[List[int]] = None) -> WireArray:
+ """Connect the given primitive wire to given tracks.
+
+ Parameters
+ ----------
+ layer_dir : Direction
+ the primitive wire layer direction relative to the given tracks. LOWER if
+ the wires are below tracks, UPPER if the wires are above tracks.
+ lay_purp : Tuple[str, str]
+ the primitive wire layer/purpose name.
+ box_arr : Union[BBox, BBoxArray]
+ bounding box of the wire(s) to connect to tracks.
+ track_id : TrackID
+ TrackID that specifies the track(s) to connect the given wires to.
+ track_lower : Optional[int]
+ if given, extend track(s) to this lower coordinate.
+ track_upper : Optional[int]
+ if given, extend track(s) to this upper coordinate.
+ min_len_mode : MinLenMode
+ The minimum length extension mode.
+ wire_lower : Optional[int]
+ if given, extend wire(s) to this lower coordinate.
+ wire_upper : Optional[int]
+ if given, extend wire(s) to this upper coordinate.
+ ret_bnds : Optional[List[int]]
+ if given, return the bounds on the bounding box layer.
+
+ Returns
+ -------
+ wire_arr : WireArray
+ WireArray representing the tracks created.
+ """
+ if isinstance(box_arr, BBox):
+ box_arr = BBoxArray(box_arr)
+
+ track_id = track_id.copy_with(self._grid)
+ bnds = self._layout.connect_barr_to_tracks(layer_dir, lay_purp[0], lay_purp[1], box_arr,
+ track_id, track_lower, track_upper, min_len_mode,
+ wire_lower, wire_upper)
+ tr_idx = 1 - layer_dir.value
+ if ret_bnds is not None:
+ ret_bnds[0] = bnds[layer_dir.value][0]
+ ret_bnds[1] = bnds[layer_dir.value][1]
+
+ self._use_color = True
+ return WireArray(track_id, bnds[tr_idx][0], bnds[tr_idx][1])
+
+ def connect_bbox_to_track_wires(self, layer_dir: Direction, lay_purp: Tuple[str, str],
+ box_arr: Union[BBox, BBoxArray],
+ track_wires: Union[WireArray, List[WireArray]], *,
+ min_len_mode: MinLenMode = MinLenMode.NONE,
+ ret_bnds: Optional[List[int]] = None
+ ) -> Union[Optional[WireArray], List[Optional[WireArray]]]:
+ ans = []
+ bnds = [COORD_MAX, COORD_MIN]
+ for warr in WireArray.wire_grp_iter(track_wires):
+ cur_bnds = [0, 0]
+ tr = self.connect_bbox_to_tracks(layer_dir, lay_purp, box_arr,
+ warr.track_id, track_lower=warr.lower,
+ track_upper=warr.upper, min_len_mode=min_len_mode,
+ ret_bnds=cur_bnds)
+ ans.append(tr)
+ bnds[0] = min(bnds[0], cur_bnds[0])
+ bnds[1] = max(bnds[1], cur_bnds[1])
+
+ if ret_bnds is not None:
+ ret_bnds[0] = bnds[0]
+ ret_bnds[1] = bnds[1]
+
+ if isinstance(track_wires, WireArray):
+ return ans[0]
+ return ans
+
+ def connect_bbox_to_differential_tracks(self, p_lay_dir: Direction, n_lay_dir: Direction,
+ p_lay_purp: Tuple[str, str],
+ n_lay_purp: Tuple[str, str],
+ pbox: Union[BBox, BBoxArray],
+ nbox: Union[BBox, BBoxArray], tr_layer_id: int,
+ ptr_idx: TrackType, ntr_idx: TrackType, *,
+ width: int = 1, track_lower: Optional[int] = None,
+ track_upper: Optional[int] = None,
+ min_len_mode: MinLenMode = MinLenMode.NONE
+ ) -> DiffWarrType:
+ """Connect the given differential primitive wires to two tracks symmetrically.
+
+ This method makes sure the connections are symmetric and have identical parasitics.
+
+ Parameters
+ ----------
+ p_lay_dir : Direction
+ positive signal layer direction.
+ n_lay_dir : Direction
+ negative signal layer direction.
+ p_lay_purp : Tuple[str, str]
+ positive signal layer/purpose pair.
+ n_lay_purp : Tuple[str, str]
+ negative signal layer/purpose pair.
+ pbox : Union[BBox, BBoxArray]
+ positive signal wires to connect.
+ nbox : Union[BBox, BBoxArray]
+ negative signal wires to connect.
+ tr_layer_id : int
+ track layer ID.
+ ptr_idx : TrackType
+ positive track index.
+ ntr_idx : TrackType
+ negative track index.
+ width : int
+ track width in number of tracks.
+ track_lower : Optional[int]
+ if given, extend track(s) to this lower coordinate.
+ track_upper : Optional[int]
+ if given, extend track(s) to this upper coordinate.
+ min_len_mode : MinLenMode
+ the minimum length extension mode.
+
+ Returns
+ -------
+ p_track : Optional[WireArray]
+ the positive track.
+ n_track : Optional[WireArray]
+ the negative track.
+ """
+ track_list = self.connect_bbox_to_matching_tracks([p_lay_dir, n_lay_dir],
+ [p_lay_purp, n_lay_purp], [pbox, nbox],
+ tr_layer_id, [ptr_idx, ntr_idx],
+ width=width, track_lower=track_lower,
+ track_upper=track_upper,
+ min_len_mode=min_len_mode)
+ return track_list[0], track_list[1]
+
+ def fix_track_min_length(self, tr_layer_id: int, width: int, track_lower: int, track_upper: int,
+ min_len_mode: MinLenMode) -> Tuple[int, int]:
+ even = min_len_mode is MinLenMode.MIDDLE
+ tr_len = self.grid.get_next_length(tr_layer_id, width, track_upper - track_lower, even=even)
+ if min_len_mode is MinLenMode.LOWER:
+ track_lower = track_upper - tr_len
+ elif min_len_mode is MinLenMode.UPPER:
+ track_upper = track_lower + tr_len
+ elif min_len_mode is MinLenMode.MIDDLE:
+ track_lower = (track_upper + track_lower - tr_len) // 2
+ track_upper = track_lower + tr_len
+
+ return track_lower, track_upper
+
+    def connect_bbox_to_matching_tracks(self, lay_dir_list: List[Direction],
+                                        lay_purp_list: List[Tuple[str, str]],
+                                        box_arr_list: List[Union[BBox, BBoxArray]],
+                                        tr_layer_id: int, tr_idx_list: List[TrackType], *,
+                                        width: int = 1, track_lower: Optional[int] = None,
+                                        track_upper: Optional[int] = None,
+                                        min_len_mode: MinLenMode = MinLenMode.NONE,
+                                        ) -> List[Optional[WireArray]]:
+        """Connect the given primitive wire to given tracks.
+
+        Parameters
+        ----------
+        lay_dir_list : List[Direction]
+            the primitive wire layer direction list.
+        lay_purp_list : List[Tuple[str, str]]
+            the primitive wire layer/purpose list.
+        box_arr_list : List[Union[BBox, BBoxArray]]
+            bounding box of the wire(s) to connect to tracks.
+        tr_layer_id : int
+            track layer ID.
+        tr_idx_list : List[TrackType]
+            list of track indices.
+        width : int
+            track width in number of tracks.
+        track_lower : Optional[int]
+            if given, extend track(s) to this lower coordinate.
+        track_upper : Optional[int]
+            if given, extend track(s) to this upper coordinate.
+        min_len_mode : MinLenMode
+            the minimum length extension mode.
+        Returns
+        -------
+        wire_arr : List[Optional[WireArray]]
+            WireArrays representing the tracks created.
+        """
+        grid = self._grid
+        tr_dir = grid.get_direction(tr_layer_id)
+        w_dir = tr_dir.perpendicular()
+
+        num = len(lay_dir_list)
+        if len(lay_purp_list) != num or len(box_arr_list) != num or len(tr_idx_list) != num:
+            raise ValueError('Connection list parameters have mismatch length.')
+        if num == 0:
+            raise ValueError('Connection lists are empty.')
+
+        # first pass: connect each box to its track, accumulating common
+        # wire bounds (wl/wu) and track bounds so all connections match
+        wl = None
+        wu = None
+        for lay_dir, (lay, purp), box_arr, tr_idx in zip(lay_dir_list, lay_purp_list,
+                                                         box_arr_list, tr_idx_list):
+            if isinstance(box_arr, BBox):
+                box_arr = BBoxArray(box_arr)
+
+            tid = TrackID(tr_layer_id, tr_idx, width=width, grid=self._grid)
+            bnds = self._layout.connect_barr_to_tracks(lay_dir, lay, purp, box_arr, tid,
+                                                       track_lower, track_upper, MinLenMode.NONE,
+                                                       wl, wu)
+            # NOTE: tr_idx is rebound here from the loop variable to a bounds
+            # index; the original track index is no longer needed this iteration
+            w_idx = lay_dir.value
+            tr_idx = 1 - w_idx
+            wl = bnds[w_idx][0]
+            wu = bnds[w_idx][1]
+            track_lower = bnds[tr_idx][0]
+            track_upper = bnds[tr_idx][1]
+
+        # fix min_len_mode
+        track_lower, track_upper = self.fix_track_min_length(tr_layer_id, width, track_lower,
+                                                             track_upper, min_len_mode)
+        # second pass: extend wires to the common bounds and draw the tracks
+        ans = []
+        for (lay, purp), box_arr, tr_idx in zip(lay_purp_list, box_arr_list, tr_idx_list):
+            if isinstance(box_arr, BBox):
+                box_arr = BBoxArray(box_arr)
+            else:
+                # copy so the caller's BBoxArray is not mutated by set_interval
+                box_arr = BBoxArray(box_arr.base, tr_dir, nt=box_arr.get_num(tr_dir),
+                                    spt=box_arr.get_sp(tr_dir))
+
+            box_arr.set_interval(w_dir, wl, wu)
+            self._layout.add_rect_arr(lay, purp, box_arr)
+
+            cur_tid = TrackID(tr_layer_id, tr_idx, width=width, grid=grid)
+            warr = WireArray(cur_tid, track_lower, track_upper)
+            self._layout.add_warr(cur_tid, track_lower, track_upper)
+            ans.append(warr)
+
+        self._use_color = True
+        return ans
+
+    def connect_to_tracks(self, wire_arr_list: Union[WireArray, List[WireArray]],
+                          track_id: TrackID, *, wire_lower: Optional[int] = None,
+                          wire_upper: Optional[int] = None, track_lower: Optional[int] = None,
+                          track_upper: Optional[int] = None,
+                          min_len_mode: Optional[MinLenMode] = None,
+                          ret_wire_list: Optional[List[WireArray]] = None,
+                          debug: bool = False) -> Optional[WireArray]:
+        """Connect all given WireArrays to the given track(s).
+
+        All given wires should be on adjacent layers of the track.
+
+        Parameters
+        ----------
+        wire_arr_list : Union[WireArray, List[WireArray]]
+            list of WireArrays to connect to track.
+        track_id : TrackID
+            TrackID that specifies the track(s) to connect the given wires to.
+        wire_lower : Optional[CoordType]
+            if given, extend wire(s) to this lower coordinate.
+        wire_upper : Optional[CoordType]
+            if given, extend wire(s) to this upper coordinate.
+        track_lower : Optional[CoordType]
+            if given, extend track(s) to this lower coordinate.
+        track_upper : Optional[CoordType]
+            if given, extend track(s) to this upper coordinate.
+        min_len_mode : Optional[MinLenMode]
+            the minimum length extension mode.  None behaves like no extension.
+        ret_wire_list : Optional[List[WireArray]]
+            If not none, extended wires that are created will be appended to this list.
+        debug : bool
+            True to print debug messages.
+
+        Returns
+        -------
+        wire_arr : Optional[WireArray]
+            WireArray representing the tracks created.
+        """
+        # sentinels so min()/max() in the helper pick up the first real bound
+        if track_lower is None:
+            track_lower = COORD_MAX
+        if track_upper is None:
+            track_upper = COORD_MIN
+
+        # find min/max track Y coordinates
+        track_id = track_id.copy_with(self._grid)
+        tr_layer_id = track_id.layer_id
+        tr_w = track_id.width
+
+        # get top wire and bottom wire list
+        warr_list_list = [[], []]
+        for wire_arr in WireArray.wire_grp_iter(wire_arr_list):
+            cur_layer_id = wire_arr.layer_id
+            if cur_layer_id == tr_layer_id + 1:
+                warr_list_list[1].append(wire_arr)
+            elif cur_layer_id == tr_layer_id - 1:
+                warr_list_list[0].append(wire_arr)
+            else:
+                raise ValueError(
+                    'WireArray layer %d cannot connect to layer %d' % (cur_layer_id, tr_layer_id))
+
+        if not warr_list_list[0] and not warr_list_list[1]:
+            # no wires at all
+            return None
+
+        # connect wires together, first the layer below, then the layer above
+        tmp = self._connect_to_tracks_helper(warr_list_list[0], track_id, wire_lower, wire_upper,
+                                             track_lower, track_upper, ret_wire_list, 0, debug)
+        track_lower, track_upper = tmp
+        tmp = self._connect_to_tracks_helper(warr_list_list[1], track_id, wire_lower, wire_upper,
+                                             track_lower, track_upper, ret_wire_list, 1, debug)
+        track_lower, track_upper = tmp
+
+        # fix min_len_mode
+        track_lower, track_upper = self.fix_track_min_length(tr_layer_id, tr_w, track_lower,
+                                                             track_upper, min_len_mode)
+        result = WireArray(track_id, track_lower, track_upper)
+        self._layout.add_warr(track_id, track_lower, track_upper)
+        self._use_color = True
+        return result
+
+    def _connect_to_tracks_helper(self, warr_list: List[WireArray], track_id: TrackID,
+                                  wire_lower: Optional[int], wire_upper: Optional[int],
+                                  track_lower: int, track_upper: int,
+                                  ret_wire_list: Optional[List[WireArray]], idx: int,
+                                  debug: bool) -> Tuple[Optional[int], Optional[int]]:
+        """Connect the given same-layer wires to track_id and update the track bounds.
+
+        idx is 0 when warr_list is on the layer below the track, 1 when above;
+        it selects which entry of the returned bounds belongs to the wires.
+        Returns the (possibly expanded) track lower/upper coordinates.
+        """
+        # precondition: track_id has correct routing grid, but not WireArrays in warr_list
+        for warr in self.connect_wires(warr_list, lower=wire_lower, upper=wire_upper,
+                                       debug=debug):
+            bnds = self._layout.connect_warr_to_tracks(warr.track_id, track_id,
+                                                       warr.lower, warr.upper)
+            if ret_wire_list is not None:
+                new_tid = warr.track_id.copy_with(self._grid)
+                ret_wire_list.append(WireArray(new_tid, bnds[idx][0], bnds[idx][1]))
+            # bnds[1 - idx] holds the track-layer bounds
+            track_lower = min(track_lower, bnds[1 - idx][0])
+            track_upper = max(track_upper, bnds[1 - idx][1])
+
+        return track_lower, track_upper
+
+ def connect_to_track_wires(self, wire_arr_list: Union[WireArray, List[WireArray]],
+ track_wires: Union[WireArray, List[WireArray]], *,
+ min_len_mode: Optional[MinLenMode] = None,
+ debug: bool = False) -> Union[Optional[WireArray],
+ List[Optional[WireArray]]]:
+ """Connect all given WireArrays to the given WireArrays on adjacent layer.
+
+ Parameters
+ ----------
+ wire_arr_list : Union[WireArray, List[WireArray]]
+ list of WireArrays to connect to track.
+ track_wires : Union[WireArray, List[WireArray]]
+ list of tracks as WireArrays.
+ min_len_mode : MinLenMode
+ the minimum length extension mode.
+ debug : bool
+ True to print debug messages.
+
+ Returns
+ -------
+ wire_arr : Union[Optional[WireArray], List[Optional[WireArray]]]
+ WireArrays representing the tracks created. None if nothing to do.
+ """
+ ans = [] # type: List[Optional[WireArray]]
+ for warr in WireArray.wire_grp_iter(track_wires):
+ tr = self.connect_to_tracks(wire_arr_list, warr.track_id, track_lower=warr.lower,
+ track_upper=warr.upper, min_len_mode=min_len_mode,
+ debug=debug)
+ ans.append(tr)
+
+ if isinstance(track_wires, WireArray):
+ return ans[0]
+ return ans
+
+ def connect_differential_tracks(self, pwarr_list: Union[WireArray, List[WireArray]],
+ nwarr_list: Union[WireArray, List[WireArray]],
+ tr_layer_id: int, ptr_idx: TrackType, ntr_idx: TrackType, *,
+ width: int = 1, track_lower: Optional[int] = None,
+ track_upper: Optional[int] = None
+ ) -> Tuple[Optional[WireArray], Optional[WireArray]]:
+ """Connect the given differential wires to two tracks symmetrically.
+
+ This method makes sure the connections are symmetric and have identical parasitics.
+
+ Parameters
+ ----------
+ pwarr_list : Union[WireArray, List[WireArray]]
+ positive signal wires to connect.
+ nwarr_list : Union[WireArray, List[WireArray]]
+ negative signal wires to connect.
+ tr_layer_id : int
+ track layer ID.
+ ptr_idx : TrackType
+ positive track index.
+ ntr_idx : TrackType
+ negative track index.
+ width : int
+ track width in number of tracks.
+ track_lower : Optional[int]
+ if given, extend track(s) to this lower coordinate.
+ track_upper : Optional[int]
+ if given, extend track(s) to this upper coordinate.
+
+ Returns
+ -------
+ p_track : Optional[WireArray]
+ the positive track.
+ n_track : Optional[WireArray]
+ the negative track.
+ """
+ track_list = self.connect_matching_tracks([pwarr_list, nwarr_list], tr_layer_id,
+ [ptr_idx, ntr_idx], width=width,
+ track_lower=track_lower, track_upper=track_upper)
+ return track_list[0], track_list[1]
+
+ def connect_differential_wires(self, pin_warrs: Union[WireArray, List[WireArray]],
+ nin_warrs: Union[WireArray, List[WireArray]],
+ pout_warr: WireArray, nout_warr: WireArray, *,
+ track_lower: Optional[int] = None,
+ track_upper: Optional[int] = None
+ ) -> Tuple[Optional[WireArray], Optional[WireArray]]:
+ """Connect the given differential wires to two WireArrays symmetrically.
+
+ This method makes sure the connections are symmetric and have identical parasitics.
+
+ Parameters
+ ----------
+ pin_warrs : Union[WireArray, List[WireArray]]
+ positive signal wires to connect.
+ nin_warrs : Union[WireArray, List[WireArray]]
+ negative signal wires to connect.
+ pout_warr : WireArray
+ positive track wires.
+ nout_warr : WireArray
+ negative track wires.
+ track_lower : Optional[int]
+ if given, extend track(s) to this lower coordinate.
+ track_upper : Optional[int]
+ if given, extend track(s) to this upper coordinate.
+
+ Returns
+ -------
+ p_track : Optional[WireArray]
+ the positive track.
+ n_track : Optional[WireArray]
+ the negative track.
+ """
+ p_tid = pout_warr.track_id
+ lay_id = p_tid.layer_id
+ pidx = p_tid.base_index
+ nidx = nout_warr.track_id.base_index
+ width = p_tid.width
+
+ if track_lower is None:
+ tr_lower = pout_warr.lower
+ else:
+ tr_lower = min(track_lower, pout_warr.lower)
+ if track_upper is None:
+ tr_upper = pout_warr.upper
+ else:
+ tr_upper = max(track_upper, pout_warr.upper)
+
+ return self.connect_differential_tracks(pin_warrs, nin_warrs, lay_id, pidx, nidx,
+ width=width, track_lower=tr_lower,
+ track_upper=tr_upper)
+
+    def connect_matching_tracks(self, warr_list_list: List[Union[WireArray, List[WireArray]]],
+                                tr_layer_id: int, tr_idx_list: List[TrackType], *,
+                                width: int = 1,
+                                track_lower: Optional[int] = None,
+                                track_upper: Optional[int] = None,
+                                min_len_mode: MinLenMode = MinLenMode.NONE
+                                ) -> List[Optional[WireArray]]:
+        """Connect wires to tracks with optimal matching.
+
+        This method connects the wires to tracks in a way that minimizes the parasitic mismatches.
+
+        Parameters
+        ----------
+        warr_list_list : List[Union[WireArray, List[WireArray]]]
+            list of signal wires to connect.
+        tr_layer_id : int
+            track layer ID.
+        tr_idx_list : List[TrackType]
+            list of track indices.
+        width : int
+            track width in number of tracks.
+        track_lower : Optional[int]
+            if given, extend track(s) to this lower coordinate.
+        track_upper : Optional[int]
+            if given, extend track(s) to this upper coordinate.
+        min_len_mode : MinLenMode
+            the minimum length extension mode.
+
+        Returns
+        -------
+        track_list : List[WireArray]
+            list of created tracks.
+        """
+        # simple error checking
+        num_tracks = len(tr_idx_list)  # type: int
+        if num_tracks != len(warr_list_list):
+            raise ValueError('Connection list parameters have mismatch length.')
+        if num_tracks == 0:
+            raise ValueError('Connection lists are empty.')
+
+        # sentinels so min()/max() below pick up the first real bound
+        if track_lower is None:
+            track_lower = COORD_MAX
+        if track_upper is None:
+            track_upper = COORD_MIN
+
+        # first pass: connect every wire to its track, accumulating shared
+        # wire bounds per side (index 0 = layer below, 1 = layer above) and
+        # shared track bounds, so all connections end up identical
+        wbounds = [[COORD_MAX, COORD_MIN], [COORD_MAX, COORD_MIN]]
+        for warr_list, tr_idx in zip(warr_list_list, tr_idx_list):
+            tid = TrackID(tr_layer_id, tr_idx, width=width, grid=self._grid)
+            for warr in WireArray.wire_grp_iter(warr_list):
+                cur_lay_id = warr.layer_id
+                if cur_lay_id == tr_layer_id + 1:
+                    wb_idx = 1
+                elif cur_lay_id == tr_layer_id - 1:
+                    wb_idx = 0
+                else:
+                    raise ValueError(
+                        'WireArray layer {} cannot connect to layer {}'.format(cur_lay_id,
+                                                                               tr_layer_id))
+
+                bnds = self._layout.connect_warr_to_tracks(warr.track_id, tid,
+                                                           warr.lower, warr.upper)
+                wbounds[wb_idx][0] = min(wbounds[wb_idx][0], bnds[wb_idx][0])
+                wbounds[wb_idx][1] = max(wbounds[wb_idx][1], bnds[wb_idx][1])
+                track_lower = min(track_lower, bnds[1 - wb_idx][0])
+                track_upper = max(track_upper, bnds[1 - wb_idx][1])
+
+        # fix min_len_mode
+        track_lower, track_upper = self.fix_track_min_length(tr_layer_id, width, track_lower,
+                                                             track_upper, min_len_mode)
+        # second pass: extend all wires to the shared bounds and draw the tracks
+        ans = []
+        for warr_list, tr_idx in zip(warr_list_list, tr_idx_list):
+            for warr in WireArray.wire_grp_iter(warr_list):
+                # maps layer below -> 0, layer above -> 1 (same as wb_idx above)
+                wb_idx = (warr.layer_id - tr_layer_id + 1) // 2
+                self._layout.add_warr(warr.track_id, wbounds[wb_idx][0], wbounds[wb_idx][1])
+
+            cur_tid = TrackID(tr_layer_id, tr_idx, width=width, grid=self._grid)
+            warr = WireArray(cur_tid, track_lower, track_upper)
+            self._layout.add_warr(cur_tid, track_lower, track_upper)
+            ans.append(warr)
+
+        self._use_color = True
+        return ans
+
+ def draw_vias_on_intersections(self, bot_warr_list: Union[WireArray, List[WireArray]],
+ top_warr_list: Union[WireArray, List[WireArray]]) -> None:
+ """Draw vias on all intersections of the two given wire groups.
+
+ Parameters
+ ----------
+ bot_warr_list : Union[WireArray, List[WireArray]]
+ the bottom wires.
+ top_warr_list : Union[WireArray, List[WireArray]]
+ the top wires.
+ """
+ for bwarr in WireArray.wire_grp_iter(bot_warr_list):
+ for twarr in WireArray.wire_grp_iter(top_warr_list):
+ self._layout.add_via_on_intersections(bwarr.track_id, twarr.track_id,
+ bwarr.lower, bwarr.upper,
+ twarr.lower, twarr.upper, True, True)
+
+ def mark_bbox_used(self, layer_id: int, bbox: BBox) -> None:
+ """Marks the given bounding-box region as used in this Template."""
+ # TODO: Fix this
+ raise ValueError('Not implemented yet')
+
+ def do_max_space_fill(self, layer_id: int, bound_box: Optional[BBox] = None,
+ fill_boundary: bool = True) -> None:
+ """Draw density fill on the given layer."""
+ if bound_box is None:
+ bound_box = self.bound_box
+
+ fill_info = self.grid.tech_info.get_max_space_fill_info(layer_id)
+ self._layout.do_max_space_fill(layer_id, bound_box, fill_boundary, fill_info.info)
+ self._use_color = True
+
    def do_device_fill(self, fill_cls: Type[TemplateBase], **kwargs: Any) -> None:
        """Fill empty region with device fills.

        Parameters
        ----------
        fill_cls : Type[TemplateBase]
            the fill template class; instantiated once per empty rectangle, with
            ``width``/``height``/``edges`` parameters added to ``kwargs``.
        **kwargs : Any
            additional parameters passed to every fill template.

        Raises
        ------
        ValueError
            if this template has no bounding box, or an instance master has no
            edge information.
        """
        bbox = self.bound_box
        if bbox is None:
            raise ValueError('bound_box attribute is not set.')

        # spatial index of empty rectangles, seeded with the whole template area;
        # ed is the empty edge-constraint dictionary
        lookup = RTree()
        ed = ImmutableSortedDict()
        lookup.insert(None, bbox)

        # subtract instance bounding boxes
        for inst in self._instances.values():
            if inst.committed:
                inst_box = inst.bound_box
                inst_edges = inst.master.edge_info
                if inst_edges is None:
                    # TODO: implement this. Need to recurse down instance hierarchy
                    raise ValueError('Not implemented, see developer.')
                # save items in list, because we'll remove them from the index
                item_list = list(lookup.intersect_iter(inst_box))
                for box, item_id in item_list:
                    if box.get_intersect(inst_box).is_physical():
                        # carve the instance out of this empty rectangle and
                        # re-insert the leftover pieces
                        box_edges = cast(Optional[TemplateEdgeInfo], lookup.pop(item_id))
                        _update_device_fill_area(lookup, ed, inst_box, inst_edges, box, box_edges)

        # draw fill: one fill master per remaining empty rectangle
        # NOTE(review): the same kwargs dict is mutated and reused across
        # new_template() calls -- assumes new_template copies its params; confirm.
        cnt = 0
        for box, obj_id in lookup:
            kwargs['width'] = box.w
            kwargs['height'] = box.h
            kwargs['edges'] = lookup[obj_id]
            master = self.new_template(fill_cls, params=kwargs)
            self.add_instance(master, inst_name=f'XFILL{cnt}', xform=Transform(box.xl, box.yl))
            cnt += 1
+
+ def get_lef_options(self, options: Dict[str, Any], config: Mapping[str, Any]) -> None:
+ """Populate the LEF options dictionary.
+
+ Parameters
+ ----------
+ options : Mapping[str, Any]
+ the result LEF options dictionary.
+ config : Mapping[str, Any]
+ the LEF configuration dictionary.
+ """
+ if not self.finalized:
+ raise ValueError('This method only works on finalized master.')
+
+ detail_layers_inc = config.get('detail_layers', [])
+
+ top_layer = self.top_layer
+ tech_info = self.grid.tech_info
+ cover_layers = set(range(tech_info.bot_layer, top_layer + 1))
+ detail_layers = set()
+ for lay in detail_layers_inc:
+ detail_layers.add(lay)
+ cover_layers. discard(lay)
+
+ options['detailed_layers'] = [lay for lay_id in sorted(detail_layers)
+ for lay, _ in tech_info.get_lay_purp_list(lay_id)]
+ options['cover_layers'] = [lay for lay_id in sorted(cover_layers)
+ for lay, _ in tech_info.get_lay_purp_list(lay_id)]
+ options['cell_type'] = config.get('cell_type', 'block')
+
+
def _update_device_fill_area(lookup: RTree, ed: Param, inst_box: BBox, inst_edges: TemplateEdgeInfo,
                             sp_box: BBox, sp_edges: Optional[TemplateEdgeInfo]) -> None:
    """Split the empty rectangle sp_box around inst_box and re-insert the pieces.

    The part of sp_box not covered by inst_box is decomposed into up to eight
    rectangles (the ring of corner/side pieces around inst_box).  When an instance
    edge carries no constraints, adjacent ring pieces are merged across that edge to
    keep the number of fill rectangles small.  Each surviving piece is inserted into
    ``lookup`` together with a 4-tuple of edge constraints; ``ed`` is the empty
    constraint used for sides that face neither sp_box's boundary nor the instance.
    """
    # find instance edge with no constraints
    cut_edge_dir: Optional[Direction2D] = None
    cut_edge_dir_backup: Optional[Direction2D] = None
    two_backup = False
    # start at 1 so we prefer cutting horizontally
    for edir in (Direction2D.SOUTH, Direction2D.EAST, Direction2D.NORTH, Direction2D.WEST):
        if not inst_edges.get_edge_params(edir):
            if inst_edges.get_edge_params(edir.flip()):
                # only this side is unconstrained; remember it as a fallback
                two_backup = cut_edge_dir_backup is not None
                if not two_backup:
                    cut_edge_dir_backup = edir
            else:
                # this side AND its opposite are unconstrained: best case
                cut_edge_dir = edir
                break

    bxl = sp_box.xl
    byl = sp_box.yl
    bxh = sp_box.xh
    byh = sp_box.yh
    ixl = inst_box.xl
    iyl = inst_box.yl
    ixh = inst_box.xh
    iyh = inst_box.yh
    if sp_edges is None:
        bel = beb = ber = bet = ed
    else:
        # edge tuples appear to be ordered (left, bottom, right, top), judging
        # from the variable naming below -- TODO confirm against to_tuple()
        bel, beb, ber, bet = sp_edges.to_tuple()
    iel, ieb, ier, iet = inst_edges.to_tuple()
    # the 8 ring pieces, counter-clockwise starting at the bottom-left corner:
    # SW, S, SE, E, NE, N, NW, W.  Each entry pairs a box with its
    # (left, bottom, right, top) edge constraints.
    # NOTE(review): the S piece uses iet for its top edge and the N piece uses
    # ieb for its bottom edge -- these look swapped relative to the E/W pieces
    # (which correctly use ier/iel for the side touching the instance); verify.
    sq_list = [(BBox(bxl, byl, ixl, iyl), (bel, beb, ed, ed)),
               (BBox(ixl, byl, ixh, iyl), (ed, beb, ed, iet)),
               (BBox(ixh, byl, bxh, iyl), (ed, beb, ber, ed)),
               (BBox(ixh, iyl, bxh, iyh), (ier, ed, ber, ed)),
               (BBox(ixh, iyh, bxh, byh), (ed, ed, ber, bet)),
               (BBox(ixl, iyh, ixh, byh), (ed, ieb, ed, bet)),
               (BBox(bxl, iyh, ixl, byh), (bel, ed, ed, bet)),
               (BBox(bxl, iyl, ixl, iyh), (bel, ed, iel, ed)),
               ]
    if cut_edge_dir is not None:
        # found opposite edges with no constraints, we're done
        if cut_edge_dir.is_vertical:
            # cut horizontally: keep E and W pieces, merge the S strip (0,1,2)
            # and the N strip (4,5,6)
            tile_list = [sq_list[3], sq_list[7], _fill_merge(sq_list, 0, True),
                         _fill_merge(sq_list, 4, True)]
        else:
            # cut vertically: keep S and N pieces, merge the E strip (2,3,4)
            # and the W strip (6,7,0)
            tile_list = [sq_list[1], sq_list[5], _fill_merge(sq_list, 2, True),
                         _fill_merge(sq_list, 6, True)]
    elif cut_edge_dir_backup is not None:
        if two_backup:
            # two adjacent cut idx. Cut horizontally
            # keep the 3 pieces flanking the constrained side unmerged, then merge
            # the remaining pieces in two groups (wrapping around the ring)
            istart = 2 * cut_edge_dir_backup.value + 3
            istop = istart + 3
            if istop > 8:
                tile_list = sq_list[istart:]
                tile_list.extend(sq_list[:istop - 8])
            else:
                tile_list = sq_list[istart:istop]

            istart = istop % 8
            tile_list.append(_fill_merge(sq_list, istart, True))
            tile_list.append(_fill_merge(sq_list, (istart + 3) % 8, False))
        else:
            # one unconstrained side: keep 5 pieces, merge the 3 pieces across
            # the unconstrained edge into one
            istart = 2 * cut_edge_dir_backup.value + 1
            istop = istart + 5
            if istop > 8:
                tile_list = sq_list[istart:]
                tile_list.extend(sq_list[:istop - 8])
            else:
                tile_list = sq_list[istart:istop]

            istart = istop % 8
            tile_list.append(_fill_merge(sq_list, istart, True))
    else:
        # every instance edge is constrained: keep all 8 pieces
        tile_list = sq_list

    # only re-insert pieces with nonzero area
    for box, edges in tile_list:
        if box.is_physical():
            lookup.insert(edges, box)
+
+
def _fill_merge(sq_list: List[Tuple[BBox, Tuple[Param, Param, Param, Param]]],
                istart: int, merge_two: bool) -> Tuple[BBox, Tuple[Param, Param, Param, Param]]:
    """Merge consecutive ring pieces (indices modulo 8) into one rectangle.

    Starting at ``istart``, merges 3 consecutive pieces when ``merge_two`` is True,
    otherwise 2 (despite the parameter name).  The merged edge tuple keeps, for each
    of the four sides (left, bottom, right, top), the constraint of whichever piece
    extends furthest in that direction.

    NOTE(review): ``box.merge(cur_box)`` appears to mutate the BBox object stored in
    ``sq_list`` in place -- confirm callers do not reuse those boxes afterwards.
    """
    box = sq_list[istart][0]
    edges = list(sq_list[istart][1])
    # merge_two=True covers entries istart, istart+1, istart+2
    istop = istart + 3 if merge_two else istart + 2
    for idx in range(istart + 1, istop):
        cur_box, cur_edges = sq_list[idx % 8]
        if not box.is_physical():
            # accumulated box is degenerate so far; restart from this piece
            box = cur_box
            edges = list(cur_edges)
        elif cur_box.is_physical():
            # take each side's constraint from the box that sticks out on that side
            if cur_box.xl < box.xl:
                edges[0] = cur_edges[0]
            if cur_box.yl < box.yl:
                edges[1] = cur_edges[1]
            if cur_box.xh > box.xh:
                edges[2] = cur_edges[2]
            if cur_box.yh > box.yh:
                edges[3] = cur_edges[3]
            box.merge(cur_box)
    return box, (edges[0], edges[1], edges[2], edges[3])
diff --git a/src/bag/layout/util.py b/src/bag/layout/util.py
new file mode 100644
index 0000000..bf56ad4
--- /dev/null
+++ b/src/bag/layout/util.py
@@ -0,0 +1,128 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, Any, Tuple, Optional, Type, cast
+
+from pybag.enum import Direction2D
+from pybag.core import BBox, Transform
+
+from ..util.immutable import Param
+from ..util.importlib import import_class
+from ..design.module import Module
+
+
+from .template import TemplateBase, TemplateDB
+
+
class BlackBoxTemplate(TemplateBase):
    """A template wrapping an existing layout cell as a black box.

    The geometry comes from the referenced library cell; this template only
    registers the cell's pins, size, and blockage information.
    """

    def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
        TemplateBase.__init__(self, temp_db, params, **kwargs)

    @classmethod
    def get_params_info(cls) -> Dict[str, str]:
        return {
            'lib_name': 'The library name.',
            'cell_name': 'The layout cell name.',
            'top_layer': 'The top level layer.',
            'size': 'The width/height of the cell, in resolution units.',
            'ports': 'The port information dictionary.',
        }

    def get_layout_basename(self) -> str:
        return self.params['cell_name']

    def draw_layout(self) -> None:
        lib_name: str = self.params['lib_name']
        cell_name: str = self.params['cell_name']
        top_layer: int = self.params['top_layer']
        size: Tuple[int, int] = self.params['size']
        ports: Dict[str, Dict[str, Tuple[int, int, int, int]]] = self.params['ports']

        # register one primitive pin per bounding box of every terminal
        show = self.show_pins
        for term_name, pin_table in ports.items():
            for lay, box_list in pin_table.items():
                for xl, yb, xr, yt in box_list:
                    self._register_pin(lay, term_name, BBox(xl, yb, xr, yt), show)

        self.add_instance_primitive(lib_name, cell_name)

        self.prim_top_layer = top_layer
        w, h = size
        self.prim_bound_box = BBox(0, 0, w, h)

        # block every routing layer over the whole cell area
        for lay_id in range(1, top_layer + 1):
            self.mark_bbox_used(lay_id, self.prim_bound_box)

        self.sch_params = {'lib_name': lib_name, 'cell_name': cell_name}

    def _register_pin(self, lay: str, term_name: str, box: BBox, show_pins: bool) -> None:
        # TODO: find way to add WireArray if possible
        self.add_pin_primitive(term_name, lay, box, show=show_pins)
+
+
class IPMarginTemplate(TemplateBase):
    """A wrapper template that packages a TemplateBase into an IP block.

    This class adds the necessary margins so a TemplateBase can be packaged into an IP.
    """

    def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
        TemplateBase.__init__(self, temp_db, params, **kwargs)
        # the wrapped core master; populated by draw_layout()
        self._core: Optional[TemplateBase] = None

    @property
    def core(self) -> TemplateBase:
        """TemplateBase: the wrapped core master."""
        return self._core

    def get_schematic_class_inst(self) -> Optional[Type[Module]]:
        return self._core.get_schematic_class_inst()

    @classmethod
    def get_params_info(cls) -> Dict[str, str]:
        return {
            'cls_name': 'wrapped class name.',
            'params': 'parameters for the wrapped class.',
        }

    def draw_layout(self) -> None:
        cls_name: str = self.params['cls_name']
        params: Param = self.params['params']

        core_cls = cast(Type[TemplateBase], import_class(cls_name))
        core_master: TemplateBase = self.new_template(core_cls, params=params)
        self._core = core_master
        top_layer = core_master.top_layer

        # margins on all four sides of the core, measured on its top layer
        m_w = core_master.get_margin(top_layer, Direction2D.WEST)
        m_s = core_master.get_margin(top_layer, Direction2D.SOUTH)
        m_e = core_master.get_margin(top_layer, Direction2D.EAST)
        m_n = core_master.get_margin(top_layer, Direction2D.NORTH)
        inst = self.add_instance(core_master, inst_name='XINST', xform=Transform(m_w, m_s))

        core_box = inst.bound_box
        self.set_size_from_bound_box(top_layer, BBox(0, 0, core_box.xh + m_e, core_box.yh + m_n))

        # re-export pins
        for port_name in inst.port_names_iter():
            self.reexport(inst.get_port(port_name))

        # pass out schematic parameters
        self.sch_params = core_master.sch_params
diff --git a/src/bag/math/__init__.py b/src/bag/math/__init__.py
new file mode 100644
index 0000000..e6a0e24
--- /dev/null
+++ b/src/bag/math/__init__.py
@@ -0,0 +1,155 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package defines basic math utilities and SI-prefix conversion functions.
+"""
+
+import math
+from typing import Iterable, Tuple
+
+import numpy as np
+
+from . import interpolate
+
+__all__ = ['lcm', 'gcd', 'interpolate', 'float_to_si_string', 'si_string_to_float',
+ 'get_si_prefix']
+
+
# SI prefix exponents and their symbols, in ascending order of magnitude
si_mag = [-24, -21, -18, -15, -12, -9, -6, -3, 0, 3, 6, 9, 12, 15, 18, 21, 24]
si_pre = ['y', 'z', 'a', 'f', 'p', 'n', 'u', 'm', '', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']


def get_si_prefix(num: float) -> Tuple[int, str]:
    """Return the SI exponent and prefix suited for the given number.

    Magnitudes below 1e-21 are treated as zero and get no prefix.

    Parameters
    ----------
    num : float
        the number to classify.

    Returns
    -------
    mag : int
        the power-of-ten exponent of the chosen SI prefix.
    prefix : str
        the SI prefix string (empty string for no prefix).
    """
    val = float(num)
    if abs(val) < 1e-21:
        return 0, ''
    exponent = np.log10(abs(val))

    # pick the largest prefix whose exponent does not exceed the number's
    pick = len(si_mag) - 1
    for pos, mag in enumerate(si_mag):
        if exponent < mag:
            pick = pos - 1
            break
    return si_mag[pick], si_pre[pick]
+
+
def float_to_si_string(num: float, precision: int = 6) -> str:
    """Converts the given floating point number to a string using SI prefix.

    Parameters
    ----------
    num : float
        the number to convert.
    precision : int
        number of significant digits, defaults to 6.

    Returns
    -------
    ans : str
        the string representation of the given number using SI suffix.
    """
    mag, prefix = get_si_prefix(num)
    scaled = num / 10.0 ** mag
    # nested replacement field: precision is substituted into the 'g' spec
    return '{:.{prec}g}{}'.format(scaled, prefix, prec=precision)
+
+
def si_string_to_float(si_str):
    """Converts the given string with SI prefix to float.

    Parameters
    ----------
    si_str : str
        the string to convert

    Returns
    -------
    ans : float
        the floating point value of the given string.
    """
    suffix = si_str[-1]
    try:
        pos = si_pre.index(suffix)
    except ValueError:
        # no recognized SI suffix; parse as a plain float literal
        return float(si_str)
    return float(si_str[:-1]) * 10 ** si_mag[pos]
+
+
def gcd(a, b):
    # type: (int, int) -> int
    """Compute greatest common divisor of two positive integers.

    Parameters
    ----------
    a : int
        the first number.
    b : int
        the second number.

    Returns
    -------
    ans : int
        the greatest common divisor of the two given integers.
    """
    # math.gcd implements the same Euclidean algorithm in C; identical results
    # on the documented domain of positive integers (and zero).
    return math.gcd(a, b)
+
+
def lcm(arr, init=1):
    # type: (Iterable[int], int) -> int
    """Compute least common multiple of all numbers in the given list.

    Parameters
    ----------
    arr : Iterable[int]
        a list of integers.
    init : int
        the initial LCM.  Defaults to 1.

    Returns
    -------
    ans : int
        the least common multiple of all the given numbers.
    """
    cur_lcm = init
    for val in arr:
        # lcm(a, b) = a * b // gcd(a, b); math.gcd runs at C speed
        cur_lcm = cur_lcm * val // math.gcd(cur_lcm, val)
    return cur_lcm
diff --git a/src/bag/math/dfun.py b/src/bag/math/dfun.py
new file mode 100644
index 0000000..7557451
--- /dev/null
+++ b/src/bag/math/dfun.py
@@ -0,0 +1,677 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines the differentiable function class."""
+
+from typing import Union, List, Optional, Tuple
+
+import abc
+
+import numpy as np
+
+
class DiffFunction(abc.ABC):
    """An abstract class representing a differentiable scalar function.

    Supports Numpy broadcasting.  Defaults to using finite difference for derivative calculation.

    Parameters
    ----------
    input_ranges : List[Tuple[Optional[float], Optional[float]]]
        input ranges.
    delta_list : Optional[List[float]]
        a list of finite difference step size for each input.  If None,
        finite difference will be disabled.
    """

    def __init__(self, input_ranges, delta_list=None):
        # type: (List[Tuple[Optional[float], Optional[float]]], Optional[List[float]]) -> None
        # error checking
        self._ndim = len(input_ranges)
        if delta_list is not None and len(delta_list) != self._ndim:
            raise ValueError('finite difference list length inconsistent.')

        self._input_ranges = input_ranges
        self.delta_list = delta_list  # type: Optional[List[float]]

    @property
    def input_ranges(self):
        # type: () -> List[Tuple[Optional[float], Optional[float]]]
        """Input ranges, one (lower, upper) tuple per input dimension."""
        return self._input_ranges

    @property
    def ndim(self):
        # type: () -> int
        """Number of input dimensions."""
        return self._ndim

    @abc.abstractmethod
    def __call__(self, xi):
        """Interpolate at the given coordinates.

        Numpy broadcasting rules apply.

        Parameters
        ----------
        xi : array_like
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : np.multiarray.ndarray
            The interpolated values at the given coordinates.
        """
        raise NotImplementedError('Not implemented')

    def get_input_range(self, idx):
        # type: (int) -> Tuple[Optional[float], Optional[float]]
        """Returns the input range of the given dimension."""
        return self._input_ranges[idx]

    def deriv(self, xi, j):
        """Calculate the derivative at the given coordinates with respect to input j.

        Numpy broadcasting rules apply.

        NOTE: this default implementation requires delta_list; subclasses
        constructed with delta_list=None must override deriv().

        Parameters
        ----------
        xi : array_like
            The coordinates to evaluate, with shape (..., ndim)
        j : int
            input index.

        Returns
        -------
        val : np.multiarray.ndarray
            The derivatives at the given coordinates.
        """
        return self._fd(xi, j, self.delta_list[j])

    def jacobian(self, xi):
        """Calculate the Jacobian at the given coordinates.

        Numpy broadcasting rules apply.

        If finite difference step sizes are not specified,
        will call deriv() in a for loop to compute the Jacobian.

        Parameters
        ----------
        xi : array_like
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : np.multiarray.ndarray
            The Jacobian matrices at the given coordinates.
        """
        if self.delta_list:
            # evaluate all 2*ndim perturbed points in a single batched call
            return self._fd_jacobian(xi, self.delta_list)
        else:
            # fall back on (possibly overridden) deriv(), one dimension at a time
            xi = np.asarray(xi, dtype=float)
            ans = np.empty(xi.shape)
            for n in range(self.ndim):
                ans[..., n] = self.deriv(xi, n)
            return ans

    def _fd(self, xi, idx, delta):
        """Calculate the derivative along the given index using central finite difference.

        Parameters
        ----------
        xi : array_like
            The coordinates to evaluate, with shape (..., ndim)
        idx : int
            The index to calculate the derivative on.
        delta : float
            The finite difference step size.

        Returns
        -------
        val : np.multiarray.ndarray
            The derivatives at the given coordinates.
        """
        if idx < 0 or idx >= self.ndim:
            raise ValueError('Invalid derivative index: %d' % idx)

        xi = np.asarray(xi, dtype=float)
        if xi.shape[-1] != self.ndim:
            raise ValueError("The requested sample points xi have dimension %d, "
                             "but this interpolator has dimension %d" % (xi.shape[-1], self.ndim))

        # use broadcasting to evaluate two points at once
        xtest = np.broadcast_to(xi, (2,) + xi.shape).copy()
        xtest[0, ..., idx] += delta / 2.0
        xtest[1, ..., idx] -= delta / 2.0
        val = self(xtest)
        ans = (val[0] - val[1]) / delta  # type: np.ndarray

        # unwrap single-element arrays to a scalar-like value
        if ans.size == 1 and not np.isscalar(ans):
            return ans[0]
        return ans

    def _fd_jacobian(self, xi, delta_list):
        """Calculate the Jacobian matrix using central finite difference.

        Parameters
        ----------
        xi : array_like
            The coordinates to evaluate, with shape (..., ndim)
        delta_list : List[float]
            list of finite difference step sizes for each input.

        Returns
        -------
        val : np.multiarray.ndarray
            The Jacobian matrices at the given coordinates.
        """
        xi = np.asarray(xi, dtype=float)
        if xi.shape[-1] != self.ndim:
            raise ValueError("The requested sample points xi have dimension %d, "
                             "but this interpolator has dimension %d" % (xi.shape[-1], self.ndim))

        # use broadcasting to evaluate all points at once:
        # rows 2k / 2k+1 hold the +delta/2 and -delta/2 samples of input k
        xtest = np.broadcast_to(xi, (2 * self.ndim,) + xi.shape).copy()
        for idx, delta in enumerate(delta_list):
            xtest[2 * idx, ..., idx] += delta / 2.0
            xtest[2 * idx + 1, ..., idx] -= delta / 2.0

        val = self(xtest)
        ans = np.empty(xi.shape)
        for idx, delta in enumerate(delta_list):
            ans[..., idx] = (val[2 * idx, ...] - val[2 * idx + 1, ...]) / delta
        return ans

    def transform_input(self, amat, bmat):
        # type: (np.multiarray.ndarray, np.multiarray.ndarray) -> DiffFunction
        """Returns f(Ax + B), where f is this function and A, B are matrices.

        Parameters
        ----------
        amat : np.multiarray.ndarray
            the input transform matrix.
        bmat : np.multiarray.ndarray
            the input shift matrix.

        Returns
        -------
        dfun : DiffFunction
            a scalar differential function.
        """
        return InLinTransformFunction(self, amat, bmat)

    # arithmetic operators below compose DiffFunction objects lazily into new
    # DiffFunction objects; scalars and 1-element ndarrays are folded in as
    # constants via ScaleAddFunction/PwrFunction.

    def __add__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        if isinstance(other, DiffFunction):
            return SumDiffFunction(self, other, f2_sgn=1.0)
        elif isinstance(other, float) or isinstance(other, int):
            return ScaleAddFunction(self, other, 1.0)
        elif isinstance(other, np.ndarray):
            return ScaleAddFunction(self, other.item(), 1.0)
        else:
            raise NotImplementedError('Unknown type %s' % type(other))

    def __radd__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        return self.__add__(other)

    def __sub__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        if isinstance(other, DiffFunction):
            return SumDiffFunction(self, other, f2_sgn=-1.0)
        elif isinstance(other, float) or isinstance(other, int):
            return ScaleAddFunction(self, -other, 1.0)
        elif isinstance(other, np.ndarray):
            return ScaleAddFunction(self, -other.item(), 1.0)
        else:
            raise NotImplementedError('Unknown type %s' % type(other))

    def __rsub__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        if isinstance(other, DiffFunction):
            return SumDiffFunction(other, self, f2_sgn=-1.0)
        elif isinstance(other, float) or isinstance(other, int):
            return ScaleAddFunction(self, other, -1.0)
        elif isinstance(other, np.ndarray):
            return ScaleAddFunction(self, other.item(), -1.0)
        else:
            raise NotImplementedError('Unknown type %s' % type(other))

    def __mul__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        if isinstance(other, DiffFunction):
            return ProdFunction(self, other)
        elif isinstance(other, float) or isinstance(other, int):
            return ScaleAddFunction(self, 0.0, other)
        elif isinstance(other, np.ndarray):
            return ScaleAddFunction(self, 0.0, other.item())
        else:
            raise NotImplementedError('Unknown type %s' % type(other))

    def __rmul__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        return self.__mul__(other)

    def __pow__(self, other):
        # type: (Union[float, int, np.multiarray.ndarray]) -> DiffFunction
        # only scalar exponents are supported (no function ** function)
        if isinstance(other, float) or isinstance(other, int):
            return PwrFunction(self, other, scale=1.0)
        elif isinstance(other, np.ndarray):
            return PwrFunction(self, other.item(), scale=1.0)
        else:
            raise NotImplementedError('Unknown type %s' % type(other))

    def __div__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        # Python-2-style name; __truediv__ below delegates here for Python 3
        if isinstance(other, DiffFunction):
            return DivFunction(self, other)
        elif isinstance(other, float) or isinstance(other, int):
            return ScaleAddFunction(self, 0.0, 1.0 / other)
        elif isinstance(other, np.ndarray):
            return ScaleAddFunction(self, 0.0, 1.0 / other.item())
        else:
            raise NotImplementedError('Unknown type %s' % type(other))

    def __truediv__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        return self.__div__(other)

    def __rdiv__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        if isinstance(other, DiffFunction):
            return DivFunction(other, self)
        elif isinstance(other, float) or isinstance(other, int):
            # scalar / f == scalar * f ** -1
            return PwrFunction(self, -1.0, scale=other)
        elif isinstance(other, np.ndarray):
            return PwrFunction(self, -1.0, scale=other.item())
        else:
            raise NotImplementedError('Unknown type %s' % type(other))

    def __rtruediv__(self, other):
        # type: (Union[DiffFunction, float, int, np.multiarray.ndarray]) -> DiffFunction
        return self.__rdiv__(other)

    def __neg__(self):
        # type: () -> DiffFunction
        return ScaleAddFunction(self, 0.0, -1.0)
+
+
class InLinTransformFunction(DiffFunction):
    """A DiffFunction where the input undergoes a linear transformation first.

    This function computes f(Ax + B), where A and B are matrices.

    Parameters
    ----------
    f1 : DiffFunction
        the parent function.
    amat : np.multiarray.ndarray
        the input transform matrix.
    bmat : np.multiarray.ndarray
        the input shift matrix.
    """
    def __init__(self, f1, amat, bmat):
        # type: (DiffFunction, np.multiarray.ndarray, np.multiarray.ndarray) -> None
        if amat.shape[0] != f1.ndim or bmat.shape[0] != f1.ndim:
            raise ValueError('amat/bmat number of rows must be %d' % f1.ndim)
        if len(bmat.shape) != 1:
            raise ValueError('bmat must be 1 dimension.')

        # domain of f(Ax+B) cannot be represented by input ranges.
        super(InLinTransformFunction, self).__init__([(None, None)] * amat.shape[1],
                                                     delta_list=None)
        self._f1 = f1
        self._amat = amat
        # store B as a column vector so it broadcasts against A.dot(x.T)
        self._bmat = bmat.reshape(-1, 1)

    def _get_arg(self, xi):
        """Flatten xi to 2D, apply Ax + B, and return (f1 inputs, original shape)."""
        xi = np.asarray(xi)
        xi_shape = xi.shape
        my_ndim = self.ndim
        if xi_shape[-1] != my_ndim:
            raise ValueError('Last dimension must have size %d' % my_ndim)

        # collapse all leading dimensions into one batch of points
        xi = xi.reshape(-1, my_ndim)
        return (self._amat.dot(xi.T) + self._bmat).T, xi_shape

    def __call__(self, xi):
        farg, xi_shape = self._get_arg(xi)
        result = self._f1(farg)
        if np.isscalar(result):
            return result
        # restore the caller's leading (batch) dimensions
        return result.reshape(xi_shape[:-1])

    def deriv(self, xi, j):
        # chain rule: the full Jacobian row is computed, then column j is taken
        jmat = self.jacobian(xi)
        return jmat[..., 0, j]

    def jacobian(self, xi):
        # chain rule: J(x) = J_f1(Ax + B) . A
        farg, xi_shape = self._get_arg(xi)
        jmat = self._f1.jacobian(farg).dot(self._amat)
        shape_trunc = xi_shape[:-1]  # type: Tuple[int, ...]
        # output is scalar, so the Jacobian has a single row per point
        return jmat.reshape(shape_trunc + (1, self.ndim))
+
+
class ScaleAddFunction(DiffFunction):
    """A DiffFunction multiplied by a scalar then shifted by a constant.

    Evaluates ``f1(x) * scaler + adder``.

    Parameters
    ----------
    f1 : DiffFunction
        the first function.
    adder : float
        constant to add.
    scaler : float
        constant to multiply.
    """
    def __init__(self, f1, adder, scaler):
        # type: (DiffFunction, float, float) -> None
        DiffFunction.__init__(self, f1.input_ranges, delta_list=None)
        self._fun = f1
        self._offset = adder
        self._gain = scaler

    def __call__(self, xi):
        return self._fun(xi) * self._gain + self._offset

    def deriv(self, xi, j):
        # the additive constant vanishes under differentiation
        return self._fun.deriv(xi, j) * self._gain

    def jacobian(self, xi):
        return self._fun.jacobian(xi) * self._gain
+
+
+def _intersection(*args):
+ input_ranges = []
+ for bound_list in zip(*args):
+ lmax, umin = None, None
+ for l, u in bound_list:
+ if l is None:
+ lmax, umin = None, None
+ break
+ else:
+ if lmax is None:
+ lmax, umin = l, u
+ else:
+ lmax = max(l, lmax)
+ umin = min(u, umin)
+
+ input_ranges.append((lmax, umin))
+
+ return input_ranges
+
+
class SumDiffFunction(DiffFunction):
    """Sum or difference of two DiffFunctions.

    Parameters
    ----------
    f1 : DiffFunction
        the first function.
    f2 : DiffFunction
        the second function.
    f2_sgn : float
        1 if adding, -1 if subtracting.
    """
    def __init__(self, f1, f2, f2_sgn=1.0):
        # type: (DiffFunction, DiffFunction, float) -> None
        if f1.ndim != f2.ndim:
            raise ValueError('functions dimension mismatch.')

        # the combined function is only defined where both operands are
        common_ranges = _intersection(f1.input_ranges, f2.input_ranges)
        DiffFunction.__init__(self, common_ranges, delta_list=None)
        self._fa = f1
        self._fb = f2
        self._sgn = f2_sgn

    def __call__(self, xi):
        return self._fa(xi) + self._sgn * self._fb(xi)

    def deriv(self, xi, j):
        # differentiation is linear, so the sign carries straight through
        return self._fa.deriv(xi, j) + self._sgn * self._fb.deriv(xi, j)

    def jacobian(self, xi):
        return self._fa.jacobian(xi) + self._sgn * self._fb.jacobian(xi)
+
+
class ProdFunction(DiffFunction):
    """The product of two DiffFunctions.

    Parameters
    ----------
    f1 : DiffFunction
        the first function.
    f2 : DiffFunction
        the second function.
    """
    def __init__(self, f1, f2):
        # type: (DiffFunction, DiffFunction) -> None
        if f1.ndim != f2.ndim:
            raise ValueError('functions dimension mismatch.')

        # only defined where both operands are defined
        DiffFunction.__init__(self, _intersection(f1.input_ranges, f2.input_ranges),
                              delta_list=None)
        self._f1 = f1
        self._f2 = f2

    def __call__(self, xi):
        """Return ``f1(xi) * f2(xi)``."""
        val1 = self._f1(xi)
        val2 = self._f2(xi)
        return val1 * val2

    def deriv(self, xi, j):
        """Product rule: (f g)' = f' g + f g'."""
        return self._f2(xi) * self._f1.deriv(xi, j) + self._f1(xi) * self._f2.deriv(xi, j)

    def jacobian(self, xi):
        """Product rule applied row-wise; values get a trailing axis to broadcast."""
        val1 = self._f1(xi)[..., np.newaxis]
        val2 = self._f2(xi)[..., np.newaxis]
        return self._f1.jacobian(xi) * val2 + self._f2.jacobian(xi) * val1
+
+
class DivFunction(DiffFunction):
    """The quotient of two DiffFunctions.

    Parameters
    ----------
    f1 : DiffFunction
        the numerator function.
    f2 : DiffFunction
        the denominator function.
    """
    def __init__(self, f1, f2):
        # type: (DiffFunction, DiffFunction) -> None
        if f1.ndim != f2.ndim:
            raise ValueError('functions dimension mismatch.')

        # only defined where both operands are defined
        DiffFunction.__init__(self, _intersection(f1.input_ranges, f2.input_ranges),
                              delta_list=None)
        self._f1 = f1
        self._f2 = f2

    def __call__(self, xi):
        """Return ``f1(xi) / f2(xi)``."""
        num = self._f1(xi)
        den = self._f2(xi)
        return num / den

    def deriv(self, xi, j):
        """Quotient rule: (f/g)' = f'/g - f g' / g^2."""
        den = self._f2(xi)
        return self._f1.deriv(xi, j) / den - (self._f1(xi) * self._f2.deriv(xi, j) / (den**2))

    def jacobian(self, xi):
        """Quotient rule applied row-wise; values get a trailing axis to broadcast."""
        num = self._f1(xi)[..., np.newaxis]
        den = self._f2(xi)[..., np.newaxis]
        return self._f1.jacobian(xi) / den - (num * self._f2.jacobian(xi)) / (den**2)
+
+
class PwrFunction(DiffFunction):
    """A DiffFunction raised to a power and scaled.

    Evaluates to ``scale * f(x) ** pwr``.

    Parameters
    ----------
    f : DiffFunction
        the DiffFunction.
    pwr : float
        the power.
    scale : float
        scaling factor. Used to implement a / x.
    """
    def __init__(self, f, pwr, scale=1.0):
        # type: (DiffFunction, float, float) -> None
        DiffFunction.__init__(self, f.input_ranges, delta_list=None)
        self._f = f
        self._pwr = pwr
        self._scale = scale

    def __call__(self, xi):
        """Return ``scale * f(xi) ** pwr``."""
        return (self._f(xi) ** self._pwr) * self._scale

    def deriv(self, xi, j):
        """Chain rule: d/dx_j [s f^p] = s p f^(p-1) df/dx_j."""
        fval = self._f(xi)
        return (fval ** (self._pwr - 1) * self._pwr * self._f.deriv(xi, j)) * self._scale

    def jacobian(self, xi):
        """Chain rule applied to the full Jacobian; value gets a trailing axis."""
        fval = self._f(xi)[..., np.newaxis]
        return (self._f.jacobian(xi) * (fval ** (self._pwr - 1) * self._pwr)) * self._scale
+
+
class VectorDiffFunction(object):
    """A differentiable vector-valued function.

    Wraps a list of scalar DiffFunctions; element ``i`` of the output vector
    is computed by ``fun_list[i]``.  All member functions must share the same
    input dimension.

    Parameters
    ----------
    fun_list : List[DiffFunction]
        list of interpolator functions, one for each element of the output vector.
    """

    def __init__(self, fun_list):
        # type: (List[DiffFunction]) -> None
        if not fun_list:
            raise ValueError('No interpolators are given.')

        ndim = fun_list[0].ndim
        for fun in fun_list:
            if fun.ndim != ndim:
                raise ValueError('Interpolators input dimension mismatch.')

        # valid input region is where every member function is defined
        self._input_ranges = _intersection(*(f.input_ranges for f in fun_list))
        self._in_dim = ndim
        self._fun_list = fun_list
        self._out_dim = len(fun_list)

    @property
    def in_dim(self):
        # type: () -> int
        """Input dimension number."""
        return self._in_dim

    @property
    def out_dim(self):
        # type: () -> int
        """Output dimension number."""
        return self._out_dim

    def get_input_range(self, idx):
        # type: (int) -> Tuple[Optional[float], Optional[float]]
        """Returns the input range of the given dimension."""
        return self._input_ranges[idx]

    def __call__(self, xi):
        """Returns the output vector at the given coordinates.

        Parameters
        ----------
        xi : array-like
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : numpy.array
            The interpolated values at the given coordinates, with shape
            (..., out_dim).
        """
        xi = np.asarray(xi, dtype=float)
        out = np.empty(xi.shape[:-1] + (self._out_dim, ))
        # evaluate each member function into its output slot
        for col, fun in enumerate(self._fun_list):
            out[..., col] = fun(xi)
        return out

    def jacobian(self, xi):
        """Calculate the Jacobian matrices of this function at the given coordinates.

        Parameters
        ----------
        xi : array-like
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : numpy.array
            The jacobian matrix at the given coordinates, with shape
            (..., out_dim, in_dim).
        """
        xi = np.asarray(xi, dtype=float)
        out = np.empty(xi.shape[:-1] + (self._out_dim, self._in_dim))
        # each member function provides one row of the Jacobian
        for row, fun in enumerate(self._fun_list):
            out[..., row, :] = fun.jacobian(xi)
        return out

    def deriv(self, xi, i, j):
        """Compute the derivative of output i with respect to input j

        Parameters
        ----------
        xi : array-like
            The coordinates to evaluate, with shape (..., ndim)
        i : int
            output index.
        j : int
            input index.

        Returns
        -------
        val : numpy.array
            The derivatives at the given coordinates.
        """
        return self._fun_list[i].deriv(xi, j)
diff --git a/src/bag/math/interpolate.py b/src/bag/math/interpolate.py
new file mode 100644
index 0000000..134e78f
--- /dev/null
+++ b/src/bag/math/interpolate.py
@@ -0,0 +1,608 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines various interpolation classes.
+"""
+
+from typing import List, Tuple, Union, Sequence
+
+import numpy as np
+import scipy.interpolate as interp
+import scipy.ndimage.interpolation as imag_interp
+
+from ..math.dfun import DiffFunction
+
+__author__ = 'erichang'
+__all__ = ['interpolate_grid', 'LinearInterpolator']
+
+
+def _scales_to_points(scale_list, # type: List[Tuple[float, float]]
+ values, # type: np.multiarray.ndarray
+ delta=1e-4, # type: float
+ ):
+ # type: (...) -> Tuple[List[np.multiarray.ndarray], List[float]]
+ """convert scale_list to list of point values and finite difference deltas."""
+
+ ndim = len(values.shape)
+ # error checking
+ if ndim == 1:
+ raise ValueError('This class only works for dimension >= 2.')
+ elif ndim != len(scale_list):
+ raise ValueError('input and output dimension mismatch.')
+
+ points = []
+ delta_list = []
+ for idx in range(ndim):
+ num_pts = values.shape[idx] # type: int
+ if num_pts < 2:
+ raise ValueError('Every dimension must have at least 2 points.')
+ offset, scale = scale_list[idx]
+ points.append(np.linspace(offset, (num_pts - 1) * scale + offset, num_pts))
+ delta_list.append(scale * delta)
+
+ return points, delta_list
+
+
def interpolate_grid(scale_list,  # type: List[Tuple[float, float]]
                     values,  # type: np.multiarray.ndarray
                     method='spline',  # type: str
                     extrapolate=False,  # type: bool
                     delta=1e-4,  # type: float
                     num_extrapolate=3,  # type: int
                     ):
    # type: (...) -> DiffFunction
    """Interpolates multidimensional data on a regular grid.

    returns an Interpolator for the given dataset.

    Parameters
    ----------
    scale_list : List[Tuple[float, float]]
        a list of (offset, spacing).
    values : np.multiarray.ndarray
        The output data in N dimensions. The length in each dimension must
        be at least 2.
    method : str
        The interpolation method. Either 'linear', or 'spline'.
        Defaults to 'spline'.
    extrapolate : bool
        True to extrapolate data output of given bounds. Defaults to False.
    delta : float
        the finite difference step size. Finite difference is only used for
        linear interpolation and spline interpolation on 3D data or greater.
        Defaults to 1e-4 of the grid spacing.
    num_extrapolate: int
        If spline interpolation is selected on 3D data or greater, we linearly
        extrapolate the given data by this many points to fix behavior near
        input boundaries.

    Returns
    -------
    fun : DiffFunction
        the interpolator function.

    Raises
    ------
    ValueError
        if the interpolation method is not supported.
    """
    ndim = len(values.shape)
    if ndim == 1:
        # bugfix: dispatch 1D data first; previously method='linear' on 1D data
        # reached _scales_to_points, which rejects 1D arrays with a ValueError.
        # Interpolator1D natively supports both 'linear' and 'spline'.
        return Interpolator1D(scale_list, values, method=method, extrapolate=extrapolate)
    elif method == 'linear':
        points, delta_list = _scales_to_points(scale_list, values, delta)
        return LinearInterpolator(points, values, delta_list, extrapolate=extrapolate)
    elif method == 'spline':
        if ndim == 2:
            return Spline2D(scale_list, values, extrapolate=extrapolate)
        else:
            # 3D or higher: spline via map_coordinates with linear edge extension
            return MapCoordinateSpline(scale_list, values, delta=delta, extrapolate=extrapolate,
                                       num_extrapolate=num_extrapolate)
    else:
        raise ValueError('Unsupported interpolation method: %s' % method)
+
+
class LinearInterpolator(DiffFunction):
    """A linear interpolator on a regular grid for arbitrary dimensions.

    This class is backed by scipy.interpolate.RegularGridInterpolator.
    Derivatives are calculated using finite difference.

    Parameters
    ----------
    points : Sequence[np.multiarray.ndarray]
        list of points of each dimension.
    values : np.multiarray.ndarray
        The output data in N dimensions.
    delta_list : List[float]
        list of finite difference step size for each axis.
    extrapolate : bool
        True to extrapolate data output of given bounds. Defaults to False.
    """

    def __init__(self, points, values, delta_list, extrapolate=False):
        # type: (Sequence[np.multiarray.ndarray], np.multiarray.ndarray, List[float], bool) -> None
        # input range per axis is simply the first/last sample point
        input_range = [(pvec[0], pvec[-1]) for pvec in points]
        DiffFunction.__init__(self, input_range, delta_list=delta_list)
        self._points = points
        self._extrapolate = extrapolate
        # fill_value=None tells RegularGridInterpolator to extrapolate (per the
        # scipy documentation); bounds_error raises instead when extrapolate=False.
        self.fun = interp.RegularGridInterpolator(points, values, method='linear',
                                                  bounds_error=not extrapolate,
                                                  fill_value=None)

    def get_input_points(self, idx):
        # type: (int) -> np.multiarray.ndarray
        """Returns the input points for the given dimension."""
        return self._points[idx]

    def __call__(self, xi):
        """Interpolate at the given coordinate.

        Parameters
        ----------
        xi : numpy.array
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : numpy.array
            The interpolated values at the given coordinates.
            A single-element result is unwrapped to a scalar.
        """
        ans = self.fun(xi)
        if ans.size == 1:
            return ans[0]
        return ans

    def integrate(self, xstart, xstop, axis=-1, logx=False, logy=False, raw=False):
        # type: (float, float, int, bool, bool, bool) -> Union[LinearInterpolator, np.ndarray]
        """Integrate away the given axis.

        if logx/logy is True, that means this LinearInterpolator is actually used
        to do linear interpolation on the logarithm of the actual data. This method
        will returns the integral of the actual data.

        Parameters
        ----------
        xstart : float
            the X start value.
        xstop : float
            the X stop value.
        axis : int
            the axis of integration.
            If unspecified, this will be the last axis.
        logx : bool
            True if the values on the given axis are actually the logarithm of
            the real values.
        logy : bool
            True if the Y values are actually the logarithm of the real values.
        raw : bool
            True to return the raw data points instead of a LinearInterpolator object.

        Returns
        -------
        result : Union[LinearInterpolator, np.ndarray]
            a new LinearInterpolator over the remaining axes, or the raw
            integrated array when ``raw=True`` or no axes remain.
        """
        if self.delta_list is None:
            raise ValueError("Finite differences must be enabled")

        if logx != logy:
            raise ValueError('Currently only works for linear or log-log relationship.')

        ndim = self.ndim
        # NOTE(review): any negative axis is mapped to the LAST axis, not
        # Python-style negative indexing (-2 also becomes ndim - 1) — confirm intent.
        if axis < 0:
            axis = ndim - 1
        if axis < 0 or axis >= ndim:
            raise IndexError('index out of range.')

        if len(self._points) < ndim:
            raise ValueError("len(self._points) != ndim")

        def calculate_integ_x() -> np.ndarray:
            # find data points between xstart and xstop
            # assumes xstart/xstop lie within the sampled range — TODO confirm;
            # out-of-range limits would make the index arithmetic below misbehave.
            vec_inner = self._points[axis]
            start_idx, stop_idx = np.searchsorted(vec_inner, [xstart, xstop])

            cur_len = stop_idx - start_idx
            # prepend xstart if it falls strictly between grid points
            if vec_inner[start_idx] > xstart:
                cur_len += 1
                istart = 1
            else:
                istart = 0
            # append xstop if it falls strictly between grid points
            if vec_inner[stop_idx - 1] < xstop:
                cur_len += 1
                istop = cur_len - 1
            else:
                istop = cur_len

            integ_x_inner = np.empty(cur_len)
            integ_x_inner[istart:istop] = vec_inner[start_idx:stop_idx]
            if istart != 0:
                integ_x_inner[0] = xstart

            if istop != cur_len:
                integ_x_inner[cur_len - 1] = xstop

            return integ_x_inner

        # get all input sample points we need to integrate.
        plist = []
        integ_x = calculate_integ_x()  # type: np.ndarray
        new_points = []
        new_deltas = []
        for axis_idx, vec in enumerate(self._points):
            if axis == axis_idx:
                plist.append(integ_x)
            else:
                plist.append(vec)
                # remaining axes keep their original points/deltas
                new_points.append(vec)
                new_deltas.append(self.delta_list[axis_idx])

        # evaluate this interpolator on the full tensor grid of sample points
        fun_arg = np.stack(np.meshgrid(*plist, indexing='ij'), axis=-1)
        values = self.fun(fun_arg)

        if logx:
            if axis != ndim - 1:
                # transpose values so that broadcasting/slicing is easier
                new_order = [idx for idx in range(ndim) if idx != axis]
                new_order.append(axis)
                values = np.transpose(values, axes=new_order)

            # integrate given that log-log plot is piece-wise linear
            # on each segment, log(y) = log(y1) + m*(log(x) - log(x1)),
            # i.e. y = y1 * (x/x1)**m with slope m below.
            ly1 = values[..., :-1]
            ly2 = values[..., 1:]
            lx1 = np.broadcast_to(integ_x[:-1], ly1.shape)
            lx2 = np.broadcast_to(integ_x[1:], ly1.shape)
            m = (ly2 - ly1) / (lx2 - lx1)

            x1 = np.exp(lx1)
            y1 = np.exp(ly1)

            # m ~ -1: integrand is ~ c/x, whose antiderivative is logarithmic
            log_idx = np.abs(m + 1) < 1e-6
            log_idxb = np.invert(log_idx)
            area = np.empty(m.shape)
            # m == -1 segments: integral = y1*x1*ln(x2/x1) = y1/x1**m * (lx2 - lx1)
            area[log_idx] = (y1[log_idx] / np.power(x1[log_idx], m[log_idx]) * (lx2[log_idx] -
                                                                               lx1[log_idx]))

            # other segments: integral = y1/(m+1) * ((x2/x1)**m * x2 - x1)
            mp1 = m[log_idxb] + 1
            x2 = np.exp(lx2[log_idxb])
            x1 = x1[log_idxb]
            area[log_idxb] = y1[log_idxb] / mp1 * (np.power(x2 / x1, m[log_idxb]) * x2 - x1)
            # sum per-segment areas along the (now last) integration axis
            new_values = np.sum(area, axis=-1)  # type: np.multiarray.ndarray
        else:
            # just use trapezoid integration
            # noinspection PyTypeChecker
            new_values = np.trapz(values, x=integ_x, axis=axis)

        if not raw and new_points:
            return LinearInterpolator(new_points, new_values, new_deltas,
                                      extrapolate=self._extrapolate)
        else:
            return new_values
+
+
class Interpolator1D(DiffFunction):
    """An interpolator on a regular grid for 1 dimensional data.

    This class is backed by scipy.interpolate.InterpolatedUnivariateSpline.

    Parameters
    ----------
    scale_list : list[(float, float)]
        a list of (offset, spacing) for each input dimension.
    values : numpy.array
        The output data. Must be 1 dimension.
    method : str
        interpolation method. Either 'linear' or 'spline'. Defaults to 'spline'.
    extrapolate : bool
        True to extrapolate data output of given bounds. Defaults to False.
    """

    def __init__(self, scale_list, values, method='spline', extrapolate=False):
        # error checking
        if len(values.shape) != 1:
            raise ValueError('This class only works for 1D data.')
        elif len(scale_list) != 1:
            raise ValueError('input and output dimension mismatch.')

        # spline degree: k=1 is piecewise linear, k=3 is cubic
        if method == 'linear':
            k = 1
        elif method == 'spline':
            k = 3
        else:
            # bugfix: corrected typo 'Unsuppoorted' in the error message.
            raise ValueError('Unsupported interpolation method: %s' % method)

        offset, scale = scale_list[0]
        num_pts = values.shape[0]
        points = np.linspace(offset, (num_pts - 1) * scale + offset,
                             num_pts)  # type: np.multiarray.ndarray

        DiffFunction.__init__(self, [(points[0], points[-1])], delta_list=None)

        # ext=0 extrapolates beyond the data range; ext=2 raises ValueError
        # (see scipy InterpolatedUnivariateSpline documentation).
        ext = 0 if extrapolate else 2
        self.fun = interp.InterpolatedUnivariateSpline(points, values, k=k, ext=ext)

    def __call__(self, xi):
        """Interpolate at the given coordinate.

        Parameters
        ----------
        xi : numpy.array
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : numpy.array
            The interpolated values at the given coordinates.
            A single-element result is unwrapped to a Python scalar.
        """
        ans = self.fun(xi)
        if ans.size == 1:
            return ans.item()
        return ans

    def deriv(self, xi, idx):
        """Calculate the derivative of the spline along the given index.

        Parameters
        ----------
        xi : numpy.array
            The coordinates to evaluate, with shape (..., ndim)
        idx : int
            The index to calculate the derivative on.  Must be 0 since this
            interpolator is one dimensional.

        Returns
        -------
        val : numpy.array
            The derivatives at the given coordinates.

        Raises
        ------
        ValueError
            if ``idx`` is not 0.
        """
        if idx != 0:
            raise ValueError('Invalid derivative index: %d' % idx)

        # second positional argument is the derivative order
        ans = self.fun(xi, 1)
        if ans.size == 1:
            return ans[0]
        return ans
+
+
class Spline2D(DiffFunction):
    """A bivariate spline interpolator on a regular 2D grid.

    This class is backed by scipy.interpolate.RectBivariateSpline.

    Parameters
    ----------
    scale_list : list[(float, float)]
        a list of (offset, spacing) for each input dimension.
    values : numpy.array
        The output data. Must be 2D.
    extrapolate : bool
        True to extrapolate data output of given bounds. Defaults to False.
    """

    def __init__(self, scale_list, values, extrapolate=False):
        # error checking
        if len(values.shape) != 2:
            raise ValueError('This class only works for 2D data.')
        elif len(scale_list) != 2:
            raise ValueError('input and output dimension mismatch.')

        nx, ny = values.shape
        x_off, x_step = scale_list[0]
        y_off, y_step = scale_list[1]
        x = np.linspace(x_off, (nx - 1) * x_step + x_off, nx)  # type: np.multiarray.ndarray
        y = np.linspace(y_off, (ny - 1) * y_step + y_off, ny)  # type: np.multiarray.ndarray

        # remember the data bounding box for the bounds check in _get_xy()
        self._min = x[0], y[0]
        self._max = x[-1], y[-1]

        DiffFunction.__init__(self, [(x[0], x[-1]), (y[0], y[-1])], delta_list=None)

        self.fun = interp.RectBivariateSpline(x, y, values)
        self._extrapolate = extrapolate

    def _get_xy(self, xi):
        """Split the coordinate array into X and Y arrays, checking bounds."""
        xi = np.asarray(xi, dtype=float)
        if xi.shape[-1] != 2:
            raise ValueError("The requested sample points xi have dimension %d, "
                             "but this interpolator has dimension 2" % (xi.shape[-1]))

        x = xi[..., 0]  # type: np.multiarray.ndarray
        y = xi[..., 1]  # type: np.multiarray.ndarray
        if not self._extrapolate:
            # every point must lie inside the data bounding box
            in_x = (self._min[0] <= x) & (x <= self._max[0])
            in_y = (self._min[1] <= y) & (y <= self._max[1])
            if not np.all(in_x & in_y):
                raise ValueError('some inputs are out of bounds.')

        return x, y

    def __call__(self, xi):
        """Interpolate at the given coordinates.

        Parameters
        ----------
        xi : numpy.array
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : numpy.array
            The interpolated values at the given coordinates.
        """
        x, y = self._get_xy(xi)
        # grid=False evaluates point-wise instead of over the outer product
        return self.fun(x, y, grid=False)

    def deriv(self, xi, idx):
        """Calculate the derivative of the spline along the given index.

        Parameters
        ----------
        xi : numpy.array
            The coordinates to evaluate, with shape (..., ndim)
        idx : int
            The index to calculate the derivative on: 0 for X, 1 for Y.

        Returns
        -------
        val : numpy.array
            The derivatives at the given coordinates.
        """
        if idx < 0 or idx > 1:
            raise ValueError('Invalid derivative index: %d' % idx)

        x, y = self._get_xy(xi)
        # select the partial derivative order for the requested axis
        kwargs = {'dx': 1} if idx == 0 else {'dy': 1}
        return self.fun(x, y, grid=False, **kwargs)
+
+
class MapCoordinateSpline(DiffFunction):
    """A spline interpolator on a regular grid for multidimensional data.

    The spline interpolation is done using map_coordinate method in the
    scipy.ndimage.interpolation package. The derivative is done using
    finite difference.

    if extrapolate is True, we use linear interpolation for values outside of
    bounds.

    Note: By default, map_coordinate uses the nearest value for all points
    outside the boundary. This will cause undesired interpolation
    behavior near boundary points. To solve this, we linearly
    extrapolates the given data for a fixed number of points.

    Parameters
    ----------
    scale_list : list[(float, float)]
        a list of (offset, spacing) for each input dimension.
    values : numpy.array
        The output data.
    extrapolate : bool
        True to linearly extrapolate outside of bounds.
    num_extrapolate : int
        number of points to extrapolate in each dimension in each direction.
    delta : float
        the finite difference step size. Defaults to 1e-4 (relative to a spacing of 1).
    """

    def __init__(self, scale_list, values, extrapolate=False, num_extrapolate=3,
                 delta=1e-4):
        shape = values.shape
        ndim = len(shape)

        # error checking
        if ndim < 3:
            raise ValueError('Data must have 3 or more dimensions.')
        elif ndim != len(scale_list):
            raise ValueError('input and output dimension mismatch.')

        self._scale_list = scale_list
        # largest extended-grid index still covered by the original data
        self._max = [n - 1 + num_extrapolate for n in shape]
        self._extrapolate = extrapolate
        self._ext = num_extrapolate

        # linearly extrapolate given values
        # _extfun works in INDEX coordinates, with the original data placed at
        # indices [num_extrapolate, n + num_extrapolate) on each axis.
        ext_points = [np.arange(num_extrapolate, n + num_extrapolate) for n in shape]
        points, delta_list = _scales_to_points(scale_list, values, delta)
        input_ranges = [(pvec[0], pvec[-1]) for pvec in points]
        self._extfun = LinearInterpolator(ext_points, values, [delta] * ndim, extrapolate=True)

        # evaluate the linear extrapolator on the full extended index grid
        xi_ext = np.stack(np.meshgrid(*(np.arange(0, n + 2 * num_extrapolate) for n in shape),
                                      indexing='ij', copy=False), axis=-1)

        values_ext = self._extfun(xi_ext)
        # pre-compute spline coefficients so map_coordinates can be called with
        # prefilter=False later.
        # NOTE(review): scipy.ndimage.interpolation is deprecated and removed in
        # SciPy >= 1.10; consider importing these functions from scipy.ndimage.
        self._filt_values = imag_interp.spline_filter(values_ext)

        DiffFunction.__init__(self, input_ranges, delta_list=delta_list)

    def _normalize_inputs(self, xi):
        """Convert physical coordinates to extended-grid index coordinates."""
        xi = np.asarray(xi, dtype=float)
        if xi.shape[-1] != self.ndim:
            raise ValueError("The requested sample points xi have dimension %d, "
                             "but this interpolator has dimension %d" % (xi.shape[-1], self.ndim))

        # copy so the caller's array is not mutated by the in-place ops below
        xi = np.atleast_2d(xi.copy())
        for idx, (offset, scale) in enumerate(self._scale_list):
            xi[..., idx] -= offset
            xi[..., idx] /= scale

        # take extension input account.
        xi += self._ext

        return xi

    def __call__(self, xi):
        """Interpolate at the given coordinate.

        Parameters
        ----------
        xi : numpy.array
            The coordinates to evaluate, with shape (..., ndim)

        Returns
        -------
        val : numpy.array
            The interpolated values at the given coordinates.
        """
        ext = self._ext
        ndim = self.ndim
        xi = self._normalize_inputs(xi)
        ans_shape = xi.shape[:-1]
        xi = xi.reshape(-1, ndim)

        # flag points outside the original data region on any axis
        ext_idx_vec = False
        for idx in range(self.ndim):
            ext_idx_vec = ext_idx_vec | (xi[:, idx] < ext) | (xi[:, idx] > self._max[idx])

        int_idx_vec = ~ext_idx_vec
        xi_ext = xi[ext_idx_vec, :]
        xi_int = xi[int_idx_vec, :]
        ans = np.empty(xi.shape[0])
        # interior points: cubic spline via map_coordinates on prefiltered data
        ans[int_idx_vec] = imag_interp.map_coordinates(self._filt_values, xi_int.T,
                                                       mode='nearest', prefilter=False)
        if xi_ext.size > 0:
            if not self._extrapolate:
                raise ValueError('some inputs are out of bounds.')
            # out-of-range points fall back to the linear extrapolator
            ans[ext_idx_vec] = self._extfun(xi_ext)

        if ans.size == 1:
            return ans[0]
        return ans.reshape(ans_shape)
diff --git a/src/bag/mdao/__init__.py b/src/bag/mdao/__init__.py
new file mode 100644
index 0000000..c31dc62
--- /dev/null
+++ b/src/bag/mdao/__init__.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package contains various openmdao related modules.
+"""
\ No newline at end of file
diff --git a/src/bag/mdao/components.py b/src/bag/mdao/components.py
new file mode 100644
index 0000000..1bf8925
--- /dev/null
+++ b/src/bag/mdao/components.py
@@ -0,0 +1,190 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines various OpenMDAO component classes.
+"""
+
+import numpy as np
+import openmdao.api as omdao
+
+
class VecFunComponent(omdao.Component):
    """A component based on a list of functions.

    A component that evaluates multiple functions on the given inputs, then
    returns the result as an 1D array. Each of the inputs may be a scalar or
    a vector with the same size as the output. If a vector input is given,
    each function will use a different element of the vector.

    NOTE(review): this targets the legacy OpenMDAO 1.x ``Component`` API
    (``add_param``/``solve_nonlinear``/``linearize``) — confirm the installed
    OpenMDAO version supports it.

    Parameters
    ----------
    output_name : str
        output name.
    fun_list : list[bag.math.dfun.DiffFunction]
        list of interpolator functions, one for each dimension.
    params : list[str]
        list of parameter names. Parameter names may repeat, in which case the
        same parameter will be used for multiple arguments of the function.
    vector_params : set[str]
        set of parameters that are vector instead of scalar. If a parameter
        is a vector, it will be the same size as the output, and each function
        only takes in the corresponding element of the parameter.
    """

    def __init__(self, output_name, fun_list, params,
                 vector_params=None):
        omdao.Component.__init__(self)

        vector_params = vector_params or set()

        self._output = output_name
        self._out_dim = len(fun_list)
        self._in_dim = len(params)
        self._params = params
        # maps parameter name -> (declaration order index, is-vector flag)
        self._unique_params = {}
        self._fun_list = fun_list

        for par in params:
            # vector parameters have one element per output; scalars have one
            adj = par in vector_params
            shape = self._out_dim if adj else 1

            if par not in self._unique_params:
                # linear check, but small list so should be fine.
                self.add_param(par, val=np.zeros(shape))
                self._unique_params[par] = len(self._unique_params), adj

        # construct chain rule jacobian matrix
        # maps each function argument position to its (possibly shared) parameter
        self._chain_jacobian = np.zeros((self._in_dim, len(self._unique_params)))
        for idx, par in enumerate(params):
            self._chain_jacobian[idx, self._unique_params[par][0]] = 1

        self.add_output(output_name, val=np.zeros(self._out_dim))

    def __call__(self, **kwargs):
        """Evaluate on the given inputs.

        Parameters
        ----------
        kwargs : dict[str, np.array or float]
            the inputs as a dictionary.

        Returns
        -------
        out : np.array
            the output array.
        """
        tmp = {}
        self.solve_nonlinear(kwargs, tmp)
        return tmp[self._output]

    def _get_inputs(self, params):
        """Given parameter values, construct inputs for functions.

        Builds an (out_dim, in_dim) matrix; row k holds the argument vector of
        function k.  Scalar parameters broadcast down each column.

        Parameters
        ----------
        params : VecWrapper, optional
            VecWrapper containing parameters. (p)

        Returns
        -------
        ans : list[list[float]]
            input lists.
        """
        ans = np.empty((self._out_dim, self._in_dim))
        for idx, name in enumerate(self._params):
            ans[:, idx] = params[name]
        return ans

    def solve_nonlinear(self, params, unknowns, resids=None):
        """Compute the output parameter.

        Parameters
        ----------
        params : VecWrapper, optional
            VecWrapper containing parameters. (p)

        unknowns : VecWrapper, optional
            VecWrapper containing outputs and states. (u)

        resids : VecWrapper, optional
            VecWrapper containing residuals. (r)
        """
        xi_mat = self._get_inputs(params)

        # evaluate each function on its own argument row
        tmp = np.empty(self._out_dim)
        for idx in range(self._out_dim):
            tmp[idx] = self._fun_list[idx](xi_mat[idx, :])

        unknowns[self._output] = tmp

    def linearize(self, params, unknowns=None, resids=None):
        """Compute the Jacobian of the parameter.

        Returns a dictionary keyed by (output name, parameter name); vector
        parameters get a diagonal Jacobian since function k only reads
        element k of the vector.

        Parameters
        ----------
        params : VecWrapper, optional
            VecWrapper containing parameters. (p)

        unknowns : VecWrapper, optional
            VecWrapper containing outputs and states. (u)

        resids : VecWrapper, optional
            VecWrapper containing residuals. (r)
        """
        # print('rank {} computing jac for {}'.format(self.comm.rank, self._outputs))

        xi_mat = self._get_inputs(params)

        # per-function Jacobians with respect to the raw argument positions
        jf = np.empty((self._out_dim, self._in_dim))
        for k, fun in enumerate(self._fun_list):
            jf[k, :] = fun.jacobian(xi_mat[k, :])

        # chain rule: collapse repeated parameters into unique columns
        jmat = np.dot(jf, self._chain_jacobian)
        jdict = {}
        for par, (pidx, adj) in self._unique_params.items():
            tmp = jmat[:, pidx]
            if adj:
                # vector parameter: output k depends only on element k
                tmp = np.diag(tmp)
            jdict[self._output, par] = tmp

        return jdict
diff --git a/src/bag/mdao/core.py b/src/bag/mdao/core.py
new file mode 100644
index 0000000..5d89ff6
--- /dev/null
+++ b/src/bag/mdao/core.py
@@ -0,0 +1,313 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines core BAG openmdao classes."""
+
+import numpy as np
+import networkx as nx
+import openmdao.api as omdao
+
+import bag.util.parse
+
+from .components import VecFunComponent
+
+
class GroupBuilder(object):
    """A class that builds new OpenMDAO groups.

    This class provides a simple interface to define new variables as function of
    other variables, and it tracks the variable dependencies using a directed
    acyclic graph.
    """

    def __init__(self):
        # dependency graph: nodes are variables, edges run parent -> child.
        # NOTE: this class uses the networkx 1.x API (node/nodes_iter/
        # predecessors_iter) and the OpenMDAO 1.x API (Group.add, ExecComp).
        self._g = nx.DiGraph()
        # variables with no dependencies (graph sources).
        self._input_vars = set()

    def _add_node(self, name, ndim, **kwargs):
        """Helper method to add a node and keep track of input variables."""
        self._g.add_node(name, ndim=ndim, **kwargs)
        self._input_vars.add(name)

    def _add_edge(self, parent, child):
        """Helper method to add an edge and update input variables."""
        self._g.add_edge(parent, child)
        # child now has a dependency, so it is no longer an input.
        self._input_vars.discard(child)

    def get_inputs(self):
        """Returns a set of current input variable names.

        Returns
        -------
        input_vars : set[str]
            a set of input variable names.
        """
        return self._input_vars.copy()

    def get_variables(self):
        """Returns a list of variables.

        Returns
        -------
        var_list : list[str]
            a list of variables.
        """
        return list(self._g.nodes_iter())

    def get_variable_info(self, name):
        """Returns the attribute dictionary of the given variable.

        Parameters
        ----------
        name : str
            variable name.

        Returns
        -------
        attrs : dict[str, any]
            a copy of the variable's attribute dictionary.  Always contains
            'ndim'; 'min'/'max' are present once a range has been specified.
        """
        nattr = self._g.node[name]
        return nattr.copy()

    def add_fun(self, var_name, fun_list, params, param_ranges, vector_params=None):
        """Add a new variable defined by the given list of functions.

        Parameters
        ----------
        var_name : str
            variable name.
        fun_list : list[bag.math.interpolate.Interpolator]
            list of functions, one for each dimension.
        params : list[str]
            list of parameter names.  Parameter names may repeat, in which case the
            same parameter will be used for multiple arguments of the function.
        param_ranges : dict[str, (float, float)]
            a dictionary of parameter valid range.
        vector_params : set[str]
            set of parameters that are vector instead of scalar.  If a parameter
            is a vector, it will be the same size as the output, and each function
            only takes in the corresponding element of the parameter.
        """
        vector_params = vector_params or set()
        ndim = len(fun_list)

        # error checking
        for par in params:
            if par not in param_ranges:
                raise ValueError('Valid range of %s not specified.' % par)

        # add inputs
        for par, (par_min, par_max) in param_ranges.items():
            par_dim = ndim if par in vector_params else 1
            if par not in self._g:
                # add input to graph if it's not in there.
                self._add_node(par, par_dim)

            nattrs = self._g.node[par]
            if nattrs['ndim'] != par_dim:
                # error checking.
                raise ValueError('Variable %s has dimension mismatch.' % par)
            # intersect the new range with any previously declared range.
            nattrs['min'] = max(par_min, nattrs.get('min', par_min))
            nattrs['max'] = min(par_max, nattrs.get('max', par_max))

        # add current variable
        if var_name not in self._g:
            self._add_node(var_name, ndim)

        nattrs = self._g.node[var_name]
        # error checking.
        if nattrs['ndim'] != ndim:
            raise ValueError('Variable %s has dimension mismatch.' % var_name)
        if self._g.in_degree(var_name) > 0:
            raise Exception('Variable %s already has other dependencies.' % var_name)

        nattrs['fun_list'] = fun_list
        nattrs['params'] = params
        nattrs['vec_params'] = vector_params
        for parent in param_ranges.keys():
            self._add_edge(parent, var_name)

    def add_var(self, variable, vmin, vmax, ndim=1):
        """Adds a new independent variable.

        Parameters
        ----------
        variable : str
            the variable to add
        vmin : float
            the minimum allowable value.
        vmax : float
            the maximum allowable value.
        ndim : int
            the dimension of the variable.  Defaults to 1.
        """
        if variable in self._g:
            raise Exception('Variable %s already exists.' % variable)
        self._add_node(variable, ndim, min=vmin, max=vmax)

    def set_input_limit(self, var, equals=None, lower=None, upper=None):
        """Sets the limit on the given input variable.

        Unknown variables are silently ignored.  An ``equals`` value overrides
        both ``lower`` and ``upper``.

        Parameters
        ----------
        var : str
            name of the variable.
        equals : float or None
            if given, the equality value.
        lower : float or None
            if given, the minimum.
        upper : float or None
            if given, the maximum.
        """
        if var in self._g:
            if self._g.in_degree(var) > 0:
                raise Exception('Variable %s is not an input variable' % var)
            nattr = self._g.node[var]
            if equals is not None:
                nattr['equals'] = equals
                lower = upper = equals
            # tighten (never widen) any previously recorded bounds.
            if lower is not None:
                nattr['min'] = max(nattr.get('min', lower), lower)
            if upper is not None:
                nattr['max'] = min(nattr.get('max', upper), upper)

    def add_expr(self, eqn, ndim):
        """Adds a new variable with the given expression.

        Parameters
        ----------
        eqn : str
            An equation of the form "<var> = <expr>", where <var>
            is the output variable name, and <expr> is the expression.
            All variables in <expr> must be already added.
        ndim : int
            the dimension of the output variable.
        """
        variable, expr = eqn.split('=', 1)
        variable = variable.strip()
        expr = expr.strip()

        if variable not in self._g:
            self._add_node(variable, ndim)
        nattrs = self._g.node[variable]
        if nattrs['ndim'] != ndim:
            raise Exception('Dimension mismatch for %s' % variable)
        if self._g.in_degree(variable) > 0:
            raise Exception('%s already depends on other variables' % variable)

        invars = bag.util.parse.get_variables(expr)
        for parent in invars:
            if parent not in self._g:
                raise Exception('Variable %s is not defined.' % parent)
            self._add_edge(parent, variable)

        nattrs['expr'] = expr

    def build(self, debug=False):
        """Returns a OpenMDAO Group from the variable graph.

        Parameters
        ----------
        debug : bool
            True to print debug messages.

        Returns
        -------
        grp : omdao.Group
            the OpenMDAO group that computes all variables.
        input_bounds : dict[str, any]
            a dictionary from input variable name to (equals, min, max, ndim) tuple.
        """
        input_bounds = {}
        ndim_dict = {}

        if not nx.is_directed_acyclic_graph(self._g):
            raise Exception('Dependency loop detected')

        grp = omdao.Group()
        prom = ['*']
        # topological order guarantees parents are processed before children.
        for var in nx.topological_sort(self._g):
            nattrs = self._g.node[var]
            ndim = nattrs['ndim']
            ndim_dict[var] = ndim
            if self._g.in_degree(var) == 0:
                if debug:
                    # input variable
                    print('Input variable: %s' % var)
                # range checking
                vmin, vmax = nattrs['min'], nattrs['max']
                veq = nattrs.get('equals', None)
                if vmin > vmax:
                    raise Exception('Variable %s input range not valid.' % var)
                input_bounds[var] = veq, vmin, vmax, ndim
            else:
                init_vals = {par: np.zeros(ndim_dict[par]) for par in self._g.predecessors_iter(var)}
                comp_name = 'comp__%s' % var
                if 'expr' in nattrs:
                    eqn = '{}={}'.format(var, nattrs['expr'])
                    init_vals[var] = np.zeros(ndim)
                    # noinspection PyTypeChecker
                    grp.add(comp_name, omdao.ExecComp(eqn, **init_vals), promotes=prom)
                elif 'fun_list' in nattrs:
                    params = nattrs['params']
                    fun_list = nattrs['fun_list']
                    vec_params = nattrs['vec_params']
                    comp = VecFunComponent(var, fun_list, params, vector_params=vec_params)
                    # noinspection PyTypeChecker
                    grp.add(comp_name, comp, promotes=prom)
                else:
                    raise Exception('Unknown attributes: {}'.format(nattrs))

        return grp, input_bounds
diff --git a/src/bag/simulation/__init__.py b/src/bag/simulation/__init__.py
new file mode 100644
index 0000000..4d64194
--- /dev/null
+++ b/src/bag/simulation/__init__.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package defines various utility classes for running simulations and data post-processing.
+"""
\ No newline at end of file
diff --git a/src/bag/simulation/base.py b/src/bag/simulation/base.py
new file mode 100644
index 0000000..aaad9ca
--- /dev/null
+++ b/src/bag/simulation/base.py
@@ -0,0 +1,179 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""This module handles high level simulation routines.
+
+This module defines SimAccess, which provides methods to run simulations
+and retrieve results.
+"""
+
+from typing import Dict, Any, Tuple, Union, Sequence
+
+import abc
+from pathlib import Path
+
+from pybag.enum import DesignOutput
+from pybag.core import get_cdba_name_bits
+
+from ..concurrent.core import SubProcessManager, batch_async_task
+from .data import SimNetlistInfo, SimData
+
+
def get_corner_temp(env_str: str) -> Tuple[str, int]:
    """Split a simulation environment string into (corner, temperature).

    The string has the form ``<corner>_<temp>``, split at the LAST underscore;
    an ``m`` in the temperature part denotes a minus sign (e.g. ``ss_m40``).

    Raises
    ------
    ValueError
        if the string contains no underscore.
    """
    corner, sep, temp_str = env_str.rpartition('_')
    if not sep:
        raise ValueError(f'Invalid environment string: {env_str}')
    return corner, int(temp_str.replace('m', '-'))
+
+
def setup_corner(corner_str: str, temp: int) -> str:
    """Build an environment string; inverse of :func:`get_corner_temp`.

    A negative temperature is encoded with a leading ``m`` so the result can
    be parsed back by :func:`get_corner_temp`.
    """
    temp_str = str(temp).replace('-', 'm')
    return f'{corner_str}_{temp_str}'
+
+
def get_bit_list(pin: Union[str, Sequence[str]]) -> Sequence[str]:
    """Expand a pin name (or a sequence of pin names) into individual bits.

    An empty string yields an empty list; a sequence is flattened by
    expanding each entry in order.
    """
    if not isinstance(pin, str):
        return [bit for term in pin for bit in get_cdba_name_bits(term)]
    return get_cdba_name_bits(pin) if pin else []
+
+
class SimAccess(abc.ABC):
    """A class that interacts with a simulator.

    Concrete subclasses implement netlist creation, simulation launch, and
    result loading for one particular simulator.

    Parameters
    ----------
    parent : str
        parent directory for SimAccess.
    sim_config : Dict[str, Any]
        the simulation configuration dictionary.
    """

    def __init__(self, parent: str, sim_config: Dict[str, Any]) -> None:
        self._config = sim_config
        # all simulation files live under <parent>/simulations.
        self._dir_path = (Path(parent) / "simulations").resolve()

    @property
    @abc.abstractmethod
    def netlist_type(self) -> DesignOutput:
        """DesignOutput: the netlist format this simulator consumes."""
        return DesignOutput.CDL

    @abc.abstractmethod
    def create_netlist(self, output_path: Path, sch_netlist: Path, info: SimNetlistInfo,
                       precision: int = 6) -> None:
        """Create a simulation netlist from a schematic netlist.

        Parameters
        ----------
        output_path : Path
            the output netlist file.
        sch_netlist : Path
            the schematic netlist to wrap.
        info : SimNetlistInfo
            the simulation netlist setup information.
        precision : int
            numeric precision used when writing values.
        """
        pass

    @abc.abstractmethod
    def get_sim_file(self, dir_path: Path, sim_tag: str) -> Path:
        """Returns path to the simulation file."""
        pass

    @abc.abstractmethod
    def load_sim_data(self, dir_path: Path, sim_tag: str) -> SimData:
        """Load simulation results.

        Parameters
        ----------
        dir_path : Path
            the working directory path.
        sim_tag : str
            optional simulation name.  Empty for default.

        Returns
        -------
        data : SimData
            the loaded simulation data.
        """
        pass

    @abc.abstractmethod
    async def async_run_simulation(self, netlist: Path, sim_tag: str) -> None:
        """A coroutine for simulation a testbench.

        Parameters
        ----------
        netlist : Path
            the netlist file name.
        sim_tag : str
            optional simulation name.  Empty for default.
        """
        pass

    @property
    def dir_path(self) -> Path:
        """Path: the directory for simulation files."""
        return self._dir_path

    @property
    def config(self) -> Dict[str, Any]:
        """Dict[str, Any]: simulation configurations."""
        return self._config

    def run_simulation(self, netlist: Path, sim_tag: str) -> None:
        """Blocking wrapper around :meth:`async_run_simulation`."""
        coro = self.async_run_simulation(netlist, sim_tag)
        batch_async_task([coro])
+
+
class SimProcessManager(SimAccess, abc.ABC):
    """An implementation of :class:`SimAccess` using :class:`SubProcessManager`.

    Parameters
    ----------
    tmp_dir : str
        temporary file directory for SimAccess.
    sim_config : Dict[str, Any]
        the simulation configuration dictionary.
    """

    def __init__(self, tmp_dir: str, sim_config: Dict[str, Any]) -> None:
        super().__init__(tmp_dir, sim_config)

        # cancel timeout is given in milliseconds; SubProcessManager wants seconds.
        timeout_ms = sim_config.get('cancel_timeout_ms', 10000)
        num_workers = sim_config.get('max_workers', 0)
        self._manager = SubProcessManager(max_workers=num_workers,
                                          cancel_timeout=timeout_ms / 1e3)

    @property
    def manager(self) -> SubProcessManager:
        """SubProcessManager: the subprocess manager used to launch simulations."""
        return self._manager
diff --git a/src/bag/simulation/cache.py b/src/bag/simulation/cache.py
new file mode 100644
index 0000000..d314589
--- /dev/null
+++ b/src/bag/simulation/cache.py
@@ -0,0 +1,489 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines classes and methods to cache previous simulation results."""
+
+from __future__ import annotations
+
+from typing import (
+ TYPE_CHECKING, Optional, Type, Dict, List, Mapping, Any, Union, Tuple, Sequence, cast
+)
+
+import shutil
+import filecmp
+from pathlib import Path
+from dataclasses import dataclass
+
+from pybag.enum import DesignOutput, LogLevel
+from pybag.core import FileLogger, gds_equal
+
+from ..env import get_gds_layer_map, get_gds_object_map
+from ..io.file import read_yaml, write_yaml
+from ..util.logging import LoggingBase
+from ..util.immutable import combine_hash
+from ..util.importlib import import_class
+from ..concurrent.util import GatherHelper
+from ..concurrent.core import batch_async_task
+from ..interface.database import DbAccess
+from ..design.database import ModuleDB
+from ..design.module import Module, PySchCellView
+from ..layout.template import TemplateDB, TemplateBase
+from .data import SimData
+from .hdf5 import load_sim_data_hdf5
+from .core import TestbenchManager
+from .measure import MeasurementManager
+
+if TYPE_CHECKING:
+ from ..core import BagProject
+
+
@dataclass(frozen=True)
class DesignInstance:
    """An immutable handle to a generated (and possibly extracted) design."""

    # name of the generated cell in the implementation library.
    cell_name: str
    # the generated schematic master.
    sch_master: Module
    # the generated layout master; None for schematic-only designs.
    lay_master: Optional[TemplateBase]
    # path to the simulation netlist for this design.
    netlist_path: Path
    # schematic cellview information, passed to testbench setup for netlisting.
    cv_info_list: List[PySchCellView]

    @property
    def cache_name(self) -> str:
        """str: name of this design's cache directory (parent of the netlist)."""
        return self.netlist_path.parent.name
+
+
@dataclass(frozen=True)
class SimResults:
    """Result bundle of a single testbench simulation."""

    # the simulated design; None for DUT-less testbenches.
    dut: Optional[DesignInstance]
    # the testbench manager that configured the simulation.
    tbm: TestbenchManager
    # the loaded simulation data.
    data: SimData
+
+
@dataclass(frozen=True)
class MeasureResult:
    """Result bundle of a single measurement run."""

    # the measured design; None for DUT-less measurements.
    dut: Optional[DesignInstance]
    # the measurement manager that produced the data.
    mm: MeasurementManager
    # the measurement result dictionary.
    data: Mapping[str, Any]
+
+
class DesignDB(LoggingBase):
    """A class that caches extracted netlists.

    Generated designs are stored in per-cell directories under ``root_dir``.
    A ``cache.yaml`` file maps a combined schematic/layout hash to candidate
    directory names, so that re-generating an identical design reuses the
    previously written (and possibly extracted) netlists.
    """

    def __init__(self, root_dir: Path, log_file: str, db_access: DbAccess,
                 sim_netlist_type: DesignOutput, sch_db: ModuleDB, lay_db: TemplateDB,
                 extract: bool = False, gen_sch: bool = False, force_extract: bool = False,
                 log_level: LogLevel = LogLevel.DEBUG) -> None:
        LoggingBase.__init__(self, 'dsn_db', log_file, log_level=log_level)

        self._root_dir = root_dir
        self._db = db_access
        self._sim_type = sim_netlist_type
        self._sch_db = sch_db
        self._lay_db = lay_db
        self._extract = extract
        self._force_extract = force_extract
        self._gen_sch = gen_sch
        self._lay_map = get_gds_layer_map()
        self._obj_map = get_gds_object_map()

        root_dir.mkdir(parents=True, exist_ok=True)

        # load the cache index, creating a fresh one on first use.
        self._info_file = root_dir / 'cache.yaml'
        if self._info_file.exists():
            self._info_specs = read_yaml(self._info_file)
        else:
            self._info_specs = dict(
                cache={},
                cnt={},
            )
            write_yaml(self._info_file, self._info_specs)

        # hash -> list of cache directory names with that hash.
        self._cache: Dict[int, List[str]] = self._info_specs['cache']
        # impl_cell -> number of cache directories created for that cell name.
        self._cnt: Dict[str, int] = self._info_specs['cnt']

    @property
    def impl_lib(self) -> str:
        """str: name of the implementation library."""
        return self._sch_db.lib_name

    @property
    def sch_db(self) -> ModuleDB:
        """ModuleDB: the schematic master database."""
        return self._sch_db

    @property
    def gen_sch(self) -> bool:
        """bool: True if schematics are generated in the design environment."""
        return self._gen_sch

    @property
    def extract(self) -> bool:
        """bool: True if extraction is enabled by default."""
        return self._extract

    async def async_batch_design(self, dut_specs: Sequence[Mapping[str, Any]]
                                 ) -> Sequence[DesignInstance]:
        """Create multiple designs, running any needed extractions concurrently.

        Each entry of dut_specs is a keyword dictionary for _create_dut; the
        same extraction directory is only scheduled once.
        """
        ans = []
        extract_set = set()
        gatherer = GatherHelper()
        for dut_info in dut_specs:
            dut, ext_path = await self._create_dut(**dut_info)
            ans.append(dut)
            if ext_path is not None and ext_path not in extract_set:
                extract_set.add(ext_path)
                impl_cell: str = dut_info['impl_cell']
                gatherer.append(self._extract_netlist(ext_path, impl_cell))

        if gatherer:
            await gatherer.gather_err()

        return ans

    async def async_new_design(self, impl_cell: str,
                               lay_cls: Union[Type[TemplateBase], Type[Module], str],
                               dut_params: Mapping[str, Any], extract: Optional[bool] = None,
                               name_prefix: str = '', name_suffix: str = '', flat: bool = False,
                               export_lay: bool = False) -> DesignInstance:
        """Create a single design, running extraction if needed."""
        dut, ext_path = await self._create_dut(impl_cell, lay_cls, dut_params, extract=extract,
                                               name_prefix=name_prefix, name_suffix=name_suffix,
                                               flat=flat, export_lay=export_lay)
        if ext_path is not None:
            await self._extract_netlist(ext_path, impl_cell)

        return dut

    def new_design(self, impl_cell: str, lay_cls: Union[Type[TemplateBase], Type[Module], str],
                   dut_params: Mapping[str, Any], extract: Optional[bool] = None) -> DesignInstance:
        """Blocking wrapper around :meth:`async_new_design`."""
        coro = self.async_new_design(impl_cell, lay_cls, dut_params, extract=extract)
        results = batch_async_task([coro])
        if results is None:
            self.error('Design generation cancelled')

        ans = results[0]
        if isinstance(ans, Exception):
            raise ans
        return ans

    async def _create_dut(self, impl_cell: str,
                          dut_cls: Union[Type[TemplateBase], Type[Module], str],
                          dut_params: Mapping[str, Any], extract: Optional[bool] = None,
                          name_prefix: str = '', name_suffix: str = '', flat: bool = False,
                          export_lay: bool = False) -> Tuple[DesignInstance, Optional[Path]]:
        """Generate masters and netlists, reusing a cached directory when possible.

        Returns the DesignInstance and, when extraction still has to run, the
        cache directory to extract in (None otherwise).
        """
        sim_ext = self._sim_type.extension
        exact_cell_names = {impl_cell}

        obj_cls = import_class(dut_cls)
        if issubclass(obj_cls, TemplateBase):
            # layout generator: create the layout master and write a GDS.
            self.log(f'Creating layout: {obj_cls.__name__}')
            lay_master = self._lay_db.new_template(obj_cls, params=dut_params)
            sch_params = lay_master.sch_params
            sch_cls = lay_master.get_schematic_class_inst()
            layout_hash = hash(lay_master.key)
            gds_file = str(self._root_dir / 'tmp.gds')
            if export_lay:
                self._lay_db.batch_layout([(lay_master, impl_cell)], output=DesignOutput.LAYOUT,
                                          name_prefix=name_prefix, name_suffix=name_suffix,
                                          exact_cell_names=exact_cell_names)
                await self._db.async_export_layout(self._lay_db.lib_name, impl_cell, gds_file)
            else:
                self._lay_db.batch_layout([(lay_master, impl_cell)], output=DesignOutput.GDS,
                                          fname=gds_file, name_prefix=name_prefix,
                                          name_suffix=name_suffix,
                                          exact_cell_names=exact_cell_names)
        else:
            # schematic-only generator.
            if extract:
                raise ValueError('Cannot run extraction without layout.')

            lay_master = None
            sch_params = dut_params
            sch_cls = obj_cls
            layout_hash = 0
            gds_file = ''

        # NOTE(review): unreachable — the schematic-only branch above already
        # raises when extract is True and lay_master is None.
        if extract and lay_master is None:
            raise ValueError('Cannot run extraction without layout.')

        self.log(f'Creating schematic: {sch_cls.__name__}')
        sch_master: Module = self._sch_db.new_master(sch_cls, params=sch_params)

        # create schematic netlist
        cdl_netlist = str(self._root_dir / 'tmp.cdl')
        cv_info_out = []
        sch_dut_list = [(sch_master, impl_cell)]
        self._sch_db.batch_schematic(sch_dut_list, output=DesignOutput.CDL,
                                     fname=cdl_netlist, cv_info_out=cv_info_out,
                                     name_prefix=name_prefix, name_suffix=name_suffix,
                                     exact_cell_names=exact_cell_names)
        if self._gen_sch:
            self._sch_db.batch_schematic(sch_dut_list,
                                         name_prefix=name_prefix, name_suffix=name_suffix,
                                         exact_cell_names=exact_cell_names)

        self.log('Check for existing netlist')
        # cache key combines the layout hash and the schematic hash; hash
        # collisions are resolved below by comparing file contents.
        hash_id = combine_hash(layout_hash, hash(sch_master.key))
        dir_list = self._cache.get(hash_id, None)
        if dir_list is None:
            dir_list = []
            self._cache[hash_id] = dir_list
            dir_path = self._generate_cell(impl_cell, cdl_netlist, gds_file)
            dir_list.append(dir_path.name)
            write_yaml(self._info_file, self._info_specs)
        else:
            dir_path = None
            for dir_name in dir_list:
                cur_dir = self._root_dir / dir_name
                # confirm real equality of netlist (and layout, if any).
                if filecmp.cmp(cdl_netlist, cur_dir / 'netlist.cdl', shallow=False):
                    if (not gds_file) or gds_equal(gds_file, str(cur_dir / 'layout.gds')):
                        self.log('Found existing design, reusing DUT netlist.')
                        dir_path = cur_dir
                        break

            if dir_path is None:
                dir_path = self._generate_cell(impl_cell, cdl_netlist, gds_file)
                dir_list.append(dir_path.name)
                write_yaml(self._info_file, self._info_specs)

        if extract or (extract is None and self._extract):
            # simulate the extracted netlist; schedule extraction if missing.
            ans = dir_path / 'rcx.sp'
            if not ans.exists() or self._force_extract:
                extract_info = dir_path
            else:
                extract_info = None
        else:
            # simulate the schematic netlist in the simulator's native format.
            extract_info = None
            ans = dir_path / f'netlist.{sim_ext}'
            if not ans.exists():
                self._sch_db.batch_schematic(sch_dut_list, output=self._sim_type, fname=str(ans),
                                             name_prefix=name_prefix, name_suffix=name_suffix,
                                             exact_cell_names=exact_cell_names, flat=flat)

        return DesignInstance(impl_cell, sch_master, lay_master, ans, cv_info_out), extract_info

    async def _extract_netlist(self, dsn_dir: Path, impl_cell: str) -> None:
        """Run LVS then RCX in dsn_dir, producing dsn_dir/rcx.sp on success."""
        impl_lib = self.impl_lib

        self.log('running LVS...')
        ext_dir = dsn_dir / 'rcx'
        lvs_passed, lvs_log = await self._db.async_run_lvs(impl_lib, impl_cell, run_rcx=True,
                                                           layout=str(dsn_dir / 'layout.gds'),
                                                           netlist=str(dsn_dir / 'netlist.cdl'),
                                                           run_dir=ext_dir)
        if lvs_passed:
            self.log('LVS passed!')
        else:
            self.error(f'LVS failed... log file: {lvs_log}')

        self.log('running RCX...')
        final_netlist, rcx_log = await self._db.async_run_rcx(impl_lib, impl_cell,
                                                              run_dir=ext_dir)
        if final_netlist:
            self.log('RCX passed!')
            shutil.copy(final_netlist, str(dsn_dir / 'rcx.sp'))
        else:
            self.error(f'RCX failed... log file: {rcx_log}')

    def _generate_cell(self, impl_cell: str, cdl_netlist: str, gds_file: str) -> Path:
        """Create a new uniquely-named cache directory and move netlists into it."""
        self.log('No existing design, generating netlist')
        # suffix the directory with a counter to keep names unique per cell.
        cur_cnt = self._cnt.get(impl_cell, -1) + 1
        self._cnt[impl_cell] = cur_cnt
        dir_name = impl_cell if cur_cnt == 0 else f'{impl_cell}_{cur_cnt}'
        dir_path = self._root_dir / dir_name
        dir_path.mkdir(parents=True, exist_ok=True)

        shutil.move(cdl_netlist, str(dir_path / 'netlist.cdl'))
        if gds_file:
            shutil.move(gds_file, str(dir_path / 'layout.gds'))
        return dir_path
+
+
class SimulationDB(LoggingBase):
    """A class that caches netlists, layouts, and simulation results.

    Wraps a DesignDB and a simulator access object; simulations are skipped
    when neither the testbench netlist nor the DUT netlist has changed.
    """

    def __init__(self, log_file: str, dsn_db: DesignDB, force_sim: bool = False,
                 precision: int = 6, log_level: LogLevel = LogLevel.DEBUG) -> None:
        LoggingBase.__init__(self, 'sim_db', log_file, log_level=log_level)

        self._dsn_db = dsn_db
        self._sim = self._dsn_db.sch_db.prj.sim_access
        # when True, always re-run simulations even if cached data exists.
        self._force_sim = force_sim
        self._precision = precision

    @property
    def prj(self) -> BagProject:
        """BagProject: the BagProject instance."""
        return self._dsn_db.sch_db.prj

    @property
    def precision(self) -> int:
        """int: numeric precision used for netlisting."""
        return self._precision

    @property
    def extract(self) -> bool:
        """bool: True if extraction is enabled by default."""
        return self._dsn_db.extract

    def make_tbm(self, tbm_cls: Union[Type[TestbenchManager], str], tbm_specs: Mapping[str, Any],
                 work_dir: Optional[Path] = None, tb_name: str = '',
                 logger: Optional[FileLogger] = None) -> TestbenchManager:
        """Create a TestbenchManager instance of the given class."""
        obj_cls = cast(Type[TestbenchManager], import_class(tbm_cls))

        if work_dir is None:
            work_dir = Path()
        if logger is None:
            logger = self.logger
        return obj_cls(self._sim, work_dir, tb_name, '', tbm_specs, None, None,
                       precision=self._precision, logger=logger)

    def make_mm(self, mm_cls: Union[Type[MeasurementManager], str], meas_specs: Mapping[str, Any]
                ) -> MeasurementManager:
        """Create a MeasurementManager instance of the given class."""
        obj_cls = cast(Type[MeasurementManager], import_class(mm_cls))

        return obj_cls(meas_specs, self.log_file, log_level=self.log_level,
                       precision=self._precision)

    def new_design(self, impl_cell: str, lay_cls: Union[Type[TemplateBase], Type[Module], str],
                   dut_params: Mapping[str, Any], extract: Optional[bool] = None) -> DesignInstance:
        """Delegates to :meth:`DesignDB.new_design`."""
        return self._dsn_db.new_design(impl_cell, lay_cls, dut_params, extract=extract)

    def simulate_tbm(self, sim_id: str, sim_dir: Path, dut: DesignInstance,
                     tbm_cls: Union[Type[TestbenchManager], str],
                     tb_params: Optional[Mapping[str, Any]], tbm_specs: Mapping[str, Any],
                     tb_name: str = '') -> SimResults:
        """Create a testbench manager and simulate; blocking."""
        tbm = self.make_tbm(tbm_cls, tbm_specs)
        return self.simulate_tbm_obj(sim_id, sim_dir, dut, tbm, tb_params, tb_name=tb_name)

    def simulate_tbm_obj(self, sim_id: str, sim_dir: Path, dut: DesignInstance,
                         tbm: TestbenchManager, tb_params: Optional[Mapping[str, Any]],
                         tb_name: str = '') -> SimResults:
        """Blocking wrapper around :meth:`async_simulate_tbm_obj`."""
        coro = self.async_simulate_tbm_obj(sim_id, sim_dir, dut, tbm, tb_params, tb_name=tb_name)
        results = batch_async_task([coro])
        if results is None:
            self.error('Simulation cancelled')

        ans = results[0]
        if isinstance(ans, Exception):
            raise ans
        return ans

    def simulate_mm_obj(self, sim_id: str, sim_dir: Path, dut: DesignInstance,
                        mm: MeasurementManager) -> MeasureResult:
        """Blocking wrapper around :meth:`async_simulate_mm_obj`."""
        coro = self.async_simulate_mm_obj(sim_id, sim_dir, dut, mm)
        results = batch_async_task([coro])
        if results is None:
            self.error('Measurement cancelled')

        ans = results[0]
        if isinstance(ans, Exception):
            raise ans
        return ans

    async def async_batch_design(self, dut_specs: Sequence[Mapping[str, Any]],
                                 ) -> Sequence[DesignInstance]:
        """Delegates to :meth:`DesignDB.async_batch_design`."""
        return await self._dsn_db.async_batch_design(dut_specs)

    async def async_new_design(self, impl_cell: str,
                               lay_cls: Union[Type[TemplateBase], Type[Module], str],
                               dut_params: Mapping[str, Any], extract: Optional[bool] = None,
                               name_prefix: str = '', name_suffix: str = '',
                               flat: bool = False, export_lay: bool = False) -> DesignInstance:
        """Delegates to :meth:`DesignDB.async_new_design`."""
        return await self._dsn_db.async_new_design(impl_cell, lay_cls, dut_params, extract=extract,
                                                   name_prefix=name_prefix, export_lay=export_lay,
                                                   name_suffix=name_suffix, flat=flat)

    async def async_simulate_tbm_obj(self, sim_id: str, sim_dir: Path,
                                     dut: Optional[DesignInstance], tbm: TestbenchManager,
                                     tb_params: Optional[Mapping[str, Any]],
                                     tb_name: str = '') -> SimResults:
        """Set up the testbench and simulate, reusing cached data when possible.

        The simulation is skipped only when the generated testbench netlist is
        byte-identical to the previous run's and the DUT netlist has not been
        modified since the cached data was written.
        """
        if not tb_name:
            tb_name = sim_id

        sch_db = self._dsn_db.sch_db
        impl_lib = sch_db.lib_name
        tbm.update(work_dir=sim_dir, tb_name=tb_name, sim=self._sim)

        # update tb_params
        if dut is None:
            cv_info_list = []
            dut_netlist = None
            dut_mtime = None
        else:
            cv_info_list = dut.cv_info_list
            dut_netlist = dut.netlist_path
            dut_mtime = dut.netlist_path.stat().st_mtime
            tb_params = _set_dut(tb_params, impl_lib, dut.cell_name)
        sim_netlist = tbm.sim_netlist_path
        sim_data_path = self._sim.get_sim_file(sim_dir, sim_id)

        # check if DUT netlist is updated
        if sim_data_path.exists():
            force_sim = self._force_sim
            data_mtime = sim_data_path.stat().st_mtime
        else:
            # no cached data: always simulate.
            force_sim = True
            data_mtime = -1

        # save previous simulation netlist, if exists
        prev_netlist = sim_netlist.with_name(sim_netlist.name + '.bak')
        if sim_netlist.exists():
            shutil.move(str(sim_netlist), str(prev_netlist))
        elif prev_netlist.exists():
            # no current netlist to back up; drop the stale backup so the
            # comparison below cannot match against outdated content.
            prev_netlist.unlink()

        self.log(f'Configuring testbench manager {tbm.__class__.__name__}')
        tbm.setup(sch_db, tb_params, cv_info_list, dut_netlist, gen_sch=self._dsn_db.gen_sch)
        if not sim_netlist.is_file():
            self.error(f'Cannot find simulation netlist: {sim_netlist}')

        # determine whether to run simulation
        if (not force_sim and prev_netlist.exists() and
                filecmp.cmp(sim_netlist, prev_netlist, shallow=False)):
            # simulation netlist is not modified
            if dut_mtime is not None and dut_mtime >= data_mtime:
                # DUT netlist is modified, re-run simulation
                self.log(f'DUT netlist mtime = {dut_mtime} >= sim data mtime = {data_mtime}, '
                         'Re-running simulation.')
                run_sim = True
            else:
                run_sim = False
        else:
            run_sim = True

        if run_sim:
            self.log(f'Simulating netlist: {sim_netlist}')
            await self._sim.async_run_simulation(sim_netlist, sim_id)
            self.log(f'Finished simulating {sim_netlist}')
        else:
            self.log('Returning previous simulation data')

        return SimResults(dut, tbm, load_sim_data_hdf5(sim_data_path))

    async def async_simulate_mm_obj(self, sim_id: str, sim_dir: Path, dut: Optional[DesignInstance],
                                    mm: MeasurementManager) -> MeasureResult:
        """Run a measurement manager on the given design."""
        result = await mm.async_measure_performance(sim_id, sim_dir, self, dut)
        return MeasureResult(dut, mm, result)
+
+
+def _set_dut(tb_params: Optional[Mapping[str, Any]], dut_lib: str, dut_cell: str
+ ) -> Optional[Mapping[str, Any]]:
+ """Returns a copy of the testbench parameters dictionary with DUT instantiated.
+
+ This method updates the testbench parameters dictionary so that the DUT is instantiated
+ statically in the inner-most wrapper.
+ """
+ if tb_params is None:
+ return tb_params
+
+ ans = {k: v for k, v in tb_params.items()}
+ dut_params: Optional[Mapping[str, Any]] = tb_params.get('dut_params', None)
+ if dut_params is None:
+ ans['dut_lib'] = dut_lib
+ ans['dut_cell'] = dut_cell
+ else:
+ ans['dut_params'] = _set_dut(dut_params, dut_lib, dut_cell)
+ return ans
diff --git a/src/bag/simulation/core.py b/src/bag/simulation/core.py
new file mode 100644
index 0000000..fd10c7d
--- /dev/null
+++ b/src/bag/simulation/core.py
@@ -0,0 +1,1119 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from typing import (
+ TYPE_CHECKING, Optional, Dict, Any, Tuple, List, Iterable, Sequence, Type, Mapping, Union, cast
+)
+
+import abc
+import importlib
+import itertools
+from pathlib import Path
+from copy import deepcopy
+
+import numpy as np
+
+from pybag.enum import DesignOutput, LogLevel
+from pybag.core import FileLogger
+
+from ..math import float_to_si_string
+from ..io.file import read_yaml, write_yaml
+from ..util.immutable import ImmutableList
+from ..util.math import Calculator
+from ..layout.template import TemplateDB, TemplateBase
+from ..design.database import ModuleDB, ModuleType
+from ..design.module import Module
+from ..concurrent.core import batch_async_task
+from ..design.netlist import add_mismatch_offsets
+
+from .base import SimAccess
+from .data import SimNetlistInfo, SimData, swp_info_from_struct
+
+if TYPE_CHECKING:
+ from ..core import BagProject
+
+
class TestbenchManager(abc.ABC):
    """A class that creates and sets up a testbench for simulation, then saves the result.

    This class is used by MeasurementManager to run simulations.

    Parameters
    ----------
    sim : SimAccess
        the simulator interface object.
    work_dir : Path
        working directory path.
    tb_name : str
        testbench name.
    impl_lib : str
        unused.  Remains for backward compatibility.
    specs : Mapping[str, Any]
        testbench specs.
    sim_view_list : Optional[Sequence[Tuple[str, str]]]
        unused.  Remains for backward compatibility.
    env_list : Optional[Sequence[str]]
        unused.  Remains for backward compatibility.
    precision : int
        numeric precision in simulation netlist generation.
    logger : Optional[FileLogger]
        the logger object.

    Notes
    -----
    The specification dictionary for all testbenches has the following entries:

    sim_envs : Sequence[str]
        list of simulation environments.
    sim_params : Mapping[str, Any]
        simulation parameters dictionary.
    swp_info : Union[Sequence[Any], Mapping[str, Any]]
        Optional.  the parameter sweep data structure.
    sim_options : Mapping[str, Any]
        Optional.  Simulator-specific options.
    monte_carlo_params : Mapping[str, Any]
        Optional.  If specified, will run Monte Carlo with the given parameters.
    """

    # noinspection PyUnusedLocal
    def __init__(self, sim: Optional[SimAccess], work_dir: Path, tb_name: str, impl_lib: str,
                 specs: Mapping[str, Any], sim_view_list: Optional[Sequence[Tuple[str, str]]],
                 env_list: Optional[Sequence[str]], precision: int = 6,
                 logger: Optional[FileLogger] = None) -> None:
        # TODO: refactor TestbenchManager to remove unused variables
        # TODO: remove TestbenchManager dependency on SimAccess.  Make it look more like
        # TODO: MeasurementManager in general
        self._sim = sim
        self._work_dir = work_dir.resolve()
        self._tb_name = tb_name
        self._precision = precision
        self._logger = logger

        self._work_dir.mkdir(parents=True, exist_ok=True)
        # Deep-copy all entries except sim_params/env_params; those two get dedicated
        # shallow copies below so their values remain shared with the caller.
        # BUGFIX: use equality (!=) instead of identity (is not) on string literals;
        # the original relied on CPython string interning and raises SyntaxWarning.
        self._specs: Dict[str, Any] = {k: deepcopy(v) for k, v in specs.items()
                                       if k != 'sim_params' and k != 'env_params'}
        self._specs['sim_params'] = dict(specs['sim_params'])
        self._specs['env_params'] = {k: v.copy() for k, v in specs.get('env_params', {}).items()}
        self.commit()

    @property
    def logger(self) -> FileLogger:
        """FileLogger: the logger object (may be None; log() then prints to stdout)."""
        return self._logger

    @property
    def specs(self) -> Dict[str, Any]:
        """Dict[str, Any]: the testbench specification dictionary."""
        return self._specs

    @property
    def swp_info(self) -> Union[Sequence[Any], Mapping[str, Any]]:
        """The parameter sweep data structure; empty list if no sweep is defined."""
        return self._specs.get('swp_info', [])

    @property
    def sim_params(self) -> Dict[str, Union[float, str]]:
        """Dict[str, Union[float, str]]: Simulation parameters dictionary, can be modified."""
        return self._specs['sim_params']

    @property
    def env_params(self) -> Dict[str, Dict[str, float]]:
        """Dict[str, Dict[str, float]]: per-environment parameter value tables."""
        return self._specs['env_params']

    @property
    def num_sim_envs(self) -> int:
        return len(self._specs['sim_envs'])

    @property
    def sim_envs(self) -> Sequence[str]:
        # NOTE: returns the environments sorted, whereas get_netlist_info_dict() uses
        # the raw (unsorted) list from the specs dictionary.
        return sorted(self._specs['sim_envs'])

    @property
    def tb_netlist_path(self) -> Path:
        """Path: location of the testbench schematic netlist."""
        return self._work_dir / f'tb.{self._sim.netlist_type.extension}'

    @property
    def sim_netlist_path(self) -> Path:
        """Path: location of the final simulation netlist (tb netlist + analyses)."""
        return self._work_dir / f'sim.{self._sim.netlist_type.extension}'

    @property
    def work_dir(self) -> Path:
        return self._work_dir

    @property
    def precision(self) -> int:
        return self._precision

    @property
    def sweep_shape(self) -> Tuple[int, ...]:
        """Tuple[int, ...]: (num_sim_envs,) followed by the sweep dimensions."""
        return self.get_sweep_shape(self.num_sim_envs, self.swp_info)

    @classmethod
    @abc.abstractmethod
    def get_schematic_class(cls) -> Type[Module]:
        """Return the schematic generator class for this testbench."""
        pass

    @classmethod
    def get_sweep_shape(cls, num_envs: int, swp: Union[Sequence[Any], Mapping[str, Any]]
                        ) -> Tuple[int, ...]:
        """Return the data shape for the given number of corners and sweep structure."""
        obj = swp_info_from_struct(swp)
        return (num_envs,) + obj.shape

    @abc.abstractmethod
    def get_netlist_info(self) -> SimNetlistInfo:
        """Returns the netlist information object.

        Returns
        -------
        netlist_info : SimNetlistInfo
            the simulation netlist information object.
        """
        pass

    def set_sim_envs(self, env_list: Sequence[str]) -> None:
        self._specs['sim_envs'] = env_list

    def set_swp_info(self, new_swp: Union[Sequence[Any], Mapping[str, Any]]) -> None:
        self._specs['swp_info'] = new_swp

    def commit(self) -> None:
        """Commit changes to specs dictionary.  Perform necessary initialization."""
        pass

    # noinspection PyMethodMayBeStatic
    def pre_setup(self, sch_params: Optional[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]:
        """Override to perform any operations prior to calling the setup() function.

        Parameters
        ----------
        sch_params :
            the testbench schematic parameters.  None means the previous testbench will
            be reused.  This dictionary should not be modified.

        Returns
        -------
        new_params :
            the schematic parameters to use.  Could be a modified copy of the original.
        """
        return sch_params

    def print_results(self, data: SimData) -> None:
        """Override to print results."""
        pass

    def get_netlist_info_dict(self) -> Dict[str, Any]:
        """Returns a dictionary representing the SimNetlistInfo object.

        This is a helper function that performs common boiler-plate setup (set corners,
        swp_info, sim_params, etc.)
        """
        sim_envs: Sequence[str] = self._specs['sim_envs']
        # expand each env_params table into a per-corner value list
        env_values = {var: [val_table[env] for env in sim_envs]
                      for var, val_table in self.env_params.items()}
        sim_setup = dict(
            sim_envs=sim_envs,
            params=self.sim_params,
            swp_info=self.swp_info,
            options=self._specs.get('sim_options', {}),
            monte_carlo=self._specs.get('monte_carlo_params', {}),
            env_params=env_values,
        )
        return sim_setup

    def get_env_param_value(self, name: str, sim_envs: Sequence[str]) -> np.ndarray:
        """Return the per-corner values of the given environment parameter.

        Raises KeyError if the parameter (or any corner) is not in env_params.
        """
        env_params: Mapping[str, Mapping[str, float]] = self._specs.get('env_params', {})
        val_table = env_params[name]
        return np.array([val_table[env] for env in sim_envs])

    def get_sim_param_value(self, val: Union[float, str]) -> float:
        """Resolve a simulation parameter value, following string references."""
        cur_val = val
        sim_params = self.sim_params
        # a string value names another sim_params entry; chase until numeric
        while isinstance(cur_val, str):
            cur_val = sim_params[cur_val]
        return cur_val

    def get_param_value(self, name: str, data: SimData) -> np.ndarray:
        """Return the value of the given parameter, broadcast to the sweep shape."""
        try:
            return data.get_param_value(name)
        except ValueError:
            # this parameter is not swept
            env_params: Mapping[str, Mapping[str, float]] = self._specs.get('env_params', {})

            data_shape = self.sweep_shape
            val_table = env_params.get(name, None)
            if val_table is None:
                # this parameter is constant
                val = self.sim_params[name]
                if isinstance(val, str):
                    # this parameter is an expression
                    return self.get_calculator(data).eval(val)
                return np.full(data_shape, val)
            else:
                # this param is not constant
                return _get_env_param_value(data.sim_envs, data_shape, val_table)

    def get_calculator(self, data: SimData) -> Calculator:
        """Return a Calculator whose namespace maps every parameter to an array."""
        # TODO: to avoid potential re-creation of get_calculator() over and over again,
        # TODO: consider just storing these data in SimData on creation.
        env_params: Mapping[str, Mapping[str, float]] = self._specs.get('env_params', {})

        # get sweep param values
        swp = swp_info_from_struct(self.swp_info)
        namespace = {name: data.get_param_value(name) for name in swp}

        # get env param values
        sim_envs = data.sim_envs
        data_shape = self.sweep_shape
        for name, val_table in env_params.items():
            if name not in namespace:
                namespace[name] = _get_env_param_value(sim_envs, data_shape, val_table)

        # get sim param values; string values are expressions to evaluate later
        expr_map = {}
        for name, val in self.sim_params.items():
            if name not in namespace:
                if isinstance(val, str):
                    expr_map[name] = val
                else:
                    namespace[name] = np.full(data_shape, val)

        # iteratively resolve expressions; each pass must resolve at least one,
        # otherwise there is a circular dependency.
        while expr_map:
            success_once = False
            new_expr_map = {}
            for name, expr in expr_map.items():
                try:
                    val = Calculator(namespace).eval(expr)
                    namespace[name] = val
                    success_once = True
                except KeyError:
                    new_expr_map[name] = expr
            if not success_once:
                raise ValueError('sim_params circular dependency found.')
            expr_map = new_expr_map

        return Calculator(namespace)

    def get_sim_param_string(self, val: Union[float, str]) -> str:
        """Format a simulation parameter value for netlist emission."""
        if isinstance(val, str):
            return val
        return float_to_si_string(val, self.precision)

    def update(self, work_dir: Optional[Path], tb_name: str,
               sim: Optional[SimAccess] = None) -> None:
        """Update the working directory and testbench name.

        This method allows you to reuse the same testbench manager objects for different
        testbenches.  Arguments that are None/empty leave the current value unchanged.
        """
        if work_dir is not None:
            self._work_dir = work_dir.resolve()
            self._work_dir.mkdir(parents=True, exist_ok=True)
        if tb_name:
            self._tb_name = tb_name
        if sim is not None:
            self._sim = sim

    def setup(self, sch_db: ModuleDB, sch_params: Optional[Mapping[str, Any]],
              dut_cv_info_list: List[Any], dut_netlist: Optional[Path], gen_sch: bool = True,
              work_dir: Optional[Path] = None, tb_name: str = '') -> None:
        """Generate the testbench netlists (and optionally its schematic).

        A sch_params of None reuses the previously generated testbench netlist; the
        simulation netlist is regenerated either way since analyses may have changed.
        """
        self.update(work_dir, tb_name)

        tb_netlist_path = self.tb_netlist_path
        sim_netlist_path = self.sim_netlist_path
        sch_params = self.pre_setup(sch_params)
        if sch_params is not None:
            # noinspection PyTypeChecker
            sch_master = sch_db.new_master(self.get_schematic_class(), sch_params)
            if gen_sch:
                self.log(f'Creating testbench {self._tb_name} schematic master')
                sch_db.batch_schematic([(sch_master, self._tb_name)])
                self.log(f'Testbench {self._tb_name} schematic master done')

            # create netlist for tb schematic
            self.log(f'Creating testbench {self._tb_name} netlist')
            net_str = '' if dut_netlist is None else str(dut_netlist.resolve())
            sch_db.batch_schematic([(sch_master, self._tb_name)], output=self._sim.netlist_type,
                                   top_subckt=False, fname=str(tb_netlist_path),
                                   cv_info_list=dut_cv_info_list, cv_netlist=net_str)
            self.log(f'Testbench {self._tb_name} netlisting done')

        netlist_info = self.get_netlist_info()
        self._sim.create_netlist(sim_netlist_path, tb_netlist_path, netlist_info,
                                 self._precision)

    async def async_simulate(self) -> None:
        """Coroutine that runs the simulation for this testbench."""
        self.log(f'Simulating {self._tb_name}')
        await self._sim.async_run_simulation(self.sim_netlist_path, 'sim')
        self.log(f'Finished simulating {self._tb_name}')

    def simulate(self) -> None:
        """Blocking wrapper around async_simulate()."""
        coro = self.async_simulate()
        batch_async_task([coro])

    def load_sim_data(self) -> SimData:
        """Load the simulation results from the working directory."""
        return self._sim.load_sim_data(self._work_dir, 'sim')

    def log(self, msg: str, level: LogLevel = LogLevel.INFO) -> None:
        """Log a message; falls back to stdout when no logger is configured."""
        if self._logger is None:
            print(msg)
        else:
            self._logger.log(level, msg)

    def error(self, msg: str) -> None:
        """Log an error message (if a logger exists) and raise ValueError."""
        if self._logger is not None:
            self._logger.log(LogLevel.ERROR, msg)
        raise ValueError(msg)
+
+
def _get_env_param_value(sim_envs: Sequence[str], data_shape: Tuple[int, ...],
                         val_table: Mapping[str, float]) -> np.ndarray:
    """Broadcast per-corner values from val_table to the full sweep shape.

    The first axis of data_shape indexes simulation environments; each remaining
    axis is filled by repeating the per-environment value.
    """
    per_env = np.array([val_table[env] for env in sim_envs])
    bcast_shape = (len(sim_envs),) + (1,) * (len(data_shape) - 1)
    return np.broadcast_to(per_env.reshape(bcast_shape), data_shape)
+
+
class MeasurementManager(abc.ABC):
    """A class that handles circuit performance measurement.

    This class handles all the steps needed to measure a specific performance
    metric of the device-under-test.  This may involve creating and simulating
    multiple different testbenches, where configuration of successive testbenches
    depends on previous simulation results.  This class reduces the potentially
    complex measurement tasks into a few simple abstract methods that designers
    simply have to implement.

    Parameters
    ----------
    sim : SimAccess
        the simulator interface object.
    dir_path : Path
        Simulation data directory.
    meas_name : str
        measurement setup name.
    impl_lib : str
        implementation library name.
    specs : Dict[str, Any]
        the measurement specification dictionary.
    wrapper_lookup : Dict[str, str]
        the DUT wrapper cell name lookup table.
    sim_view_list : Sequence[Tuple[str, str]]
        simulation view list
    env_list : Sequence[str]
        simulation environments list.
    precision : int
        numeric precision in simulation netlist generation.
    """

    def __init__(self, sim: SimAccess, dir_path: Path, meas_name: str, impl_lib: str,
                 specs: Dict[str, Any], wrapper_lookup: Dict[str, str],
                 sim_view_list: Sequence[Tuple[str, str]], env_list: Sequence[str],
                 precision: int = 6) -> None:
        self._sim = sim
        self._dir_path = dir_path.resolve()
        self._meas_name = meas_name
        self._impl_lib = impl_lib
        # NOTE: specs is stored by reference (not copied); callers share mutations.
        self._specs = specs
        self._wrapper_lookup = wrapper_lookup
        self._sim_view_list = sim_view_list
        self._env_list = env_list
        self._precision = precision

        # make sure the simulation data directory exists
        self._dir_path.mkdir(parents=True, exist_ok=True)

    @abc.abstractmethod
    def get_initial_state(self) -> str:
        """Returns the initial FSM state."""
        return ''

    def get_testbench_info(self, state: str, prev_output: Optional[Dict[str, Any]]
                           ) -> Tuple[str, str, Dict[str, Any], Optional[Dict[str, Any]]]:
        """Get information about the next testbench.

        Override this method to perform more complex operations.

        Parameters
        ----------
        state : str
            the current FSM state.
        prev_output : Optional[Dict[str, Any]]
            the previous post-processing output.

        Returns
        -------
        tb_name : str
            cell name of the next testbench.  Should incorporate self.meas_name to avoid
            collision with testbench for other designs.
        tb_type : str
            the next testbench type.
        tb_specs : Dict[str, Any]
            the testbench specification dictionary.
        tb_params : Optional[Dict[str, Any]]
            the next testbench schematic parameters.  If we are reusing an existing
            testbench, this should be None.
        """
        # default behavior: the FSM state name doubles as the testbench type
        tb_type = state
        tb_name = self.get_testbench_name(tb_type)
        tb_specs = self.get_testbench_specs(tb_type).copy()
        tb_params = self.get_default_tb_sch_params(tb_type)

        return tb_name, tb_type, tb_specs, tb_params

    @abc.abstractmethod
    def process_output(self, state: str, data: SimData, tb_manager: TestbenchManager
                       ) -> Tuple[bool, str, Dict[str, Any]]:
        """Process simulation output data.

        Parameters
        ----------
        state : str
            the current FSM state
        data : SimData
            simulation data dictionary.
        tb_manager : TestbenchManager
            the testbench manager object.

        Returns
        -------
        done : bool
            True if this measurement is finished.
        next_state : str
            the next FSM state.
        output : Dict[str, Any]
            a dictionary containing post-processed data.
        """
        return False, '', {}

    @property
    def specs(self) -> Dict[str, Any]:
        """Dict[str, Any]: the measurement specification dictionary."""
        return self._specs

    @property
    def data_dir(self) -> Path:
        """Path: the simulation data directory."""
        return self._dir_path

    @property
    def sim_envs(self) -> Sequence[str]:
        """Sequence[str]: the simulation environments list."""
        return self._env_list

    def get_testbench_name(self, tb_type: str) -> str:
        """Returns a default testbench name given testbench type."""
        return f'{self._meas_name}_TB_{tb_type}'

    async def async_measure_performance(self, sch_db: Optional[ModuleDB], dut_cvi_list: List[Any],
                                        dut_netlist: Optional[Path], load_from_file: bool = False,
                                        gen_sch: bool = True) -> Dict[str, Any]:
        """A coroutine that performs measurement.

        The measurement is done like a FSM.  On each iteration, depending on the current
        state, it creates a new testbench (or reuse an existing one) and simulate it.
        It then post-process the simulation data to determine the next FSM state, or
        if the measurement is done.

        Parameters
        ----------
        sch_db : Optional[ModuleDB]
            the schematic database.

            if load_from_file is True, this can be None. as it will not be used unless necessary.
        dut_cvi_list : List[str]
            cv_info for DUT cell netlist

            if load_from_file is True, this will not be used unless necessary.
        dut_netlist : Optional[Path]
            netlist of DUT cell

            if load_from_file is True, this will not be used unless necessary.
        load_from_file : bool
            If True, then load existing simulation data instead of running actual simulation.
        gen_sch : bool
            True to create testbench schematics.

        Returns
        -------
        output : Dict[str, Any]
            the last dictionary returned by process_output().
        """
        cur_state = self.get_initial_state()
        prev_output = None
        done = False

        # FSM loop: each iteration configures one testbench, obtains its data,
        # and lets process_output() decide the next state.
        while not done:
            # create and setup testbench
            tb_name, tb_type, tb_specs, tb_sch_params = self.get_testbench_info(cur_state,
                                                                                prev_output)

            # the TestbenchManager subclass is loaded dynamically from the specs
            tb_package = tb_specs['tb_package']
            tb_cls_name = tb_specs['tb_class']
            tb_module = importlib.import_module(tb_package)
            tb_cls = getattr(tb_module, tb_cls_name)
            # each FSM state gets its own working subdirectory
            work_dir = self._dir_path / cur_state
            tb_manager: TestbenchManager = tb_cls(self._sim, work_dir, tb_name, self._impl_lib,
                                                  tb_specs, self._sim_view_list, self._env_list,
                                                  precision=self._precision)

            if load_from_file:
                print(f'Measurement {self._meas_name} in state {cur_state}, '
                      'load sim data from file.')
                try:
                    cur_results = tb_manager.load_sim_data()
                except FileNotFoundError:
                    # EAFP fallback: saved data missing, run the simulation after all
                    print('Cannot find data file, simulating...')
                    if sch_db is None or not dut_cvi_list or dut_netlist is None:
                        raise ValueError('Cannot create testbench as DUT netlist not given.')

                    tb_manager.setup(sch_db, tb_sch_params, dut_cv_info_list=dut_cvi_list,
                                     dut_netlist=dut_netlist, gen_sch=gen_sch)
                    await tb_manager.async_simulate()
                    cur_results = tb_manager.load_sim_data()
            else:
                # NOTE(review): this branch duplicates the fallback path above;
                # consider extracting a shared helper.
                tb_manager.setup(sch_db, tb_sch_params, dut_cv_info_list=dut_cvi_list,
                                 dut_netlist=dut_netlist, gen_sch=gen_sch)
                await tb_manager.async_simulate()
                cur_results = tb_manager.load_sim_data()

            # process and save simulation data
            print(f'Measurement {self._meas_name} in state {cur_state}, '
                  f'processing data from {tb_type}')
            done, next_state, prev_output = self.process_output(cur_state, cur_results, tb_manager)
            # persist each state's post-processed output for get_state_output()
            write_yaml(self._dir_path / f'{cur_state}.yaml', prev_output)

            cur_state = next_state

        # final output is saved under the measurement name
        write_yaml(self._dir_path / f'{self._meas_name}.yaml', prev_output)
        return prev_output

    def measure_performance(self, sch_db: Optional[ModuleDB], dut_cvi_list: List[Any],
                            dut_netlist: Optional[Path], load_from_file: bool = False,
                            gen_sch: bool = True) -> Dict[str, Any]:
        """Blocking wrapper around async_measure_performance()."""
        coro = self.async_measure_performance(sch_db, dut_cvi_list, dut_netlist,
                                              load_from_file=load_from_file,
                                              gen_sch=gen_sch)
        return batch_async_task([coro])[0]

    def get_state_output(self, state: str) -> Dict[str, Any]:
        """Get the post-processed output of the given state."""
        return read_yaml(self._dir_path / f'{state}.yaml')

    def get_testbench_specs(self, tb_type: str) -> Dict[str, Any]:
        """Helper method to get testbench specifications."""
        return self._specs['testbenches'][tb_type]

    def get_default_tb_sch_params(self, tb_type: str) -> Dict[str, Any]:
        """Helper method to return a default testbench schematic parameters dictionary.

        This method loads default values from specification file, then fills in dut_lib
        and dut_cell for you.

        Parameters
        ----------
        tb_type : str
            the testbench type.

        Returns
        -------
        sch_params : Dict[str, Any]
            the default schematic parameters dictionary.
        """
        tb_specs = self.get_testbench_specs(tb_type)
        # empty wrapper_type maps to the bare DUT cell in wrapper_lookup
        wrapper_type = tb_specs.get('wrapper_type', '')

        if 'sch_params' in tb_specs:
            tb_params = tb_specs['sch_params'].copy()
        else:
            tb_params = {}

        tb_params['dut_lib'] = self._impl_lib
        tb_params['dut_cell'] = self._wrapper_lookup[wrapper_type]
        return tb_params
+
+
class DesignSpecs:
    """A class that parses the design specification file.

    Parameters
    ----------
    spec_file : str
        the specification YAML file, or a data directory containing 'specs.yaml'.
        Ignored when spec_dict is given.
    spec_dict : Optional[Dict[str, Any]]
        the specification dictionary; takes precedence over spec_file.

    Raises
    ------
    ValueError
        if neither spec_dict nor a usable spec_file is given.
    """

    def __init__(self, spec_file: str, spec_dict: Optional[Dict[str, Any]] = None) -> None:
        if spec_dict:
            self._specs = spec_dict
            self._root_dir: Path = Path(self._specs['root_dir']).resolve()
        elif spec_file:
            spec_path = Path(spec_file).resolve()
            if spec_path.is_file():
                self._specs = read_yaml(spec_path)
                self._root_dir: Path = Path(self._specs['root_dir']).resolve()
            elif spec_path.is_dir():
                self._root_dir: Path = spec_path
                self._specs = read_yaml(self._root_dir / 'specs.yaml')
            else:
                raise ValueError(f'{spec_path} is neither a data directory '
                                 'nor a specification file.')
        else:
            raise ValueError('spec_file is empty.')

        # Layout generation is enabled only when both class path entries are non-empty.
        # Use .get() so schematic-only spec files may omit the layout keys entirely.
        cls_package = self._specs.get('layout_package', '')
        cls_name = self._specs.get('layout_class', '')
        self._create_layout = bool(cls_package and cls_name)

        self._swp_var_list: ImmutableList[str] = ImmutableList(
            sorted(self._specs['sweep_params'].keys()))
        self._sweep_params = self._specs['sweep_params']
        self._params = self._specs['layout_params' if self._create_layout else 'schematic_params']

    @property
    def root_dir(self) -> Path:
        return self._root_dir

    @property
    def swp_var_list(self) -> ImmutableList[str]:
        """ImmutableList[str]: the sweep variable names, sorted."""
        return self._swp_var_list

    @property
    def dut_lib(self) -> str:
        return self._specs['dut_lib']

    @property
    def dut_cell(self) -> str:
        return self._specs['dut_cell']

    @property
    def impl_lib(self) -> str:
        return self._specs['impl_lib']

    @property
    def env_list(self) -> List[str]:
        return self._specs['env_list']

    @property
    def view_name(self) -> str:
        return self._specs['view_name']

    @property
    def dsn_basename(self) -> str:
        return self._specs['dsn_basename']

    @property
    def summary_fname(self) -> str:
        return self._specs['summary_fname']

    @property
    def specs(self) -> Dict[str, Any]:
        return self._specs

    @property
    def create_layout(self) -> bool:
        """bool: True if layout generator class information is specified."""
        return self._create_layout

    @property
    def first_params(self) -> Dict[str, Any]:
        """Dict[str, Any]: the parameters of the first sweep combination."""
        combo = [self._sweep_params[key][0] for key in self._swp_var_list]
        return self._get_params(combo)

    def get_data_dir(self, dsn_name: str, meas_type: str) -> Path:
        """Returns the data directory path for the given measurement."""
        return self._root_dir.joinpath(dsn_name, meas_type)

    def get_swp_values(self, var: str) -> List[Any]:
        """Returns a list of valid sweep variable values.

        Parameters
        ----------
        var : str
            the sweep variable name.

        Returns
        -------
        val_list : List[Any]
            the sweep values of the given variable.
        """
        return self._sweep_params[var]

    def swp_combo_iter(self) -> Iterable[Tuple[Any, ...]]:
        """Returns an iterator of schematic parameter combinations we sweep over.

        Returns
        -------
        combo_iter : Iterable[Tuple[Any, ...]]
            an iterator of tuples of schematic parameters values that we sweep over.
        """
        return itertools.product(*(self._sweep_params[var] for var in self._swp_var_list))

    def dsn_param_iter(self) -> Iterable[Tuple[str, Dict[str, Any]]]:
        """Returns an iterator of design cell name and the parameter dictionary."""
        for combo in self.swp_combo_iter():
            yield self.get_design_name(combo), self._get_params(combo)

    def dsn_name_iter(self) -> Iterable[str]:
        """Returns an iterator of design cell names."""
        return (self.get_design_name(combo) for combo in self.swp_combo_iter())

    def get_layout_class(self) -> Type[TemplateBase]:
        """Import and return the layout generator class named in the specs.

        Raises KeyError if the specs have no layout class information.
        """
        cls_package = self._specs['layout_package']
        cls_name = self._specs['layout_class']
        lay_module = importlib.import_module(cls_package)
        return getattr(lay_module, cls_name)

    def get_design_name(self, combo_list: Sequence[Any]) -> str:
        """Construct the design cell name for the given sweep value combination."""
        name = self.dsn_basename
        for var, val in zip(self.swp_var_list, combo_list):
            # NOTE: bool is a subclass of int and formats as 'True'/'False'
            if isinstance(val, (str, int)):
                name += f'_{var}_{val}'
            elif isinstance(val, float):
                name += f'_{var}_{float_to_si_string(val)}'
            else:
                raise ValueError(f'Unsupported parameter type: {type(val)}')

        return name

    def _get_params(self, combo_list: Sequence[Any]) -> Dict[str, Any]:
        """Return a copy of the base parameters overridden with the sweep values."""
        params = self._params.copy()
        params.update(zip(self._swp_var_list, combo_list))
        return params
+
+
+class DesignManager:
+ """A class that manages instantiating design instances and running simulations.
+
+ This class provides various methods to allow you to sweep design parameters
+ and generate multiple instances at once. It also provides methods for running
+ simulations and helps you interface with TestbenchManager instances.
+
+ Parameters
+ ----------
+ prj : BagProject
+ The BagProject instance.
+ spec_file : str
+ the specification file name or the data directory.
+ """
+
+ def __init__(self, prj: BagProject, spec_file: str = '',
+ spec_dict: Optional[Dict[str, Any]] = None, sch_db: Optional[ModuleDB] = None,
+ lay_db: Optional[TemplateDB] = None) -> None:
+ self._prj = prj
+ self._info = DesignSpecs(spec_file, spec_dict)
+
+ impl_lib = self._info.impl_lib
+ if sch_db is None:
+ self._sch_db = ModuleDB(prj.tech_info, impl_lib, prj=prj)
+ else:
+ self._sch_db = sch_db
+
+ if lay_db is None:
+ self._lay_db = TemplateDB(prj.grid, impl_lib, prj=prj)
+ else:
+ self._lay_db = lay_db
+
+ @classmethod
+ def load_state(cls, prj: BagProject, root_dir: str) -> DesignManager:
+ """Create the DesignManager instance corresponding to data in the given directory."""
+ return cls(prj, root_dir)
+
+ @classmethod
+ def get_wrapper_name(cls, dut_name: str, wrapper_name: str) -> str:
+ """Returns the wrapper cell name corresponding to the given DUT."""
+ return f'{dut_name}_WRAPPER_{wrapper_name}'
+
+ @property
+ def info(self) -> DesignSpecs:
+ """Return the specification dictionary."""
+ return self._info
+
+ async def extract_design(self, lib_name: str, dsn_name: str,
+ rcx_params: Optional[Dict[str, Any]], netlist: Optional[Path]) -> Path:
+ """A coroutine that runs LVS/RCX on a given design.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ dsn_name : str
+ design cell name.
+ rcx_params : Optional[Dict[str, Any]]
+ extraction parameters dictionary.
+ netlist: Path
+ CDL netlist path
+
+ Returns
+ -------
+ rcx_netlist : Path
+ extracted netlist path
+ """
+ print(f'Running LVS on {dsn_name}')
+ lvs_passed, lvs_log = await self._prj.async_run_lvs(lib_name, dsn_name, netlist=netlist,
+ run_rcx=True)
+ if not lvs_passed:
+ raise ValueError('LVS failed for %s. Log file: %s' % (dsn_name, lvs_log))
+
+ print(f'LVS passed on {dsn_name}')
+ print(f'Running RCX on {dsn_name}')
+ rcx_netlist, rcx_log = await self._prj.async_run_rcx(lib_name, dsn_name, params=rcx_params)
+ if not rcx_netlist:
+ raise ValueError(f'RCX failed for {dsn_name}. Log file: {rcx_log}')
+ print(f'RCX passed on {dsn_name}')
+ return Path(rcx_netlist)
+
+ async def verify_design(self, lib_name: str, dsn_name: str,
+ dut_cvi_list: List[Any], dut_netlist: Path,
+ load_from_file: bool = False, gen_sch: bool = True) -> None:
+ """Run all measurements on the given design.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ dsn_name : str
+ design cell name.
+ dut_cvi_list : List[str]
+ cv_info for DUT cell netlist
+ dut_netlist : Path
+ netlist of DUT cell
+ load_from_file : bool
+ If True, then load existing simulation data instead of running actual simulation.
+ gen_sch : bool
+ True to create testbench schematics.
+ """
+ root_dir = self._info.root_dir
+ env_list = self._info.env_list
+ view_name = self._info.view_name
+ summary_fname = self._info.summary_fname
+ meas_list = self._info.specs['measurements']
+ wrapper_list = self._info.specs['dut_wrappers']
+
+ wrapper_lookup = {'': dsn_name}
+ for wrapper_config in wrapper_list:
+ wrapper_type = wrapper_config['name']
+ wrapper_lookup[wrapper_type] = self.get_wrapper_name(dsn_name, wrapper_type)
+
+ result_summary = {}
+ dsn_data_dir = root_dir / dsn_name
+ for meas_specs in meas_list:
+ meas_type = meas_specs['meas_type']
+ meas_package = meas_specs['meas_package']
+ meas_cls_name = meas_specs['meas_class']
+ out_fname = meas_specs['out_fname']
+ data_dir = self._info.get_data_dir(dsn_name, meas_type)
+ meas_name = f'{dsn_name}_MEAS_{meas_type}'
+
+ meas_module = importlib.import_module(meas_package)
+ meas_cls = getattr(meas_module, meas_cls_name)
+
+ meas_manager: MeasurementManager = meas_cls(self._prj.sim_access, data_dir, meas_name,
+ lib_name, meas_specs, wrapper_lookup,
+ [(dsn_name, view_name)], env_list)
+ print(f'Performing measurement {meas_type} on {dsn_name}')
+ meas_res = await meas_manager.async_measure_performance(self._sch_db,
+ dut_cvi_list=dut_cvi_list,
+ dut_netlist=dut_netlist,
+ load_from_file=load_from_file,
+ gen_sch=gen_sch)
+ print(f'Measurement {meas_type} finished on {dsn_name}')
+
+ write_yaml(data_dir / out_fname, meas_res)
+ result_summary[meas_type] = meas_res
+
+ write_yaml(dsn_data_dir / summary_fname, result_summary)
+
+ async def main_task(self, lib_name: str, dsn_name: str, rcx_params: Optional[Dict[str, Any]],
+ dut_cv_info_list: List[str], dut_cdl_netlist: Path, dut_sim_netlist: Path,
+ extract: bool = True, measure: bool = True, load_from_file: bool = False,
+ gen_sch: bool = True, mismatch: bool = False) -> None:
+ """The main coroutine."""
+ if extract:
+ dut_sim_netlist = await self.extract_design(lib_name, dsn_name, rcx_params,
+ netlist=dut_cdl_netlist)
+ if measure:
+ # TODO: fix mismatch for extracted designs
+ if mismatch:
+ add_mismatch_offsets(dut_sim_netlist, dut_sim_netlist)
+ await self.verify_design(lib_name, dsn_name, load_from_file=load_from_file,
+ dut_cvi_list=dut_cv_info_list, dut_netlist=dut_sim_netlist,
+ gen_sch=gen_sch)
+
+ def characterize_designs(self, generate: bool = True, measure: bool = True,
+ load_from_file: bool = False, gen_sch: bool = True,
+ mismatch: bool = False) -> None:
+ """Sweep all designs and characterize them.
+
+ Parameters
+ ----------
+ generate : bool
+ If True, create schematic/layout and run LVS/RCX.
+ measure : bool
+ If True, run all measurements.
+ load_from_file : bool
+ If True, measurements will load existing simulation data
+ instead of running simulations.
+ gen_sch : bool
+ If True, schematics will be generated.
+ mismatch: bool
+ If True, add mismatch offset voltage sources to netlist
+ """
+ impl_lib = self._info.impl_lib
+ rcx_params = self._info.specs.get('rcx_params', None)
+
+ extract = generate and self._info.view_name != 'schematic'
+ flat_sch_netlist = mismatch and not extract
+
+ dut_info_list = self.create_designs(gen_sch=gen_sch, flat_sch_netlist=flat_sch_netlist)
+
+ coro_list = [self.main_task(impl_lib, dsn_name, rcx_params, extract=extract,
+ measure=measure, load_from_file=load_from_file,
+ dut_cv_info_list=cv_info_list,
+ dut_cdl_netlist=netlist_cdl,
+ dut_sim_netlist=netlist_sim, gen_sch=gen_sch,
+ mismatch=mismatch)
+ for dsn_name, cv_info_list, netlist_cdl, netlist_sim in dut_info_list]
+
+ results = batch_async_task(coro_list)
+ if results is not None:
+ for val in results:
+ if isinstance(val, Exception):
+ raise val
+
+ def get_result(self, dsn_name: str) -> Dict[str, Any]:
+ """Returns the measurement result summary dictionary.
+
+ Parameters
+ ----------
+ dsn_name : str
+ the design name.
+
+ Returns
+ -------
+ result : Dict[str, Any]
+ the result dictionary.
+ """
+ return read_yaml(self._info.root_dir / dsn_name / self._info.summary_fname)
+
+ def test_layout(self, gen_sch: bool = True) -> None:
+ """Create a test schematic and layout for debugging purposes"""
+
+ lay_params = self._info.first_params
+ dsn_name = self._info.dsn_basename + '_TEST'
+
+ print('create test layout')
+ sch_name_param_list = self.create_dut_layouts([(dsn_name, lay_params)])
+
+ if gen_sch:
+ print('create test schematic')
+ self.create_dut_schematics(sch_name_param_list, gen_wrappers=False)
+ print('done')
+
+ def create_designs(self, gen_sch: bool = True, flat_sch_netlist: bool = False
+ ) -> List[Tuple[str, List[Any], Path, Path]]:
+ """Create DUT schematics/layouts.
+ """
+ dsn_param_iter = self._info.dsn_param_iter()
+ if self._info.create_layout:
+ print('creating all layouts.')
+ dsn_param_iter = self.create_dut_layouts(dsn_param_iter)
+ print('layout creation done.')
+
+ return self.create_dut_schematics(dsn_param_iter, gen_wrappers=True, gen_sch=gen_sch,
+ flat_sch_netlist=flat_sch_netlist)
+
    def create_dut_schematics(self, name_param_iter: Iterable[Tuple[str, Dict[str, Any]]],
                              gen_wrappers: bool = True, gen_sch: bool = True,
                              flat_sch_netlist: bool = False) -> List[Tuple[str, List[Any], Path,
                                                                            Path]]:
        """Create DUT schematic masters (plus wrappers) and write their netlists.

        Returns a list of (design name, cell-view info list, CDL netlist path,
        simulation netlist path) tuples, one per design.
        """
        root_dir = self._info.root_dir
        dut_lib = self._info.dut_lib
        dut_cell = self._info.dut_cell
        impl_lib = self._info.impl_lib
        wrapper_list = self._info.specs['dut_wrappers']

        # simulation netlist format is dictated by the simulator backend
        netlist_type = self._prj.sim_access.netlist_type
        ext = netlist_type.extension
        dir_path = root_dir / 'designs' / impl_lib
        dir_path.mkdir(parents=True, exist_ok=True)

        results: List[Tuple[str, List[Any], Path, Path]] = []
        tot_info_list = []
        print('Generating DUT schematics and netlists')
        for cur_name, sch_params in name_param_iter:
            gen_cls = cast(Type[ModuleType], ModuleDB.get_schematic_class(dut_lib, dut_cell))
            sch_master = self._sch_db.new_master(gen_cls, sch_params)
            cur_info_list = [(sch_master, cur_name)]
            if gen_wrappers:
                # each wrapper instantiates the DUT with its own parameters
                for wrapper_config in wrapper_list:
                    wrapper_name = wrapper_config['name']
                    wrapper_lib = wrapper_config['lib']
                    wrapper_cell = wrapper_config['cell']
                    # copy so the dut_* entries below don't mutate the spec dict
                    wrapper_params = wrapper_config['params'].copy()
                    wrapper_params['dut_lib'] = dut_lib
                    wrapper_params['dut_cell'] = dut_cell
                    wrapper_params['dut_params'] = sch_params
                    gen_cls_wrap = cast(Type[ModuleType],
                                        ModuleDB.get_schematic_class(wrapper_lib, wrapper_cell))
                    sch_master_wrap = self._sch_db.new_master(gen_cls_wrap, wrapper_params)
                    cur_info_list.append((sch_master_wrap,
                                          self.get_wrapper_name(cur_name, wrapper_name)))

            dut_netlist_sim = dir_path / f'{cur_name}.{ext}'
            dut_netlist_cdl = dir_path / f'{cur_name}.cdl'

            dut_cv_info_list = []
            # simulation netlist; cv_info_out collects cell-view info for callers
            self._sch_db.batch_schematic(cur_info_list, output=netlist_type, top_subckt=True,
                                         cv_info_out=dut_cv_info_list, fname=str(dut_netlist_sim),
                                         flat=flat_sch_netlist)
            # CDL netlist (presumably consumed by LVS/extraction -- confirm)
            self._sch_db.batch_schematic(cur_info_list, output=DesignOutput.CDL,
                                         fname=str(dut_netlist_cdl), flat=flat_sch_netlist)

            tot_info_list.extend(cur_info_list)
            results.append((cur_name, dut_cv_info_list, dut_netlist_cdl, dut_netlist_sim))

        print('DUT generation and netlisting done.')
        if gen_sch:
            # one batch call for all designs and wrappers
            print('Creating schematics...')
            self._sch_db.batch_schematic(tot_info_list)
            print('schematic creation done.')

        return results
+
+ def create_dut_layouts(self, name_param_iter: Iterable[Tuple[str, Dict[str, Any]]]
+ ) -> Sequence[Tuple[str, Dict[str, Any]]]:
+ """Create multiple layouts"""
+ temp_cls = self._info.get_layout_class()
+
+ info_list, sch_name_param_list = [], []
+ for cell_name, lay_params in name_param_iter:
+ template = self._lay_db.new_template(params=lay_params, temp_cls=temp_cls, debug=False)
+ info_list.append((template, cell_name))
+ sch_name_param_list.append((cell_name, template.sch_params))
+ self._lay_db.batch_layout(info_list)
+ return sch_name_param_list
diff --git a/src/bag/simulation/data.py b/src/bag/simulation/data.py
new file mode 100644
index 0000000..6fd1c52
--- /dev/null
+++ b/src/bag/simulation/data.py
@@ -0,0 +1,836 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module handles high level simulation routines.
+
+This module defines SimAccess, which provides methods to run simulations
+and retrieve results.
+"""
+
+from __future__ import annotations
+from typing import (
+ Tuple, Union, Iterable, List, Dict, Any, Optional, TypeVar, Type, Sequence, ItemsView, Mapping
+)
+
+import math
+from enum import Enum
+from dataclasses import dataclass
+
+import numpy as np
+
+from pybag.enum import DesignOutput
+from pybag.core import convert_cdba_name_bit
+
+from ..util.immutable import ImmutableList, ImmutableSortedDict
+
+
+###############################################################################
+# Sweep specifications
+###############################################################################
+
class SweepSpecType(Enum):
    """How a sweep's values are specified: explicit list, linear range, or log range."""
    LIST = 0
    LINEAR = 1
    LOG = 2
+
+
class SweepInfoType(Enum):
    """Sweep structure: MD = multi-dimensional (outer product), SET = explicit point list."""
    MD = 0
    SET = 1
+
+
@dataclass(eq=True, frozen=True)
class SweepList:
    """Sweep specification given as an explicit list of values."""
    values: ImmutableList[float]

    def __len__(self) -> int:
        return len(self.values)

    @property
    def start(self) -> float:
        # first sweep value; mirrors SweepLinear.start/SweepLog.start
        return self.values[0]
+
+
@dataclass(eq=True, frozen=True)
class SweepLinear:
    """Linear sweep of num points from start to stop.

    stop is inclusive when endpoint is True.
    """
    start: float
    stop: float
    num: int
    endpoint: bool = True

    def __len__(self) -> int:
        return self.num

    @property
    def step(self) -> float:
        """Spacing between consecutive sweep points.

        For a degenerate sweep (num <= 1 with endpoint=True) the spacing is
        undefined; return 0.0 instead of raising ZeroDivisionError.
        """
        den = self.num - 1 if self.endpoint else self.num
        if den <= 0:
            return 0.0
        return (self.stop - self.start) / den

    @property
    def stop_inc(self) -> float:
        """The last value actually swept (inclusive stop)."""
        return self.stop if self.endpoint else self.start + (self.num - 1) * self.step
+
+
@dataclass(eq=True, frozen=True)
class SweepLog:
    """Logarithmic sweep of num points; stop is inclusive when endpoint is True."""
    start: float
    stop: float
    num: int
    endpoint: bool = True

    def __len__(self) -> int:
        return self.num

    @property
    def start_log(self) -> float:
        """log10 of the first sweep value."""
        return math.log10(self.start)

    @property
    def stop_log(self) -> float:
        """log10 of the stop value."""
        return math.log10(self.stop)

    @property
    def step_log(self) -> float:
        """Spacing between consecutive points in log10 space."""
        if self.endpoint:
            den = self.num - 1
        else:
            den = self.num
        return (self.stop_log - self.start_log) / den

    @property
    def stop_inc(self) -> float:
        """The last value actually swept (inclusive stop)."""
        if not self.endpoint:
            exponent = self.start_log + (self.num - 1) * self.step_log
            return 10.0 ** exponent
        return self.stop
+
+
+SweepSpec = Union[SweepLinear, SweepLog, SweepList]
+
+
def swp_spec_from_dict(table: Mapping[str, Any]) -> SweepSpec:
    """Build a SweepSpec from its dictionary representation."""
    swp_type = SweepSpecType[table['type']]
    if swp_type is SweepSpecType.LIST:
        return SweepList(ImmutableList(table['values']))
    if swp_type is SweepSpecType.LINEAR or swp_type is SweepSpecType.LOG:
        # LINEAR and LOG share the same constructor signature
        range_cls = SweepLinear if swp_type is SweepSpecType.LINEAR else SweepLog
        return range_cls(table['start'], table['stop'], table['num'],
                         table.get('endpoint', True))
    raise ValueError(f'Unsupported sweep type: {swp_type}')
+
+
@dataclass(eq=True, frozen=True)
class MDSweepInfo:
    """Multi-dimensional sweep: the outer product of per-parameter sweeps."""
    params: ImmutableList[Tuple[str, SweepSpec]]

    @property
    def ndim(self) -> int:
        # one sweep dimension per parameter
        return len(self.params)

    @property
    def stype(self) -> SweepInfoType:
        return SweepInfoType.MD

    @property
    def shape(self) -> Tuple[int, ...]:
        # one axis per parameter, sized by that parameter's sweep length
        return tuple((len(val[1]) for val in self.params))

    def __contains__(self, item: str) -> bool:
        for name, _ in self.params:
            if name == item:
                return True
        return False

    def __iter__(self) -> Iterable[str]:
        # iterate over parameter names only
        return (item[0] for item in self.params)

    def default_items(self) -> Iterable[Tuple[str, float]]:
        """Yield (name, first sweep value) for each parameter."""
        for name, spec in self.params:
            yield name, spec.start
+
+
@dataclass(eq=True, frozen=True)
class SetSweepInfo:
    """Sweep over an explicit list of points; each point assigns all parameters."""
    params: ImmutableList[str]
    values: ImmutableList[ImmutableList[float]]

    @property
    def stype(self) -> SweepInfoType:
        return SweepInfoType.SET

    @property
    def shape(self) -> Tuple[int, ...]:
        # NOTE: one-element tuple, not typo
        return len(self.values),

    def __contains__(self, item: str) -> bool:
        return item in self.params

    def __iter__(self) -> Iterable[str]:
        return self.params

    def default_items(self) -> Iterable[Tuple[str, float]]:
        """Yield (name, value at the first sweep point) for each parameter."""
        for idx, name in enumerate(self.params):
            yield name, self.values[0][idx]
+
+
+SweepInfo = Union[MDSweepInfo, SetSweepInfo]
+
+
def swp_info_from_struct(table: Union[Sequence[Tuple[str, Mapping[str, Any]]], Mapping[str, Any]]
                         ) -> SweepInfo:
    """Build a SweepInfo from its raw representation.

    A mapping (with 'params' and 'values' keys) yields a SetSweepInfo; a
    sequence of (param, spec dict) pairs yields an MDSweepInfo.

    Raises
    ------
    ValueError
        if a value combination's length does not match the number of params.
    """
    # idiomatic single isinstance call instead of two chained checks
    if isinstance(table, (dict, ImmutableSortedDict)):
        params = ImmutableList(table['params'])
        values = []
        num_par = len(params)
        for combo in table['values']:
            if len(combo) != num_par:
                raise ValueError('Invalid param set values.')
            values.append(ImmutableList(combo))

        return SetSweepInfo(params, ImmutableList(values))
    else:
        par_list = [(par, swp_spec_from_dict(spec)) for par, spec in table]
        return MDSweepInfo(ImmutableList(par_list))
+
+
+###############################################################################
+# Analyses
+###############################################################################
+
class AnalysisType(Enum):
    """Supported simulation analysis types."""
    DC = 0
    AC = 1
    TRAN = 2
    SP = 3
    NOISE = 4
    PSS = 5
    PAC = 6
    PNOISE = 7
+
+
class SPType(Enum):
    """Network parameter type computed by an S-parameter analysis."""
    S = 0
    Y = 1
    Z = 2
    YZ = 3
+
+
+T = TypeVar('T', bound='AnalysisSweep1D')
+
+
@dataclass(eq=True, frozen=True)
class AnalysisSweep1D:
    """Base class for analyses with an optional 1-D parameter sweep."""
    param: str
    sweep: Optional[SweepSpec]
    options: ImmutableSortedDict[str, str]
    save_outputs: ImmutableList[str]

    @classmethod
    def from_dict(cls: Type[T], table: Dict[str, Any], def_param: str = '') -> T:
        """Build from a dictionary; param/sweep are cleared unless both are present."""
        param = table.get('param', def_param)
        sweep = table.get('sweep', None)
        opt = table.get('options', {})
        out = table.get('save_outputs', [])
        if not param or sweep is None:
            # no usable sweep: normalize to (param='', sweep=None)
            param = ''
            swp = None
        else:
            swp = swp_spec_from_dict(sweep)

        return cls(param, swp, ImmutableSortedDict(opt), ImmutableList(out))

    @property
    def param_start(self) -> float:
        # first value of the swept parameter, or 0.0 when nothing is swept
        if self.param:
            return self.sweep.start
        return 0.0
+
+
@dataclass(eq=True, frozen=True)
class AnalysisDC(AnalysisSweep1D):
    """DC analysis with an optional 1-D sweep."""
    @property
    def name(self) -> str:
        return 'dc'
+
+
@dataclass(eq=True, frozen=True)
class AnalysisAC(AnalysisSweep1D):
    """AC analysis; freq is the fixed frequency when 'freq' is not the swept parameter."""
    freq: float

    @property
    def name(self) -> str:
        return 'ac'

    @classmethod
    def from_dict(cls: Type[T], table: Dict[str, Any], def_param: str = '') -> T:
        """Build from a dictionary; 'freq' is the default sweep parameter."""
        base = AnalysisSweep1D.from_dict(table, def_param='freq')
        if base.param != 'freq':
            # not sweeping frequency, so an explicit 'freq' entry is required
            freq_val = table['freq']
        else:
            # frequency is swept; the fixed-frequency field is unused
            freq_val = 0.0

        return cls(base.param, base.sweep, base.options, base.save_outputs, freq_val)
+
+
@dataclass(eq=True, frozen=True)
class AnalysisSP(AnalysisAC):
    """S-parameter analysis over the given ports."""
    ports: ImmutableList[str]
    param_type: SPType

    @property
    def name(self) -> str:
        return 'sp'
+
+
@dataclass(eq=True, frozen=True)
class AnalysisNoise(AnalysisAC):
    """Noise analysis; ports/probes may be empty strings when unused."""
    p_port: str
    n_port: str
    out_probe: str
    in_probe: str

    @property
    def name(self) -> str:
        return 'noise'
+
+
@dataclass(eq=True, frozen=True)
class AnalysisTran:
    """Transient analysis from start to stop (strobe/out_start semantics are
    simulator-specific -- see the netlister that consumes this)."""
    start: float
    stop: float
    strobe: float
    out_start: float
    options: ImmutableSortedDict[str, str]
    save_outputs: ImmutableList[str]

    @property
    def param(self) -> str:
        # transient has no swept parameter; matches AnalysisSweep1D interface
        return ''

    @property
    def param_start(self) -> float:
        return 0.0

    @property
    def name(self) -> str:
        return 'tran'
+
+
@dataclass(eq=True, frozen=True)
class AnalysisPSS:
    """Periodic steady-state analysis."""
    p_port: str
    n_port: str
    period: float
    fund: float
    autofund: bool
    options: ImmutableSortedDict[str, str]
    save_outputs: ImmutableList[str]

    @property
    def param(self) -> str:
        # PSS has no swept parameter; matches AnalysisSweep1D interface
        return ''

    @property
    def name(self) -> str:
        return 'pss'
+
+
@dataclass(eq=True, frozen=True)
class AnalysisPAC(AnalysisAC):
    """Periodic AC analysis; same fields as AnalysisAC."""

    @property
    def name(self) -> str:
        return 'pac'
+
+
@dataclass(eq=True, frozen=True)
class AnalysisPNoise(AnalysisNoise):
    """Periodic noise analysis; same fields as AnalysisNoise."""

    @property
    def name(self) -> str:
        return 'pnoise'
+
+
+AnalysisInfo = Union[AnalysisDC, AnalysisAC, AnalysisSP, AnalysisNoise, AnalysisTran,
+ AnalysisPSS, AnalysisPAC, AnalysisPNoise]
+
+
def analysis_from_dict(table: Dict[str, Any]) -> AnalysisInfo:
    """Factory: build the AnalysisInfo subclass matching table['type'].

    Raises
    ------
    ValueError
        if table['type'] names an unhandled AnalysisType.
    """
    ana_type = AnalysisType[table['type']]
    if ana_type is AnalysisType.DC:
        return AnalysisDC.from_dict(table)
    elif ana_type is AnalysisType.AC:
        return AnalysisAC.from_dict(table)
    elif ana_type is AnalysisType.SP:
        # SP/NOISE/PAC/PNOISE extend the AC base with extra fields
        base = AnalysisAC.from_dict(table)
        return AnalysisSP(base.param, base.sweep, base.options, base.save_outputs, base.freq,
                          ImmutableList(table['ports']), SPType[table['param_type']])
    elif ana_type is AnalysisType.NOISE:
        base = AnalysisAC.from_dict(table)
        return AnalysisNoise(base.param, base.sweep, base.options, base.save_outputs, base.freq,
                             table.get('p_port', ''), table.get('n_port', ''),
                             table.get('out_probe', ''), table.get('in_probe', ''))
    elif ana_type is AnalysisType.TRAN:
        return AnalysisTran(table.get('start', 0.0), table['stop'], table.get('strobe', 0.0),
                            table.get('out_start', -1.0),
                            ImmutableSortedDict(table.get('options', {})),
                            ImmutableList(table.get('save_outputs', [])))
    elif ana_type is AnalysisType.PSS:
        return AnalysisPSS(table.get('p_port', ''), table.get('n_port', ''),
                           table.get('period', 0.0), table.get('fund', 0.0),
                           table.get('autofund', False),
                           ImmutableSortedDict(table.get('options', {})),
                           ImmutableList(table.get('save_outputs', [])))
    elif ana_type is AnalysisType.PAC:
        base = AnalysisAC.from_dict(table)
        return AnalysisPAC(base.param, base.sweep, base.options, base.save_outputs, base.freq)
    elif ana_type is AnalysisType.PNOISE:
        base = AnalysisAC.from_dict(table)
        return AnalysisPNoise(base.param, base.sweep, base.options, base.save_outputs, base.freq,
                              table.get('p_port', ''), table.get('n_port', ''),
                              table.get('out_probe', ''), table.get('in_probe', ''))
    else:
        raise ValueError(f'Unknown analysis type: {ana_type}')
+
+
+###############################################################################
+# Simulation Netlist Info
+###############################################################################
+
@dataclass(eq=True, frozen=True)
class MonteCarlo:
    """Monte Carlo settings: number of runs, RNG seed, and extra simulator options."""
    numruns: int
    seed: int
    options: ImmutableSortedDict[str, Any]

    @property
    def name(self) -> str:
        return 'mc'
+
+
def monte_carlo_from_dict(mc_dict: Optional[Dict[str, Any]]) -> Optional[MonteCarlo]:
    """Build a MonteCarlo from its dictionary form; None/empty input yields None."""
    if not mc_dict:
        return None

    opts = ImmutableSortedDict(mc_dict.get('options', {}))
    return MonteCarlo(mc_dict['numruns'], mc_dict['seed'], options=opts)
+
+
@dataclass(eq=True, frozen=True)
class SimNetlistInfo:
    """Complete description of a simulation netlist: corners, analyses,
    parameters, sweeps, outputs, options, and optional Monte Carlo settings."""
    sim_envs: ImmutableList[str]
    analyses: ImmutableList[AnalysisInfo]
    params: ImmutableSortedDict[str, float]
    env_params: ImmutableSortedDict[str, ImmutableList[float]]
    swp_info: SweepInfo
    outputs: ImmutableSortedDict[str, str]
    options: ImmutableSortedDict[str, Any]
    monte_carlo: Optional[MonteCarlo]
    init_voltages: ImmutableSortedDict[str, Union[str, float]]

    @property
    def sweep_type(self) -> SweepInfoType:
        return self.swp_info.stype
+
+
def netlist_info_from_dict(table: Dict[str, Any]) -> SimNetlistInfo:
    """Build a SimNetlistInfo from its dictionary representation.

    Raises
    ------
    ValueError
        if 'sim_envs' is empty, or an env_params entry does not have one
        value per simulation environment.
    """
    sim_envs: List[str] = table['sim_envs']
    analyses: List[Dict[str, Any]] = table['analyses']
    params: Dict[str, float] = table.get('params', {})
    env_params: Dict[str, List[float]] = table.get('env_params', {})
    swp_info: Union[List[Tuple[str, Dict[str, Any]]], Dict[str, Any]] = table.get('swp_info', [])
    outputs: Dict[str, str] = table.get('outputs', {})
    options: Dict[str, Any] = table.get('options', {})
    monte_carlo: Optional[Dict[str, Any]] = table.get('monte_carlo', None)
    init_voltages: Dict[str, Union[str, float]] = table.get('init_voltages', {})

    if not sim_envs:
        raise ValueError('simulation environments list is empty')

    # each env_param must supply exactly one value per corner
    env_par_dict = {}
    num_env = len(sim_envs)
    for key, val in env_params.items():
        if len(val) != num_env:
            raise ValueError("Invalid env_param value.")
        env_par_dict[key] = ImmutableList(val)

    ana_list = [analysis_from_dict(val) for val in analyses]

    return SimNetlistInfo(ImmutableList(sim_envs), ImmutableList(ana_list),
                          ImmutableSortedDict(params), ImmutableSortedDict(env_par_dict),
                          swp_info_from_struct(swp_info), ImmutableSortedDict(outputs),
                          ImmutableSortedDict(options), monte_carlo_from_dict(monte_carlo),
                          ImmutableSortedDict(init_voltages))
+
+
+###############################################################################
+# Simulation data classes
+###############################################################################
+
class AnalysisData:
    """A data struct that stores simulation data from a single analysis"""

    def __init__(self, sweep_params: Sequence[str], data: Dict[str, np.ndarray],
                 is_md: bool) -> None:
        self._swp_pars = ImmutableList(sweep_params)
        self._data = data
        self._is_md = is_md
        swp_set = set(sweep_params)
        # any dataset that is not a sweep-parameter array is a signal
        self._signals = [key for key in data.keys() if key not in swp_set]

    def __getitem__(self, item: str) -> np.ndarray:
        return self._data[item]

    def __contains__(self, item: str) -> bool:
        return item in self._data

    @property
    def data_shape(self) -> Tuple[int, ...]:
        # shape of the first signal; empty tuple when there are no signals
        if not self._signals:
            return ()
        return self._data[self._signals[0]].shape

    @property
    def is_md(self) -> bool:
        # True when signals are multi-dimensional (one axis per sweep variable)
        return self._is_md

    @property
    def sweep_params(self) -> ImmutableList[str]:
        return self._swp_pars

    @property
    def signals(self) -> List[str]:
        return self._signals

    @classmethod
    def combine(cls, data_list: Sequence[AnalysisData], swp_name: str,
                swp_vals: Optional[np.ndarray] = None, axis: int = 0) -> AnalysisData:
        """Stack multiple AnalysisData objects along a new sweep axis named swp_name.

        Signals of differing shapes are NaN-padded to a common shape.  All
        entries are assumed to share data_list[0]'s sweep parameters.
        """
        ndata = len(data_list)
        if ndata < 1:
            raise ValueError('Must combine at least 1 data.')
        if swp_vals is None:
            # default sweep values are just the data indices
            swp_vals = np.arange(ndata)

        data0 = data_list[0]
        new_data = {}
        swp_par_list = list(data0.sweep_params)

        # get all signals
        max_size = None
        for sig in data0.signals:
            arr_list = [arr[sig] for arr in data_list]
            sizes = [x.shape for x in arr_list]
            # element-wise max over all shapes; smaller arrays get NaN padding
            max_size = np.max(list(zip(*sizes)), -1)
            cur_ans = np.full((len(arr_list),) + tuple(max_size), np.nan)
            for idx, arr in enumerate(arr_list):
                # noinspection PyTypeChecker
                select = (idx,) + tuple(slice(0, s) for s in sizes[idx])
                cur_ans[select] = arr
            new_data[sig] = np.moveaxis(cur_ans, 0, axis)

        # get last sweep parameter
        last_par = swp_par_list[-1]
        last_xvec = data0[last_par]
        xvec_list = [data[last_par] for data in data_list]
        for xvec in xvec_list:
            if not np.array_equal(xvec_list[0], xvec):
                # last sweep parameter has to be a multi dimensional array
                # NOTE(review): reuses max_size from the final signal iteration
                # above; assumes at least one signal exists and its padded shape
                # covers the x vectors -- confirm.
                sizes = [x.shape for x in xvec_list]
                cur_ans = np.full((len(xvec_list),) + tuple(max_size), np.nan)
                for idx, _xvec in enumerate(xvec_list):
                    # noinspection PyTypeChecker
                    select = (idx, ...) + tuple(slice(0, s) for s in sizes[idx])
                    cur_ans[select] = _xvec
                last_xvec = np.moveaxis(cur_ans, 0, axis)
                break
        new_data[last_par] = last_xvec

        # get all other sweep params
        for sn in swp_par_list[:-1]:
            if sn != 'corner':
                new_data[sn] = data0[sn]

        swp_par_list.insert(axis, swp_name)
        new_data[swp_name] = swp_vals

        return AnalysisData(swp_par_list, new_data, data0.is_md)

    def get_param_value(self, name: str) -> np.ndarray:
        """Return sweep parameter name's values broadcast to data_shape[:-1]."""
        param_idx = self._swp_pars.index(name)

        shape = self.data_shape[:-1]
        # reshape to 1s everywhere except this parameter's own axis, then broadcast
        shape_init = [1] * len(shape)
        shape_init[param_idx] = shape[param_idx]
        arr = self._data[name].reshape(tuple(shape_init))
        return np.broadcast_to(arr, shape)

    def items(self) -> ItemsView[str, np.ndarray]:
        return self._data.items()

    def insert(self, name: str, data: np.ndarray) -> None:
        """Insert (or overwrite) a signal array."""
        self._data[name] = data
        if name not in self._signals:
            self._signals.append(name)

    def copy(self) -> AnalysisData:
        """Return a copy with each array duplicated."""
        _data = {}
        for k, v in self._data.items():
            _data[k] = self._data[k].copy()
        return AnalysisData(self._swp_pars, _data, self._is_md)

    def add(self, new_data: Dict[str, np.ndarray]):
        """Adds combination to simulation results.

        Appends new sweep points (and their signal columns) to this data set.
        Only supported when is_md is False.
        """
        if self.is_md:
            raise AttributeError('Currently only supported in is_md = False mode')

        # check that the size of new data is the same as existing data
        assert len(self._data.keys()) == len(new_data.keys())

        # check that all sweep parameters are provided
        for param in self.sweep_params:
            if param not in new_data.keys():
                raise ValueError('Param %s not provided in data' % param)

        ref_length = len(list(new_data.values())[0])
        # add data points
        for name, arr in new_data.items():
            # check that all new data arrays are the correct length
            if name in self.sweep_params or name == 'hash':
                assert len(arr) == ref_length
            else:
                assert len(arr[0]) == ref_length

            # new sweep point
            if name in self.sweep_params:
                self._data[name] = np.append(self._data[name], arr)
            # sweep data
            else:
                self._data[name] = np.hstack((self._data[name], arr))

    def remove_sweep(self, name: str, rtol: float = 1e-8, atol: float = 1e-20) -> bool:
        """Remove sweep parameter name from this data set.

        Returns False if the parameter does not exist.  In MD mode the removed
        axis must have size 1; in flat mode, removing the parameter may make
        the remaining sweep a regular grid, in which case the data is reshaped
        to multi-dimensional form.
        """
        new_swp_vars = list(self._swp_pars)
        try:
            idx = new_swp_vars.index(name)
        except ValueError:
            return False

        if self._is_md:
            swp_vals = self._data.pop(name)
            if swp_vals.size != 1:
                # restore the popped array so the object stays consistent
                self._data[name] = swp_vals
                raise ValueError('Can only remove sweep with 1 value in a MD sweep.')

            for sig in self._signals:
                self._data[sig] = np.squeeze(self._data[sig], axis=idx)

            last_var_name = self._swp_pars[-1]
            last_var_arr = self._data[last_var_name]
            if len(last_var_arr.shape) != 1:
                # also need to squeeze last x axis values
                self._data[last_var_name] = np.squeeze(last_var_arr, axis=idx)
            del new_swp_vars[idx]
            self._swp_pars = ImmutableList(new_swp_vars)
        else:
            del new_swp_vars[idx]

            # remove corners
            swp_names = new_swp_vars[1:]
            sig_shape = self._data[self._signals[0]].shape
            num_env = sig_shape[0]
            if len(sig_shape) == 2:
                # inner most dimension is part of param sweep
                swp_shape, swp_vals = _check_is_md(num_env, [self._data[par] for par in swp_names],
                                                   rtol, atol, None)
                if swp_shape is not None:
                    for par, vals in zip(swp_names, swp_vals):
                        self._data[par] = vals
            else:
                # inner most dimension is not part of param sweep
                last_par = swp_names[-1]
                last_dset = self._data[last_par]
                swp_names = swp_names[:-1]
                swp_shape, swp_vals = _check_is_md(num_env, [self._data[par] for par in swp_names],
                                                   rtol, atol, last_dset.shape[-1])
                if swp_shape is not None:
                    for par, vals in zip(swp_names, swp_vals):
                        self._data[par] = vals

                    if len(last_dset.shape) > 1:
                        self._data[last_par] = last_dset.reshape(swp_shape)

            self._swp_pars = ImmutableList(new_swp_vars)
            del self._data[name]
            if swp_shape is not None:
                # this is multi-D
                for sig in self._signals:
                    self._data[sig] = self._data[sig].reshape(swp_shape)

                self._is_md = True

        return True
+
+
class SimData:
    """A data structure that stores simulation data as a multi-dimensional array."""

    def __init__(self, sim_envs: Sequence[str], data: Dict[str, AnalysisData],
                 sim_netlist_type: DesignOutput) -> None:
        if not data:
            raise ValueError('Empty simulation data.')

        self._sim_envs = ImmutableList(sim_envs)
        self._table = data
        # the first group is opened by default
        self._cur_name = next(iter(self._table.keys()))
        self._cur_ana: AnalysisData = self._table[self._cur_name]
        self._netlist_type = sim_netlist_type

    @property
    def group(self) -> str:
        # name of the currently opened analysis group
        return self._cur_name

    @property
    def group_list(self) -> List[str]:
        return list(self._table.keys())

    @property
    def sim_envs(self) -> ImmutableList[str]:
        return self._sim_envs

    @property
    def sweep_params(self) -> ImmutableList[str]:
        # delegates to the currently opened group
        return self._cur_ana.sweep_params

    @property
    def signals(self) -> List[str]:
        return self._cur_ana.signals

    @property
    def is_md(self) -> bool:
        return self._cur_ana.is_md

    @property
    def data_shape(self) -> Tuple[int, ...]:
        return self._cur_ana.data_shape

    @property
    def netlist_type(self) -> DesignOutput:
        return self._netlist_type

    def __getitem__(self, item: str) -> np.ndarray:
        # signal names are converted from CDBA bus notation to the netlist's
        # naming convention before lookup
        return self._cur_ana[convert_cdba_name_bit(item, self._netlist_type)]

    def __contains__(self, item: str) -> bool:
        return item in self._cur_ana

    def items(self) -> ItemsView[str, np.ndarray]:
        return self._cur_ana.items()

    def open_group(self, val: str) -> None:
        """Make group val the current analysis group."""
        tmp = self._table.get(val, None)
        if tmp is None:
            raise ValueError(f'Group {val} not found.')

        self._cur_name = val
        self._cur_ana = tmp

    def open_analysis(self, atype: AnalysisType) -> None:
        """Open the group whose name matches the given analysis type."""
        self.open_group(atype.name.lower())

    def insert(self, name: str, data: np.ndarray) -> None:
        """Insert (or overwrite) a signal in the current group."""
        self._cur_ana.insert(name, data)

    def add(self, new_data: Dict[str, np.ndarray]):
        """Append new sweep points to the current group (flat mode only)."""
        self._cur_ana.add(new_data)

    def copy(self, rename: Optional[Dict[str, str]] = None) -> SimData:
        """Shallow copy: group table is new, AnalysisData objects are shared."""
        if rename is None:
            rename = {}
        _table = {}
        for k, v in self._table.items():
            key = rename.get(k, k)
            _table[key] = self._table[k]
        return SimData(self._sim_envs, _table, self.netlist_type)

    def deep_copy(self, rename: Optional[Dict[str, str]] = None) -> SimData:
        """Deep copy: each AnalysisData (and its arrays) is copied."""
        if rename is None:
            rename = {}
        _table = {}
        for k, v in self._table.items():
            key = rename.get(k, k)
            _table[key] = self._table[k].copy()
        return SimData(self._sim_envs, _table, self.netlist_type)

    def remove_sweep(self, name: str, rtol: float = 1e-8, atol: float = 1e-20) -> bool:
        """Remove sweep parameter name from the current group."""
        return self._cur_ana.remove_sweep(name, rtol=rtol, atol=atol)

    def get_param_value(self, name: str) -> np.ndarray:
        return self._cur_ana.get_param_value(name)

    @classmethod
    def combine(cls, data_list: List[SimData], swp_name: str,
                swp_vals: Optional[np.ndarray] = None) -> SimData:
        """Stack multiple SimData objects along a new sweep axis named swp_name.

        All entries are assumed to share data_list[0]'s groups and corners.
        """
        ndata = len(data_list)
        if ndata < 1:
            raise ValueError('Must combine at least 1 data.')

        data0 = data_list[0]
        sim_envs = data0.sim_envs
        new_data = {}
        for grp in data0.group_list:
            ana_list = [sim_data._table[grp] for sim_data in data_list]
            # axis=1: the new sweep axis goes right after the corner axis
            new_data[grp] = AnalysisData.combine(ana_list, swp_name, swp_vals=swp_vals, axis=1)

        return SimData(sim_envs, new_data, data0.netlist_type)
+
+
def _check_is_md(num_env: int, swp_vals: List[np.ndarray], rtol: float, atol: float,
                 last: Optional[int]) -> Tuple[Optional[Tuple[int, ...]], List[np.ndarray]]:
    """Check whether flattened sweep value arrays form a regular grid.

    Parameters
    ----------
    num_env : int
        number of simulation environments (becomes the leading axis size).
    swp_vals : List[np.ndarray]
        flattened per-parameter value arrays, outermost parameter first.
    rtol : float
        relative tolerance for value comparisons.
    atol : float
        absolute tolerance for value comparisons.
    last : Optional[int]
        size of an extra innermost axis to append, or None.

    Returns
    -------
    shape : Optional[Tuple[int, ...]]
        the multi-dimensional shape, or None if the values do not form a grid.
    new_vals : List[np.ndarray]
        the de-duplicated value array for each parameter (empty if not a grid).
    """
    num = len(swp_vals)
    shape_list = [num_env] * (num + 1)
    new_vals = [np.nan] * num
    prev_size = 1
    # walk from the innermost parameter outwards, peeling off one axis per step
    for idx in range(num - 1, -1, -1):
        cur_vals = swp_vals[idx]
        if prev_size > 1:
            # this parameter must be constant across each inner block of
            # prev_size consecutive entries
            rep_prev = cur_vals.size // prev_size
            for start_idx in range(0, rep_prev * prev_size, prev_size):
                if not np.allclose(cur_vals[start_idx:start_idx + prev_size], cur_vals[start_idx],
                                   rtol=rtol, atol=atol):
                    # is not MD
                    return None, []
            # keep one representative per inner block
            cur_vals = cur_vals[0::prev_size]

        # distance to the first repeat of cur_vals[0] gives the unique period
        occ_vec = np.nonzero(np.isclose(cur_vals, cur_vals[0], rtol=rtol, atol=atol))[0]
        if occ_vec.size < 2:
            unique_size = cur_vals.size
        else:
            unique_size = occ_vec[1]
        rep, remain = divmod(cur_vals.size, unique_size)
        if remain != 0 or not np.allclose(cur_vals, np.tile(cur_vals[:unique_size], rep),
                                          rtol=rtol, atol=atol):
            # is not MD
            return None, []

        new_vals[idx] = cur_vals[:unique_size]
        shape_list[idx + 1] = unique_size
        prev_size *= unique_size

    if last is not None:
        shape_list.append(last)
    return tuple(shape_list), new_vals
diff --git a/src/bag/simulation/design.py b/src/bag/simulation/design.py
new file mode 100644
index 0000000..bc01f0c
--- /dev/null
+++ b/src/bag/simulation/design.py
@@ -0,0 +1,188 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Union, Type, Mapping, Dict, Optional, Sequence, cast
+
+import abc
+import pprint
+from pathlib import Path
+from copy import deepcopy
+
+from pybag.enum import LogLevel
+
+from ..util.importlib import import_class
+from ..util.logging import LoggingBase
+from ..concurrent.core import batch_async_task
+from ..design.module import Module
+from ..layout.tech import TechInfo
+from ..layout.routing.grid import RoutingGrid
+from ..layout.template import TemplateBase
+from .core import TestbenchManager
+from .measure import MeasurementManager
+from .cache import SimulationDB, SimResults, MeasureResult, DesignInstance
+
+if TYPE_CHECKING:
+ from ..core import BagProject
+
+
class DesignerBase(LoggingBase, abc.ABC):
    """Base class of all design scripts.

    Subclasses implement :meth:`async_design` to run the (possibly hierarchical)
    design flow; everything else here is plumbing around the shared SimulationDB.

    Notes
    -----
    1. This class hides the SimulationDB object from the user. This is because hierarchical
       designers share the same SimulationDB, and if you don't make sure to update the
       working directory every time you run

    NOTE(review): the sentence above is truncated in the original source; judging
    from the commit()/work_dir logic it warns that each (sub-)designer must switch
    to its own working directory before simulating -- confirm with the author.
    """

    def __init__(self, root_dir: Path, sim_db: SimulationDB, dsn_specs: Mapping[str, Any]) -> None:
        # log to the same file/level as the shared simulation database
        cls_name = self.__class__.__name__
        super().__init__(cls_name, sim_db.log_file, log_level=sim_db.log_level)

        self._root_dir = root_dir
        # working directory; refined per-designer/per-mode in commit()
        self._work_dir = root_dir
        self._sim_db = sim_db
        # deep-copy so later mutations by the caller do not leak into this instance
        self._dsn_specs = {k: deepcopy(v) for k, v in dsn_specs.items()}

        self.commit()

    @property
    def tech_info(self) -> TechInfo:
        """TechInfo: the technology information object."""
        return self._sim_db.prj.tech_info

    @property
    def grid(self) -> RoutingGrid:
        """RoutingGrid: the routing grid object."""
        return self._sim_db.prj.grid

    @property
    def dsn_specs(self) -> Dict[str, Any]:
        """Dict[str, Any]: the design specification dictionary."""
        return self._dsn_specs

    @property
    def extract(self) -> bool:
        """bool: True if simulations are run on extracted netlists."""
        return self._sim_db.extract

    @property
    def work_dir(self) -> Path:
        """Path: the current working directory for simulation data."""
        return self._work_dir

    @classmethod
    def get_default_param_values(cls) -> Dict[str, Any]:
        """Return default design-spec values; subclasses may override."""
        return {}

    @classmethod
    def design_cell(cls, prj: BagProject, specs: Mapping[str, Any], extract: bool = False,
                    force_sim: bool = False, force_extract: bool = False, gen_sch: bool = False,
                    log_level: LogLevel = LogLevel.DEBUG) -> None:
        """Entry point: build a designer from specs, run it, and print the summary.

        Parameters
        ----------
        prj : BagProject
            the BagProject instance.
        specs : Mapping[str, Any]
            must contain 'dsn_class', 'root_dir', 'impl_lib', and 'dsn_params';
            'precision' is optional (defaults to 6).
        extract : bool
            True to run extracted simulations.
        force_sim : bool
            True to re-run simulations even when results exist.
        force_extract : bool
            True to re-run extraction even when results exist.
        gen_sch : bool
            True to generate schematics.
        log_level : LogLevel
            logging verbosity.
        """
        dsn_str: Union[str, Type[DesignerBase]] = specs['dsn_class']
        root_dir: Union[str, Path] = specs['root_dir']
        impl_lib: str = specs['impl_lib']
        dsn_params: Mapping[str, Any] = specs['dsn_params']
        precision: int = specs.get('precision', 6)

        dsn_cls = cast(Type[DesignerBase], import_class(dsn_str))
        if isinstance(root_dir, str):
            root_path = Path(root_dir)
        else:
            root_path = root_dir

        dsn_options = dict(
            extract=extract,
            force_extract=force_extract,
            gen_sch=gen_sch,
            log_level=log_level,
        )
        log_file = str(root_path / 'dsn.log')
        sim_db = prj.make_sim_db(root_path / 'dsn', log_file, impl_lib, dsn_options=dsn_options,
                                 force_sim=force_sim, precision=precision, log_level=log_level)
        designer = dsn_cls(root_path, sim_db, dsn_params)
        summary = designer.run_design()
        pprint.pprint(summary)

    def get_design_dir(self, parent_dir: Path) -> Path:
        """Return the design directory, segregated by extraction mode."""
        if self.extract:
            return parent_dir / 'extract'
        else:
            return parent_dir / 'schematic'

    def commit(self) -> None:
        """Commit changes to specs dictionary. Perform necessary initialization."""
        # fill in missing defaults without overwriting user-provided values
        for k, v in self.get_default_param_values().items():
            if k not in self._dsn_specs:
                self._dsn_specs[k] = v

        self._work_dir = self.get_design_dir(self._root_dir / self.__class__.__name__)

    def design(self, **kwargs: Any) -> Mapping[str, Any]:
        """Run async_design() synchronously and return its result, re-raising errors."""
        coro = self.async_design(**kwargs)
        results = batch_async_task([coro])
        if results is None:
            # NOTE(review): error() presumably raises; otherwise the next line
            # would fail on None -- confirm LoggingBase.error semantics.
            self.error('Design script cancelled.')

        ans = results[0]
        if isinstance(ans, Exception):
            raise ans
        return ans

    @abc.abstractmethod
    async def async_design(self, **kwargs: Any) -> Mapping[str, Any]:
        """Coroutine that runs the design flow; kwargs come from dsn_specs."""
        pass

    def run_design(self) -> Mapping[str, Any]:
        """Run the design flow using the current design specs."""
        return self.design(**self.dsn_specs)

    def set_dsn_specs(self, specs: Mapping[str, Any]) -> None:
        """Replace the design specs (deep-copied) and re-run commit()."""
        self._dsn_specs = {k: deepcopy(v) for k, v in specs.items()}
        self.commit()

    def new_designer(self, cls: Union[str, Type[DesignerBase]], dsn_specs: Mapping[str, Any]
                     ) -> DesignerBase:
        """Create a sub-designer sharing this designer's root directory and SimulationDB."""
        dsn_cls = cast(Type[DesignerBase], import_class(cls))
        designer = dsn_cls(self._root_dir, self._sim_db, dsn_specs)
        return designer

    def make_tbm(self, tbm_cls: Union[Type[TestbenchManager], str], tbm_specs: Mapping[str, Any],
                 ) -> TestbenchManager:
        """Create a TestbenchManager that logs through this designer's logger."""
        return self._sim_db.make_tbm(tbm_cls, tbm_specs, logger=self.logger)

    def make_mm(self, mm_cls: Union[Type[MeasurementManager], str], meas_specs: Mapping[str, Any]
                ) -> MeasurementManager:
        """Create a MeasurementManager instance."""
        return self._sim_db.make_mm(mm_cls, meas_specs)

    async def async_batch_dut(self, dut_specs: Sequence[Mapping[str, Any]],
                              ) -> Sequence[DesignInstance]:
        """Coroutine: create multiple DUTs concurrently via the simulation database."""
        return await self._sim_db.async_batch_design(dut_specs)

    async def async_new_dut(self, impl_cell: str,
                            lay_cls: Union[Type[TemplateBase], Type[Module], str],
                            dut_params: Mapping[str, Any], extract: Optional[bool] = None,
                            name_prefix: str = '', name_suffix: str = '',
                            flat: bool = False, export_lay: bool = False) -> DesignInstance:
        """Coroutine: create a new DUT (layout or schematic generator) in the database."""
        return await self._sim_db.async_new_design(impl_cell, lay_cls, dut_params, extract=extract,
                                                   name_prefix=name_prefix, name_suffix=name_suffix,
                                                   flat=flat, export_lay=export_lay)

    async def async_simulate_tbm_obj(self, sim_id: str, dut: Optional[DesignInstance],
                                     tbm: TestbenchManager, tb_params: Optional[Mapping[str, Any]],
                                     tb_name: str = '') -> SimResults:
        """Coroutine: simulate the given testbench under work_dir/sim_id."""
        return await self._sim_db.async_simulate_tbm_obj(sim_id, self._work_dir / sim_id, dut, tbm,
                                                         tb_params, tb_name=tb_name)

    async def async_simulate_mm_obj(self, sim_id: str, dut: Optional[DesignInstance],
                                    mm: MeasurementManager) -> MeasureResult:
        """Coroutine: run the given measurement under work_dir/sim_id."""
        return await self._sim_db.async_simulate_mm_obj(sim_id, self._work_dir / sim_id, dut, mm)
diff --git a/src/bag/simulation/hdf5.py b/src/bag/simulation/hdf5.py
new file mode 100644
index 0000000..f1d928f
--- /dev/null
+++ b/src/bag/simulation/hdf5.py
@@ -0,0 +1,186 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List, Dict, Any, Tuple
+
+from pathlib import Path
+
+import h5py
+import numpy as np
+
+from pybag.enum import DesignOutput
+from pybag.core import get_bag_logger
+
+from ..util.search import BinaryIterator
+from .data import AnalysisData, SimData
+
+try:
+ # register the blosc filter on load
+ import blosc_filter_pybind11
+ BLOSC_FILTER = blosc_filter_pybind11.register_blosc_filter()
+except ImportError:
+ print('WARNING: Error registering BLOSC filter for HDF5. Default to LZF')
+ blosc_filter_pybind11 = None
+ BLOSC_FILTER = None
+
+MB_SIZE = 1024**2
+
+
+def _set_chunk_args(kwargs: Dict[str, Any], chunk_size_mb: int, shape: Tuple[int, ...],
+ unit_size: int) -> None:
+ if chunk_size_mb == 0:
+ return
+
+ ndim = len(shape)
+ num_max = chunk_size_mb * MB_SIZE // unit_size
+ chunk_shape = [1] * ndim
+ num_cum = 1
+ for cur_idx in range(ndim - 1, -1, -1):
+ size_cur = shape[cur_idx]
+ num_cur = num_cum * size_cur
+ if num_cur > num_max:
+ # binary search on divisor
+ bin_iter = BinaryIterator(2, size_cur + 1)
+ while bin_iter.has_next():
+ div = bin_iter.get_next()
+ q, r = divmod(size_cur, div)
+ q += (r != 0)
+ num_test = num_cum * q
+ if num_test <= num_max:
+ bin_iter.save_info(q)
+ bin_iter.down()
+ elif num_test > num_max:
+ bin_iter.up()
+ else:
+ bin_iter.save_info(q)
+ break
+ chunk_shape[cur_idx] = bin_iter.get_last_save_info()
+ break
+ else:
+ # we can take all values from this dimension
+ chunk_shape[cur_idx] = size_cur
+ if num_cur == num_max:
+ # we're done
+ break
+
+ kwargs['chunks'] = tuple(chunk_shape)
+
+
def save_sim_data_hdf5(data: SimData, hdf5_path: Path, compress: bool = True,
                       chunk_size_mb: int = 2, cache_size_mb: int = 20,
                       cache_modulus: int = 2341) -> None:
    """Saves the given SimData as a HDF5 file.

    The simulation environments are stored as fixed length byte strings,
    and the sweep parameters are stored as dimension label for each data.

    Parameters
    ----------
    data: SimData
        the data.
    hdf5_path: Path
        the hdf5 file path.
    compress : bool
        True to compress the data; uses the BLOSC filter when registered,
        otherwise falls back to the built-in LZF filter.
    chunk_size_mb : int
        HDF5 data chunk size, in megabytes. 0 to disable.
    cache_size_mb : int
        HDF5 file chunk cache size, in megabytes.
    cache_modulus : int
        HDF5 file chunk cache modulus.
    """
    # create parent directory
    hdf5_path.parent.mkdir(parents=True, exist_ok=True)

    str_kwargs: Dict[str, Any] = {}
    dset_kwargs: Dict[str, Any] = {}
    if compress:
        if chunk_size_mb == 0:
            raise ValueError('Compression can only be done with chunk storage')
        if BLOSC_FILTER is None:
            # BLOSC plugin unavailable: fall back to LZF with byte-shuffle
            dset_kwargs['compression'] = 'lzf'
            dset_kwargs['shuffle'] = True
        else:
            dset_kwargs['compression'] = BLOSC_FILTER
            # NOTE(review): option tuple meanings inferred from the standard
            # blosc HDF5 filter convention (level/shuffle fields) -- confirm
            # against the registered filter implementation.
            dset_kwargs['compression_opts'] = (0, 0, 0, 0, 5, 1, 0)
            dset_kwargs['shuffle'] = False

    with h5py.File(str(hdf5_path), 'w', libver='latest', rdcc_nbytes=cache_size_mb * MB_SIZE,
                   rdcc_w0=1.0, rdcc_nslots=cache_modulus) as f:
        # corner names stored as fixed-length byte strings
        arr = np.array(data.sim_envs, dtype='S')
        _set_chunk_args(str_kwargs, chunk_size_mb, arr.shape, arr.dtype.itemsize)
        f.create_dataset('__corners', data=arr, **str_kwargs)
        f.attrs['netlist_type'] = data.netlist_type.value
        for group in data.group_list:
            data.open_group(group)
            grp = f.create_group(group)
            grp.attrs['is_md'] = data.is_md
            arr = np.array(data.sweep_params, dtype='S')
            _set_chunk_args(str_kwargs, chunk_size_mb, arr.shape, arr.dtype.itemsize)
            grp.create_dataset('__sweep_params', data=arr, **str_kwargs)
            for name, arr in data.items():
                # chunk arguments recomputed per dataset (dict is reused in place)
                _set_chunk_args(dset_kwargs, chunk_size_mb, arr.shape, arr.dtype.itemsize)
                grp.create_dataset(name, data=arr, **dset_kwargs)
+
+
def load_sim_data_hdf5(path: Path, cache_size_mb: int = 20, cache_modulus: int = 2341) -> SimData:
    """Read simulation results from HDF5 file.

    Parameters
    ----------
    path : Path
        the file to read.
    cache_size_mb : int
        HDF5 file chunk cache size, in megabytes.
    cache_modulus : int
        HDF5 file chunk cache modulus.

    Returns
    -------
    results : SimData
        the data.
    """
    if not path.is_file():
        raise FileNotFoundError(f'{path} is not a file.')

    with h5py.File(str(path), 'r', rdcc_nbytes=cache_size_mb * MB_SIZE, rdcc_nslots=cache_modulus,
                   rdcc_w0=1.0) as f:
        sim_envs: List[str] = []
        ana_table: Dict[str, AnalysisData] = {}
        for grp_name, grp in f.items():
            if grp_name == '__corners':
                # corner names are stored as fixed-length byte strings
                sim_envs = grp[:].astype('U').tolist()
                continue

            md_flag: bool = bool(grp.attrs['is_md'])
            swp_names: List[str] = []
            signals: Dict[str, np.ndarray] = {}
            for sig_name, dset in grp.items():
                if sig_name == '__sweep_params':
                    swp_names = dset[:].astype('U').tolist()
                else:
                    signals[sig_name] = dset[:]
            ana_table[grp_name] = AnalysisData(swp_names, signals, md_flag)

        type_code = f.attrs.get('netlist_type', None)
        if type_code is None:
            # legacy files predate the netlist_type attribute
            logger = get_bag_logger()
            logger.warn('Old HDF5 file: cannot find attribute "netlist_type". Assuming SPECTRE.')
            out_type = DesignOutput.SPECTRE
        else:
            out_type = DesignOutput(type_code)

        return SimData(sim_envs, ana_table, out_type)
diff --git a/src/bag/simulation/measure.py b/src/bag/simulation/measure.py
new file mode 100644
index 0000000..59798f2
--- /dev/null
+++ b/src/bag/simulation/measure.py
@@ -0,0 +1,216 @@
+# SPDX-License-Identifier: Apache-2.0
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from typing import TYPE_CHECKING, Dict, Any, Tuple, Mapping, Union, Optional, Type, cast
+
+import abc
+from pathlib import Path
+from copy import deepcopy
+from dataclasses import dataclass
+
+from pybag.enum import LogLevel
+
+from ..util.logging import LoggingBase
+from ..util.importlib import import_class
+from ..io.file import write_yaml
+from ..concurrent.core import batch_async_task
+
+from .core import TestbenchManager
+
+if TYPE_CHECKING:
+ from .cache import SimulationDB, DesignInstance, SimResults, MeasureResult
+
+
@dataclass
class MeasInfo:
    """Measurement FSM state: current state name plus results accumulated so far."""
    state: str  # name of the current FSM state
    prev_results: Dict[str, Any]  # results gathered by previously processed states
+
+
class MeasurementManager(LoggingBase, abc.ABC):
    """A class that handles circuit performance measurement.

    This class handles all the steps needed to measure a specific performance
    metric of the device-under-test. This may involve creating and simulating
    multiple different testbenches, where configuration of successive testbenches
    depends on previous simulation results. This class reduces the potentially
    complex measurement tasks into a few simple abstract methods that designers
    simply have to implement.

    Parameters
    ----------
    meas_specs : Mapping[str, Any]
        measurement specifications; deep-copied into this instance.
    log_file : str
        path of the log file.
    log_level : LogLevel
        logging verbosity.
    precision : int
        numeric precision used when generating simulation netlists.
    """

    def __init__(self, meas_specs: Mapping[str, Any], log_file: str,
                 log_level: LogLevel = LogLevel.DEBUG, precision: int = 6) -> None:
        LoggingBase.__init__(self, self.__class__.__name__, log_file, log_level=log_level)

        # deep-copy so caller-side mutations do not leak into this instance
        self._specs: Dict[str, Any] = {k: deepcopy(v) for k, v in meas_specs.items()}
        self._precision = precision
        self.commit()

    @property
    def specs(self) -> Dict[str, Any]:
        """Dict[str, Any]: the measurement specification dictionary."""
        return self._specs

    @property
    def precision(self) -> int:
        """int: numeric precision used for simulation."""
        return self._precision

    @abc.abstractmethod
    def initialize(self, sim_db: SimulationDB, dut: DesignInstance) -> Tuple[bool, MeasInfo]:
        """Initialize this MeasurementManager to get ready for measurement.

        Parameters
        ----------
        sim_db : SimulationDB
            the simulation database object.
        dut : DesignInstance
            the design instance.

        Returns
        -------
        done : bool
            If True, then do not run measurement.
        info : MeasInfo
            the initial MeasInfo object.
        """
        pass

    @abc.abstractmethod
    def process_output(self, cur_info: MeasInfo, sim_results: Union[SimResults, MeasureResult]
                       ) -> Tuple[bool, MeasInfo]:
        """Process simulation output data.

        Parameters
        ----------
        cur_info : MeasInfo
            the MeasInfo object representing the current measurement state.
        sim_results : Union[SimResults, MeasureResult]
            the simulation results object.

        Returns
        -------
        done : bool
            True if this measurement is finished.
        next_info : MeasInfo
            the updated measurement state.
        """
        pass

    @abc.abstractmethod
    def get_sim_info(self, sim_db: SimulationDB, dut: DesignInstance, cur_info: MeasInfo
                     ) -> Tuple[Union[Tuple[TestbenchManager, Mapping[str, Any]],
                                      MeasurementManager], bool]:
        """Get the testbench manager needed for the current measurement state.

        Override to customize your testbench manager.

        Parameters
        ----------
        sim_db : SimulationDB
            the simulation database object.
        dut : DesignInstance
            the design instance.
        cur_info: MeasInfo
            the MeasInfo object representing the current measurement state.

        Returns
        -------
        sim_object : Union[Tuple[TestbenchManager, Mapping[str, Any]], MeasurementManager]
            either a TestbenchManager/tb_params tuple, or a measurement manager instance.
        use_dut : bool
            True to run simulation with DesignInstance.
        """
        pass

    def commit(self) -> None:
        """Commit changes to specs dictionary. Perform necessary initialization."""
        pass

    def make_tbm(self, tbm_cls: Union[Type[TestbenchManager], str], tbm_specs: Mapping[str, Any],
                 ) -> TestbenchManager:
        """Create a new TestbenchManager instance from the given class and specs."""
        obj_cls = cast(Type[TestbenchManager], import_class(tbm_cls))
        # NOTE(review): the positional None/empty arguments appear to be
        # simulation context filled in later by SimulationDB -- confirm against
        # TestbenchManager.__init__.
        return obj_cls(None, Path(), '', '', tbm_specs, None, None,
                       precision=self._precision, logger=self.logger)

    def make_mm(self, mm_cls: Union[Type[MeasurementManager], str], mm_specs: Mapping[str, Any]
                ) -> MeasurementManager:
        """Create a sub-MeasurementManager sharing this manager's log file and precision."""
        obj_cls = cast(Type[MeasurementManager], import_class(mm_cls))
        return obj_cls(mm_specs, self.log_file, log_level=self.log_level, precision=self._precision)

    async def async_measure_performance(self, name: str, sim_dir: Path, sim_db: SimulationDB,
                                        dut: Optional[DesignInstance]) -> Dict[str, Any]:
        """A coroutine that performs measurement.

        The measurement is done like a FSM. On each iteration, depending on the current
        state, it creates a new testbench (or reuse an existing one) and simulate it.
        It then post-process the simulation data to determine the next FSM state, or
        if the measurement is done.

        Parameters
        ----------
        name : str
            name of this measurement.
        sim_dir : Path
            simulation directory.
        sim_db : SimulationDB
            the simulation database object.
        dut : Optional[DesignInstance]
            the DUT to measure.

        Returns
        -------
        output : Dict[str, Any]
            the last dictionary returned by process_output().
        """
        done, cur_info = self.initialize(sim_db, dut)
        while not done:
            cur_state = cur_info.state
            self.log(f'Measurement {name}, state {cur_state}')
            sim_id = f'{name}_{cur_state}'

            # create and setup testbench
            sim_object, use_dut = self.get_sim_info(sim_db, dut, cur_info)
            cur_dut = dut if use_dut else None
            if isinstance(sim_object, MeasurementManager):
                # this state delegates to a nested measurement
                sim_results = await sim_db.async_simulate_mm_obj(sim_id, sim_dir / cur_state,
                                                                 cur_dut, sim_object)
            else:
                tbm, tb_params = sim_object
                sim_results = await sim_db.async_simulate_tbm_obj(cur_state, sim_dir / cur_state,
                                                                  cur_dut, tbm, tb_params,
                                                                  tb_name=sim_id)

            self.log(f'Processing output of {name}, state {cur_state}')
            done, next_info = self.process_output(cur_info, sim_results)
            # persist intermediate results per state for debugging/inspection
            write_yaml(sim_dir / f'{cur_state}.yaml', next_info.prev_results)
            cur_info = next_info

        self.log(f'Measurement {name} done, recording results.')
        result = cur_info.prev_results
        write_yaml(sim_dir / f'{name}.yaml', cur_info.prev_results)
        return result

    def measure_performance(self, name: str, sim_dir: Path, sim_db: SimulationDB,
                            dut: Optional[DesignInstance]) -> Dict[str, Any]:
        """Synchronous wrapper around async_measure_performance().

        Returns an empty dictionary if the measurement was cancelled; re-raises
        any exception raised by the coroutine.
        """
        coro = self.async_measure_performance(name, sim_dir, sim_db, dut)
        results = batch_async_task([coro])
        if results is None:
            return {}

        ans = results[0]
        if isinstance(ans, Exception):
            raise ans
        return ans
diff --git a/src/bag/simulation/spectre.py b/src/bag/simulation/spectre.py
new file mode 100644
index 0000000..3e0bc3b
--- /dev/null
+++ b/src/bag/simulation/spectre.py
@@ -0,0 +1,481 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements bag's interface with spectre simulator.
+"""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Dict, Any, Sequence, Optional, List, Tuple, Union, Mapping, Set
+
+import re
+import shutil
+from pathlib import Path
+from itertools import chain
+
+from pybag.enum import DesignOutput
+from pybag.core import get_cdba_name_bits
+
+from ..math import float_to_si_string
+from ..io.file import read_yaml, open_file
+from ..io.string import wrap_string
+from ..util.immutable import ImmutableList
+from .data import (
+ MDSweepInfo, SimData, SetSweepInfo, SweepLinear, SweepLog, SweepList, SimNetlistInfo,
+ SweepSpec, MonteCarlo, AnalysisInfo, AnalysisAC, AnalysisSP, AnalysisNoise, AnalysisTran,
+ AnalysisSweep1D, AnalysisPSS
+)
+from .base import SimProcessManager, get_corner_temp
+from .hdf5 import load_sim_data_hdf5, save_sim_data_hdf5
+
+if TYPE_CHECKING:
+ from .data import SweepInfo
+
+reserve_params = {'freq', 'time'}
+
+
class SpectreInterface(SimProcessManager):
    """This class handles interaction with the Cadence Spectre simulator.

    Parameters
    ----------
    tmp_dir : str
        temporary file directory for SimAccess.
    sim_config : Dict[str, Any]
        the simulation configuration dictionary.
    """

    def __init__(self, tmp_dir: str, sim_config: Dict[str, Any]) -> None:
        SimProcessManager.__init__(self, tmp_dir, sim_config)
        # corner name -> list of (model file, section name) pairs
        self._model_setup: Dict[str, List[Tuple[str, str]]] = read_yaml(sim_config['env_file'])

    @property
    def netlist_type(self) -> DesignOutput:
        return DesignOutput.SPECTRE

    def create_netlist(self, output_path: Path, sch_netlist: Path, info: SimNetlistInfo,
                       precision: int = 6) -> None:
        """Create a spectre simulation netlist from the given schematic netlist.

        Parameters
        ----------
        output_path : Path
            path of the simulation netlist to write.
        sch_netlist : Path
            path of the schematic netlist to extend.
        info : SimNetlistInfo
            the simulation netlist specification.
        precision : int
            numeric precision used when formatting values.

        Raises
        ------
        ValueError
            if a path contains angle brackets (unsupported by spectre).
        NotImplementedError
            if Monte Carlo is combined with parameter/process sweeps.
        """
        output_path_str = str(output_path.resolve())
        sch_netlist_path_str = str(sch_netlist.resolve())
        if ('<' in output_path_str or '>' in output_path_str or
                '<' in sch_netlist_path_str or '>' in sch_netlist_path_str):
            raise ValueError('spectre does not support directory names with angle brackets.')

        sim_envs = info.sim_envs
        analyses = info.analyses
        params = info.params
        env_params = info.env_params
        swp_info = info.swp_info
        monte_carlo = info.monte_carlo
        sim_options = info.options
        init_voltages = info.init_voltages
        if monte_carlo is not None and (isinstance(swp_info, SetSweepInfo) or len(sim_envs) > 1):
            raise NotImplementedError('Monte Carlo simulation not implemented for parameter sweep '
                                      'and/or process sweep')

        with open_file(sch_netlist, 'r') as f:
            lines = [l.rstrip() for l in f]

        # write simulator options
        if sim_options:
            sim_opt_list = ['simulatorOptions', 'options']
            for opt, val in sim_options.items():
                sim_opt_list.append(f'{opt}={val}')
            sim_opt_str = wrap_string(sim_opt_list)
            lines.append(sim_opt_str)

        # write parameter declarations; reserved sweep variables (freq/time)
        # never get a "parameters" statement
        param_fmt = 'parameters {}={}'
        param_set = reserve_params.copy()
        for par, val in swp_info.default_items():
            if par not in param_set:
                lines.append(param_fmt.format(par, _format_val(val, precision)))
                param_set.add(par)
        for par, val_list in env_params.items():
            if par in param_set:
                raise ValueError('Cannot set a sweep parameter as environment parameter.')
            # first corner's value is the default; later corners override in altergroups
            lines.append(param_fmt.format(par, _format_val(val_list[0], precision)))
            param_set.add(par)
        for par, val in params.items():
            if par not in param_set:
                lines.append(param_fmt.format(par, _format_val(val, precision)))
                param_set.add(par)
        for ana in analyses:
            par = ana.param
            if par and par not in param_set:
                lines.append(param_fmt.format(par, _format_val(ana.param_start, precision)))
                param_set.add(par)

        lines.append('')

        if isinstance(swp_info, SetSweepInfo):
            # write paramset declaration if needed
            _write_param_set(lines, swp_info.params, swp_info.values, precision)
            lines.append('')

        if init_voltages:
            # write initial conditions
            ic_line = 'ic'
            for key, val in init_voltages.items():
                ic_line += f' {key}={_format_val(val, precision)}'

            lines.append(ic_line)
            lines.append('')
            has_ic = True
        else:
            has_ic = False

        # write statements for each simulation environment; the first corner
        # uses the default model statements, later corners use altergroups
        for idx, sim_env in enumerate(sim_envs):
            corner, temp = get_corner_temp(sim_env)
            if idx != 0:
                # start altergroup statement
                lines.append(f'{sim_env} altergroup {{')
            _write_sim_env(lines, self._model_setup[corner], temp)
            if idx != 0:
                # write environment parameters for second sim_env and on
                for par, val_list in env_params.items():
                    lines.append(param_fmt.format(par, val_list[idx]))
                # close altergroup statement
                lines.append('}')
            lines.append('')

            # write sweep statements
            num_brackets = _write_sweep_start(lines, swp_info, idx, precision)

            # write Monte Carlo statements if present
            if isinstance(monte_carlo, MonteCarlo):
                num_brackets += _write_monte_carlo(lines, monte_carlo)

            if num_brackets > 0:
                lines.append('')

            # write analyses
            save_outputs = set()
            for ana in analyses:
                _write_analysis(lines, sim_env, ana, precision, has_ic)
                lines.append('')
                for output in ana.save_outputs:
                    save_outputs.update(get_cdba_name_bits(output, DesignOutput.SPECTRE))

            # close sweep statements
            for _ in range(num_brackets):
                lines.append('}')
            if num_brackets > 0:
                lines.append('')

            # write save statements
            _write_save_statements(lines, save_outputs)

        with open_file(output_path, 'w') as f:
            f.write('\n'.join(lines))
            f.write('\n')

    def get_sim_file(self, dir_path: Path, sim_tag: str) -> Path:
        """Return the path of the HDF5 result file for the given simulation."""
        return dir_path / f'{sim_tag}.hdf5'

    def load_sim_data(self, dir_path: Path, sim_tag: str) -> SimData:
        """Load simulation results from the HDF5 result file.

        The original implementation contained leftover debug timing code
        (``import time`` plus ``print`` statements); it has been removed.
        """
        hdf5_path = self.get_sim_file(dir_path, sim_tag)
        return load_sim_data_hdf5(hdf5_path)

    async def async_run_simulation(self, netlist: Path, sim_tag: str) -> None:
        """Run spectre on the given netlist and convert the results to HDF5.

        Parameters
        ----------
        netlist : Path
            path of the simulation netlist.
        sim_tag : str
            tag used to name the raw/HDF5 result files.

        Raises
        ------
        FileNotFoundError
            if the netlist does not exist.
        ValueError
            if the simulation or the result conversion fails.
        """
        netlist = netlist.resolve()
        if not netlist.is_file():
            raise FileNotFoundError(f'netlist {netlist} is not a file.')

        sim_kwargs: Dict[str, Any] = self.config['kwargs']
        compress: bool = self.config.get('compress', True)
        rtol: float = self.config.get('rtol', 1e-8)
        atol: float = self.config.get('atol', 1e-22)

        cmd_str: str = sim_kwargs.get('command', 'spectre')
        env: Optional[Dict[str, str]] = sim_kwargs.get('env', None)
        run_64: bool = sim_kwargs.get('run_64', True)
        fmt: str = sim_kwargs.get('format', 'psfxl')
        psf_version: str = sim_kwargs.get('psfversion', '1.1')
        options = sim_kwargs.get('options', [])

        sim_cmd = [cmd_str, '-cols', '100', '-colslog', '100',
                   '-format', fmt, '-raw', f'{sim_tag}.raw']

        if fmt == 'psfxl':
            sim_cmd.append('-psfversion')
            sim_cmd.append(psf_version)
        if run_64:
            sim_cmd.append('-64')
        # pass through any user-specified command-line options
        sim_cmd.extend(options)

        sim_cmd.append(str(netlist))

        cwd_path = netlist.parent.resolve()
        log_path = cwd_path / 'spectre_output.log'
        raw_path: Path = cwd_path / f'{sim_tag}.raw'
        hdf5_path: Path = cwd_path / f'{sim_tag}.hdf5'

        # remove stale results from previous runs
        if raw_path.is_dir():
            shutil.rmtree(raw_path)
        if hdf5_path.is_file():
            hdf5_path.unlink()

        ret_code = await self.manager.async_new_subprocess(sim_cmd, str(log_path),
                                                           env=env, cwd=str(cwd_path))
        if ret_code is None or ret_code != 0 or not raw_path.is_dir():
            raise ValueError(f'Spectre simulation ended with error. See log file: {log_path}')

        # check if Monte Carlo sim
        for fname in raw_path.iterdir():
            if str(fname).endswith('Distributed'):
                analysis_info: Path = fname / 'Analysis.info'
                with open_file(analysis_info, 'r') as f:
                    line = f.readline()
                    num_proc = int(re.search(r'(.*) (\d*)\n', line).group(2))

                raw_sep: Path = raw_path / f'{num_proc}'
                for fname_sep in raw_sep.iterdir():
                    if str(fname_sep).endswith('.mapping'):
                        # Monte Carlo sim in multiprocessing mode: merge the
                        # per-process mapping files
                        mapping_lines = []
                        for i in range(num_proc):
                            with open_file(raw_path / f'{i + 1}' / fname_sep.name, 'r') as fr:
                                for line_in in fr:
                                    mapping_lines.append(line_in)

                        await self._format_monte_carlo(mapping_lines, cwd_path, compress, rtol,
                                                       atol, hdf5_path)
                        return

            elif str(fname).endswith('.mapping'):
                # Monte Carlo sim in single processing mode
                # (fixed: the original left the mapping file handle open)
                with open_file(fname, 'r') as f:
                    mapping_lines = f.readlines()
                await self._format_monte_carlo(mapping_lines, cwd_path, compress, rtol, atol,
                                               hdf5_path)
                return

        # convert to HDF5
        log_path = cwd_path / 'srr_to_hdf5.log'
        await self._srr_to_hdf5(compress, rtol, atol, raw_path, hdf5_path, log_path, cwd_path)

    async def _srr_to_hdf5(self, compress: bool, rtol: float, atol: float, raw_path: Path,
                           hdf5_path: Path, log_path: Path, cwd_path: Path) -> None:
        """Convert a spectre raw result directory into an HDF5 file."""
        comp_str = '1' if compress else '0'
        rtol_str = f'{rtol:.4g}'
        atol_str = f'{atol:.4g}'
        sim_cmd = ['srr_to_hdf5', str(raw_path), str(hdf5_path), comp_str, rtol_str, atol_str]
        ret_code = await self.manager.async_new_subprocess(sim_cmd, str(log_path),
                                                           cwd=str(cwd_path))
        if ret_code is None or ret_code != 0 or not hdf5_path.is_file():
            raise ValueError(f'srr_to_hdf5 ended with error. See log file: {log_path}')

        # post-process HDF5 to convert to MD array
        _process_hdf5(hdf5_path, rtol, atol)

    async def _format_monte_carlo(self, lines: List[str], cwd_path: Path, compress: bool,
                                  rtol: float, atol: float, final_hdf5_path: Path) -> None:
        """Convert each Monte Carlo sub-directory to HDF5 and merge them into one file."""
        # read mapping file and convert each sub-directory into hdf5 files
        sim_data_list = []
        for line in lines:
            # mapping line format: "<index>\t<raw directory>"
            raw_str = re.search(r'(\d*)\t(.*)\n', line).group(2)
            raw_path: Path = cwd_path / raw_str
            hdf5_path: Path = cwd_path / f'{raw_path.name}.hdf5'
            log_path: Path = cwd_path / f'{raw_path.name}_srr_to_hdf5.log'
            await self._srr_to_hdf5(compress, rtol, atol, raw_path, hdf5_path, log_path,
                                    cwd_path)
            sim_data_list.append(load_sim_data_hdf5(hdf5_path))

        # combine all SimData to one SimData
        new_sim_data = SimData.combine(sim_data_list, 'monte_carlo')
        save_sim_data_hdf5(new_sim_data, final_hdf5_path, compress)
+
+
+def _write_sim_env(lines: List[str], models: List[Tuple[str, str]], temp: int) -> None:
+ for fname, section in models:
+ if section:
+ lines.append(f'include "{fname}" section={section}')
+ else:
+ lines.append(f'include "{fname}"')
+ lines.append(f'tempOption options temp={temp}')
+
+
def _write_param_set(lines: List[str], params: Sequence[str],
                     values: Sequence[ImmutableList[float]], precision: int) -> None:
    """Append a spectre paramset block listing all sweep combinations in aligned columns."""
    # first row is the header; remaining rows are the formatted sweep values.
    # Track the maximum width of each column for alignment.
    rows = [params]
    widths = [len(name) for name in params]
    for combo in values:
        formatted = [_format_val(v, precision) for v in combo]
        widths = [max(w, len(s)) for w, s in zip(widths, formatted)]
        rows.append(formatted)

    # write the columns
    lines.append('swp_data paramset {')
    for row in rows:
        lines.append(' '.join(cell.ljust(w) for cell, w in zip(row, widths)))
    lines.append('}')
+
+
def _get_sweep_str(par: str, swp_spec: Optional[SweepSpec], precision: int) -> str:
    """Return the spectre sweep specification string for the given parameter."""
    if not par or swp_spec is None:
        return ''

    if isinstance(swp_spec, SweepList):
        values = swp_spec.values
        # spectre cannot distinguish multiple values below its abstol
        num_small = sum(1 for v in values if abs(v) < 3.0e-16)
        if num_small > 1:
            raise ValueError('sweep values are below spectre abstol, try to find a work around')
        body = ' '.join(_format_val(v, precision) for v in values)
        val_str = f'values=[{body}]'
    elif isinstance(swp_spec, SweepLinear):
        # spectre: stop is inclusive, lin = number of points excluding the last point
        val_str = f'start={swp_spec.start} stop={swp_spec.stop_inc} lin={swp_spec.num - 1}'
    elif isinstance(swp_spec, SweepLog):
        # spectre: stop is inclusive, log = number of points excluding the last point
        val_str = f'start={swp_spec.start} stop={swp_spec.stop_inc} log={swp_spec.num - 1}'
    else:
        raise ValueError('Unknown sweep specification.')

    # reserved variables (freq/time) are swept directly, without "param="
    return val_str if par in reserve_params else f'param={par} {val_str}'
+
+
+def _get_options_str(options: Mapping[str, str]) -> str:
+    """Format a mapping as space-separated ``key=val`` tokens (empty string if no options)."""
+    return ' '.join((f'{key}={val}' for key, val in options.items()))
+
+
+def _write_sweep_start(lines: List[str], swp_info: SweepInfo, swp_idx: int, precision: int) -> int:
+    """Append sweep block opening statement(s); returns the number of blocks opened.
+
+    Multi-dimensional sweeps open one nested block per dimension; otherwise a
+    single block referencing the ``swp_data`` paramset is opened.  The caller is
+    responsible for writing the matching closing braces.
+    """
+    if isinstance(swp_info, MDSweepInfo):
+        for dim_idx, (par, swp_spec) in enumerate(swp_info.params):
+            statement = _get_sweep_str(par, swp_spec, precision)
+            lines.append(f'swp{swp_idx}{dim_idx} sweep {statement} {{')
+        return swp_info.ndim
+    else:
+        lines.append(f'swp{swp_idx} sweep paramset=swp_data {{')
+        return 1
+
+
+def _write_monte_carlo(lines: List[str], mc: MonteCarlo) -> int:
+    """Append the opening of a spectre ``montecarlo`` block; returns number of blocks opened (1).
+
+    User-specified ``mc.options`` override the defaults below.  The caller is
+    responsible for writing the matching closing brace.
+    """
+    cur_line = f'__{mc.name}__ montecarlo numruns={mc.numruns} seed={mc.seed}'
+    # defaults: save per-run results in separate directories and include the nominal run
+    options_dict = dict(savefamilyplots='yes', appendsd='yes', savedatainseparatedir='yes',
+                        donominal='yes', variations='all')
+    options_dict.update(mc.options)
+    opt_str = _get_options_str(options_dict)
+    if opt_str:
+        cur_line += ' '
+        cur_line += opt_str
+    cur_line += ' {'
+    lines.append(cur_line)
+    return 1
+
+
+def _write_analysis(lines: List[str], sim_env: str, ana: AnalysisInfo, precision: int,
+ has_ic: bool) -> None:
+ cur_line = f'__{ana.name}__{sim_env}__'
+ if hasattr(ana, 'p_port') and ana.p_port:
+ cur_line += f' {ana.p_port}'
+ if hasattr(ana, 'n_port') and ana.n_port:
+ cur_line += f' {ana.n_port}'
+ cur_line += f' {ana.name}'
+
+ if isinstance(ana, AnalysisTran):
+ cur_line += (f' start={_format_val(ana.start, precision)}'
+ f' stop={_format_val(ana.stop, precision)}')
+ if isinstance(ana.out_start, str) or ana.out_start > 0:
+ val_str = _format_val(ana.out_start, precision)
+ cur_line += f' outputstart={val_str} strobestart={val_str}'
+ if ana.strobe != 0:
+ cur_line += f' strobeperiod={_format_val(ana.strobe)}'
+ if has_ic:
+ cur_line += ' ic=node'
+ elif isinstance(ana, AnalysisSweep1D):
+ par = ana.param
+ sweep_str = _get_sweep_str(par, ana.sweep, precision)
+ cur_line += ' '
+ cur_line += sweep_str
+ if isinstance(ana, AnalysisAC) and par != 'freq':
+ cur_line += f' freq={float_to_si_string(ana.freq, precision)}'
+
+ if isinstance(ana, AnalysisSP):
+ cur_line += f' ports=[{" ".join(ana.ports)}] paramtype={ana.param_type.name.lower()}'
+ elif isinstance(ana, AnalysisNoise):
+ if ana.out_probe:
+ cur_line += f' oprobe={ana.out_probe}'
+ elif not (hasattr(ana, 'p_port') and ana.p_port):
+ raise ValueError('Either specify out_probe, or specify p_port and n_port')
+ if ana.in_probe:
+ cur_line += f' iprobe={ana.in_probe}'
+ elif isinstance(ana, AnalysisPSS):
+ if ana.period == 0.0 and ana.fund == 0.0 and ana.autofund is False:
+ raise ValueError('For PSS simulation, either specify period or fund, '
+ 'or set autofund = True')
+ if ana.period > 0.0:
+ cur_line += f' period={ana.period}'
+ if ana.fund > 0.0:
+ cur_line += f' fund={ana.fund}'
+ if ana.autofund:
+ cur_line += f' autofund=yes'
+ else:
+ raise ValueError('Unknown analysis specification.')
+
+ opt_str = _get_options_str(ana.options)
+ if opt_str:
+ cur_line += ' '
+ cur_line += opt_str
+
+ if ana.save_outputs:
+ cur_line += ' save=selected'
+
+ lines.append(cur_line)
+
+
+def _write_save_statements(lines: List[str], save_outputs: Set[str]):
+    """Append a line-wrapped spectre ``save`` statement; outputs are sorted for determinism."""
+    cur_line = wrap_string(chain(['save'], sorted(save_outputs)))
+    lines.append(cur_line)
+
+
+def _format_val(val: Union[float, str], precision: int = 6) -> str:
+    """Format a value for the netlist: strings pass through, floats use SI notation."""
+    if isinstance(val, str):
+        return val
+    else:
+        return float_to_si_string(val, precision)
+
+
+def _process_hdf5(path: Path, rtol: float, atol: float) -> None:
+    """Remove the 'process' sweep dimension from every group in an HDF5 result file.
+
+    The file is re-saved only if at least one group was actually modified.
+    """
+    proc = 'process'
+    sim_data = load_sim_data_hdf5(path)
+    modified = False
+    for grp in sim_data.group_list:
+        sim_data.open_group(grp)
+        if proc in sim_data.sweep_params:
+            # remove_sweep returns True on success -- presumably only when the data
+            # agrees within rtol/atol across the dimension; TODO confirm
+            modified |= sim_data.remove_sweep(proc, rtol=rtol, atol=atol)
+
+    if modified:
+        save_sim_data_hdf5(sim_data, path)
diff --git a/src/bag/typing.py b/src/bag/typing.py
new file mode 100644
index 0000000..7ec1834
--- /dev/null
+++ b/src/bag/typing.py
@@ -0,0 +1,52 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Union, Tuple
+
+from .util.math import HalfInt
+
+# layout coordinate, in resolution units
+CoordType = int
+# an (x, y) point in coordinate units
+PointType = Tuple[CoordType, CoordType]
+
+# routing track index; may be fractional (HalfInt) or a plain float
+TrackType = Union[float, HalfInt]
+# block size tuple; presumably (layer, width, height in tracks) -- TODO confirm semantics
+SizeType = Tuple[int, HalfInt, HalfInt]
diff --git a/src/bag/util/__init__.py b/src/bag/util/__init__.py
new file mode 100644
index 0000000..495ac32
--- /dev/null
+++ b/src/bag/util/__init__.py
@@ -0,0 +1,45 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package defines various utility classes.
+"""
\ No newline at end of file
diff --git a/src/bag/util/cache.py b/src/bag/util/cache.py
new file mode 100644
index 0000000..5e1ac86
--- /dev/null
+++ b/src/bag/util/cache.py
@@ -0,0 +1,749 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines classes used to cache existing design masters
+"""
+
+from __future__ import annotations
+
+from typing import (
+ TYPE_CHECKING, Sequence, Dict, Set, Any, Optional, TypeVar, Type, Tuple, Iterator,
+ List, Mapping, Iterable
+)
+
+import abc
+import time
+from collections import OrderedDict
+
+from pybag.enum import DesignOutput, SupplyWrapMode
+from pybag.core import (
+ implement_yaml, implement_netlist, implement_gds, SUPPLY_SUFFIX, PySchCellView
+)
+
+from ..env import get_netlist_setup_file, get_gds_layer_map, get_gds_object_map
+from .search import get_new_name
+from .immutable import Param, to_immutable
+
+if TYPE_CHECKING:
+ from ..core import BagProject
+ from ..layout.tech import TechInfo
+
+MasterType = TypeVar('MasterType', bound='DesignMaster')
+DBType = TypeVar('DBType', bound='MasterDB')
+
+
+def format_cell_name(cell_name: str, rename_dict: Dict[str, str], name_prefix: str,
+                     name_suffix: str, exact_cell_names: Set[str],
+                     supply_wrap_mode: SupplyWrapMode) -> str:
+    """Return the final cell name after renaming, prefix/suffix, and supply-wrap suffix.
+
+    Names listed in ``exact_cell_names`` (after renaming) do not receive the
+    prefix/suffix decoration.
+    """
+    ans = rename_dict.get(cell_name, cell_name)
+    if ans not in exact_cell_names:
+        ans = name_prefix + ans + name_suffix
+    if supply_wrap_mode is not SupplyWrapMode.NONE:
+        ans += SUPPLY_SUFFIX
+
+    return ans
+
+
+class DesignMaster(abc.ABC):
+ """A design master instance.
+
+ This class represents a design master in the design database.
+
+ Parameters
+ ----------
+ master_db : MasterDB
+ the master database.
+ params : Param
+ the parameters dictionary.
+ key: Any
+ If not None, the unique ID for this master instance.
+ copy_state : Optional[Dict[str, Any]]
+ If not None, set content of this master from this dictionary.
+
+ Attributes
+ ----------
+ params : Param
+ the parameters dictionary.
+ """
+
+    def __init__(self, master_db: MasterDB, params: Param, *,
+                 key: Any = None, copy_state: Optional[Dict[str, Any]] = None) -> None:
+        self._master_db = master_db
+        self._cell_name = ''
+
+        if copy_state:
+            # restore all state from an existing master; the params argument is ignored
+            self._children = copy_state['children']
+            self._finalized = copy_state['finalized']
+            self._params = copy_state['params']
+            self._cell_name = copy_state['cell_name']
+            self._key = copy_state['key']
+        else:
+            # use ordered dictionary so we have deterministic dependency order
+            self._children = OrderedDict()
+            self._finalized = False
+
+            # set parameters
+            self._params = params
+            self._key = self.compute_unique_key(params) if key is None else key
+
+            # update design master signature
+            self._cell_name = get_new_name(self.get_master_basename(),
+                                           self.master_db.used_cell_names)
+
+    @classmethod
+    def get_qualified_name(cls) -> str:
+        """Returns the qualified name of this class (module.ClassName)."""
+        my_module = cls.__module__
+        # str.__class__.__module__ is the builtins module; builtins get no prefix
+        if my_module is None or my_module == str.__class__.__module__:
+            return cls.__name__
+        else:
+            return my_module + '.' + cls.__name__
+
+    @classmethod
+    def populate_params(cls, table: Dict[str, Any], params_info: Dict[str, str],
+                        default_params: Dict[str, Any]) -> Param:
+        """Fill params dictionary with values from table and default_params.
+
+        Raises ValueError if a parameter listed in params_info is missing from
+        both table and default_params.
+        """
+        hidden_params = cls.get_hidden_params()
+
+        result = {}
+        for key, desc in params_info.items():
+            if key not in table:
+                if key not in default_params:
+                    raise ValueError('Parameter {} not specified. '
+                                     'Description:\n{}'.format(key, desc))
+                else:
+                    result[key] = default_params[key]
+            else:
+                result[key] = table[key]
+
+        # add hidden parameters
+        for name, value in hidden_params.items():
+            result[name] = table.get(name, value)
+
+        return Param(result)
+
+    @classmethod
+    def compute_unique_key(cls, params: Param) -> Any:
+        """Returns a unique hashable object (usually tuple or string) that represents this instance.
+
+        Parameters
+        ----------
+        params : Param
+            the parameters object. All default and hidden parameters have been processed already.
+
+        Returns
+        -------
+        unique_id : Any
+            a hashable unique ID representing the given parameters.
+        """
+        return cls.get_qualified_name(), params
+
+    @classmethod
+    def process_params(cls, params: Dict[str, Any]) -> Tuple[Param, Any]:
+        """Process the given parameters dictionary.
+
+        This method computes the final parameters dictionary from the user given one by
+        filling in default and hidden parameter values, and also compute the unique ID of
+        this master instance.
+
+        Parameters
+        ----------
+        params : Dict[str, Any]
+            the parameter dictionary specified by the user.
+
+        Returns
+        -------
+        params : Param
+            the processed parameters object.
+        unique_id : Any
+            a hashable unique ID representing the given parameters.
+        """
+        params_info = cls.get_params_info()
+        default_params = cls.get_default_param_values()
+        params = cls.populate_params(params, params_info, default_params)
+        return params, cls.compute_unique_key(params)
+
+    def update_signature(self, key: Any) -> None:
+        """Set a new unique key and re-derive the cell name from the database."""
+        self._key = key
+        self._cell_name = get_new_name(self.get_master_basename(), self.master_db.used_cell_names)
+
+    def get_copy_state_with(self, new_params: Param) -> Dict[str, Any]:
+        """Return a state dictionary used to copy this master with new parameters."""
+        return {
+            'children': self._children.copy(),
+            'finalized': self._finalized,
+            'params': new_params,
+            'cell_name': self._cell_name,
+            'key': self._key,
+        }
+
+    def get_copy_with(self: MasterType, new_params: Param) -> MasterType:
+        """Returns a copy of this master instance."""
+        copy_state = self.get_copy_state_with(new_params)
+        # the positional params argument is ignored when copy_state is given
+        return self.__class__(self._master_db, None, copy_state=copy_state)
+
+    @classmethod
+    def to_immutable_id(cls, val: Any) -> Any:
+        """Convert the given object to an immutable type for use as keys in dictionary.
+
+        Falls back to the object's own ``get_immutable_key()`` method when the
+        generic conversion raises ValueError for an unsupported type.
+        """
+        try:
+            return to_immutable(val)
+        except ValueError:
+            if hasattr(val, 'get_immutable_key') and callable(val.get_immutable_key):
+                return val.get_immutable_key()
+            else:
+                raise Exception('Unrecognized value %s with type %s' % (str(val), type(val)))
+
+    @classmethod
+    @abc.abstractmethod
+    def get_params_info(cls) -> Dict[str, str]:
+        """Returns a dictionary from parameter names to descriptions.
+
+        Returns
+        -------
+        param_info : Dict[str, str]
+            dictionary from parameter names to descriptions.
+        """
+        return {}
+
+    @classmethod
+    def get_default_param_values(cls) -> Dict[str, Any]:
+        """Returns a dictionary containing default parameter values.
+
+        Override this method to define default parameter values. As good practice,
+        you should avoid defining default values for technology-dependent parameters
+        (such as channel length, transistor width, etc.), but only define default
+        values for technology-independent parameters (such as number of tracks).
+
+        Returns
+        -------
+        default_params : Dict[str, Any]
+            dictionary of default parameter values.
+        """
+        return {}
+
+    @classmethod
+    def get_hidden_params(cls) -> Dict[str, Any]:
+        """Returns a dictionary of hidden parameter values.
+
+        Hidden parameters are parameters that are invisible to the user and only
+        used and computed internally.
+
+        Returns
+        -------
+        hidden_params : Dict[str, Any]
+            dictionary of hidden parameter values.
+        """
+        return {}
+
+    @abc.abstractmethod
+    def get_master_basename(self) -> str:
+        """Returns the base name to use for this instance.
+
+        Returns
+        -------
+        basename : str
+            the base name for this instance.
+        """
+        return ''
+
+    @abc.abstractmethod
+    def get_content(self, output_type: DesignOutput, rename_dict: Dict[str, str], name_prefix: str,
+                    name_suffix: str, shell: bool, exact_cell_names: Set[str],
+                    supply_wrap_mode: SupplyWrapMode) -> Tuple[str, Any]:
+        """Returns the content of this master instance.
+
+        Parameters
+        ----------
+        output_type : DesignOutput
+            the output type.
+        rename_dict : Dict[str, str]
+            the renaming dictionary.
+        name_prefix : str
+            the name prefix.
+        name_suffix : str
+            the name suffix.
+        shell : bool
+            True if we're just producing a shell content (i.e. just top level block).
+        exact_cell_names : Set[str]
+            set of cell names to keep exact (don't add prefix and suffix)
+        supply_wrap_mode : SupplyWrapMode
+            the netlisting supply wrap mode.
+
+        Returns
+        -------
+        cell_name : str
+            the master cell name.
+        content : Any
+            the master content data structure.
+        """
+        return '', None
+
+    @property
+    def master_db(self) -> MasterDB:
+        """Returns the database used to create design masters."""
+        return self._master_db
+
+    @property
+    def lib_name(self) -> str:
+        """The master library name"""
+        return self._master_db.lib_name
+
+    @property
+    def cell_name(self) -> str:
+        """The master cell name"""
+        return self._cell_name
+
+    @property
+    def key(self) -> Optional[Any]:
+        """A unique key representing this master."""
+        return self._key
+
+    @property
+    def finalized(self) -> bool:
+        """Returns True if this DesignMaster is finalized."""
+        return self._finalized
+
+    @property
+    def params(self) -> Param:
+        """The parameters object of this master."""
+        return self._params
+
+    def finalize(self) -> None:
+        """Finalize this master instance.
+        """
+        self._finalized = True
+
+    def add_child_key(self, child_key: object) -> None:
+        """Registers the given child key."""
+        self._children[child_key] = None
+
+    def clear_children_key(self) -> None:
+        """Remove all children keys."""
+        self._children.clear()
+
+    def children(self) -> Iterator[object]:
+        """Iterate over all children's key."""
+        return iter(self._children)
+
+
+class MasterDB(abc.ABC):
+ """A database of existing design masters.
+
+ This class keeps track of existing design masters and maintain design dependency hierarchy.
+
+ Parameters
+ ----------
+ lib_name : str
+ the library to put all generated templates in.
+ prj : Optional[BagProject]
+ the BagProject instance.
+ name_prefix : str
+ generated master name prefix.
+ name_suffix : str
+ generated master name suffix.
+ """
+
+    def __init__(self, lib_name: str, prj: Optional[BagProject] = None, name_prefix: str = '',
+                 name_suffix: str = '') -> None:
+
+        self._prj = prj
+        self._lib_name = lib_name
+        self._name_prefix = name_prefix
+        self._name_suffix = name_suffix
+
+        # cell names of all registered masters, used to avoid name collisions
+        self._used_cell_names: Set[str] = set()
+        self._key_lookup: Dict[Any, Any] = {}
+        # maps unique master key to the registered master instance
+        self._master_lookup: Dict[Any, DesignMaster] = {}
+
+    @property
+    def prj(self) -> BagProject:
+        """The BagProject instance (may be None if not supplied at construction)."""
+        return self._prj
+
+    @property
+    @abc.abstractmethod
+    def tech_info(self) -> TechInfo:
+        """TechInfo: the TechInfo object."""
+        pass
+
+    @property
+    def lib_name(self) -> str:
+        """Returns the master library name."""
+        return self._lib_name
+
+    @property
+    def cell_prefix(self) -> str:
+        """Returns the cell name prefix."""
+        return self._name_prefix
+
+    @cell_prefix.setter
+    def cell_prefix(self, new_val: str) -> None:
+        """Change the cell name prefix."""
+        self._name_prefix = new_val
+
+    @property
+    def cell_suffix(self) -> str:
+        """Returns the cell name suffix."""
+        return self._name_suffix
+
+    @property
+    def used_cell_names(self) -> Set[str]:
+        """The set of cell names used by registered masters."""
+        return self._used_cell_names
+
+    @cell_suffix.setter
+    def cell_suffix(self, new_val: str) -> None:
+        """Change the cell name suffix."""
+        self._name_suffix = new_val
+
+    def create_masters_in_db(self, output: DesignOutput, lib_name: str, content_list: List[Any],
+                             top_list: List[str],
+                             supply_wrap_mode: SupplyWrapMode = SupplyWrapMode.NONE,
+                             debug: bool = False, **kwargs: Any) -> None:
+        """Create the masters in the design database.
+
+        Parameters
+        ----------
+        output : DesignOutput
+            the output type.
+        lib_name : str
+            library to create the designs in.
+        content_list : List[Any]
+            a list of the master contents. Must be created in this order.
+        top_list : List[str]
+            list of top level cells.
+        supply_wrap_mode : SupplyWrapMode
+            the supply wrapping mode.
+        debug : bool
+            True to print debug messages
+        **kwargs : Any
+            parameters associated with the given output type.
+        """
+        start = time.time()
+        if output is DesignOutput.LAYOUT:
+            if self._prj is None:
+                raise ValueError('BagProject is not defined.')
+
+            # create layouts
+            self._prj.instantiate_layout(lib_name, content_list)
+        elif output is DesignOutput.GDS:
+            fname = kwargs['fname']
+            square_bracket = kwargs.get('square_bracket', False)
+
+            if square_bracket:
+                raise ValueError('square bracket GDS export not supported yet.')
+
+            lay_map = get_gds_layer_map()
+            obj_map = get_gds_object_map()
+            implement_gds(fname, lib_name, lay_map, obj_map, content_list)
+        elif output is DesignOutput.SCHEMATIC:
+            if self._prj is None:
+                raise ValueError('BagProject is not defined.')
+
+            self._prj.instantiate_schematic(lib_name, content_list)
+        elif output is DesignOutput.YAML:
+            fname = kwargs['fname']
+
+            implement_yaml(fname, content_list)
+        elif output.is_netlist or output.is_model:
+            fname = kwargs['fname']
+            flat = kwargs.get('flat', False)
+            shell = kwargs.get('shell', False)
+            top_subckt = kwargs.get('top_subckt', True)
+            square_bracket = kwargs.get('square_bracket', False)
+            rmin = kwargs.get('rmin', 2000)
+            precision = kwargs.get('precision', 6)
+            cv_info_list = kwargs.get('cv_info_list', [])
+            cv_info_out = kwargs.get('cv_info_out', None)
+            cv_netlist = kwargs.get('cv_netlist', '')
+
+            prim_fname = get_netlist_setup_file()
+            # the included netlist and its cellview info must be supplied together
+            if bool(cv_info_list) != bool(cv_netlist):
+                raise ValueError('cv_netlist and cv_info_list must be given together.')
+
+            implement_netlist(fname, content_list, top_list, output, flat, shell, top_subckt,
+                              square_bracket, rmin, precision, supply_wrap_mode, prim_fname,
+                              cv_info_list, cv_netlist, cv_info_out)
+        else:
+            raise ValueError('Unknown design output type: {}'.format(output.name))
+        end = time.time()
+
+        if debug:
+            print('design instantiation took %.4g seconds' % (end - start))
+
+    def clear(self):
+        """Clear all existing schematic masters."""
+        self._key_lookup.clear()
+        self._master_lookup.clear()
+
+    def new_master(self: MasterDB, gen_cls: Type[MasterType],
+                   params: Optional[Mapping[str, Any]] = None, debug: bool = False,
+                   **kwargs) -> MasterType:
+        """Create a generator instance.
+
+        Returns the cached master if one with the same unique key already exists;
+        otherwise constructs, finalizes, and registers a new one.
+
+        Parameters
+        ----------
+        gen_cls : Type[MasterType]
+            the generator class to instantiate. Overrides lib_name and cell_name.
+        params : Optional[Dict[str, Any]]
+            the parameter dictionary.
+        debug : bool
+            True to print debug messages.
+        **kwargs :
+            optional arguments for generator.
+
+        Returns
+        -------
+        master : MasterType
+            the generator instance.
+        """
+        if params is None:
+            params = {}
+
+        master_params, key = gen_cls.process_params(params)
+        test = self.find_master(key)
+        if test is not None:
+            if debug:
+                print('master cached')
+            return test
+
+        if debug:
+            print('finalizing master')
+        master = gen_cls(self, master_params, key=key, **kwargs)
+        start = time.time()
+        master.finalize()
+        end = time.time()
+        self.register_master(key, master)
+        if debug:
+            print('finalizing master took %.4g seconds' % (end - start))
+
+        return master
+
+    def find_master(self, key: Any) -> Optional[MasterType]:
+        """Return the registered master with the given key, or None if not found."""
+        return self._master_lookup.get(key, None)
+
+    def register_master(self, key: Any, master: MasterType) -> None:
+        """Register the master under the given key and reserve its cell name."""
+        self._master_lookup[key] = master
+        self._used_cell_names.add(master.cell_name)
+
+    def instantiate_master(self, output: DesignOutput, master: DesignMaster,
+                           top_cell_name: str = '', **kwargs) -> None:
+        """Instantiate the given master.
+
+        Parameters
+        ----------
+        output : DesignOutput
+            the design output type.
+        master : DesignMaster
+            the :class:`~bag.layout.template.TemplateBase` to instantiate.
+        top_cell_name : str
+            name of the top level cell.  If empty, a default name is used.
+        **kwargs : Any
+            optional arguments for batch_output().
+        """
+        self.batch_output(output, [(master, top_cell_name)], **kwargs)
+
+ def batch_output(self, output: DesignOutput, info_list: Sequence[Tuple[DesignMaster, str]],
+ debug: bool = False, rename_dict: Optional[Dict[str, str]] = None,
+ **kwargs: Any) -> None:
+ """create all given masters in the database.
+
+ Parameters
+ ----------
+ output : DesignOutput
+ The output type.
+ info_list : Sequence[Tuple[DesignMaster, str]]
+ Sequence of (master, cell_name) tuples to instantiate.
+ Use empty string cell_name to use default names.
+ debug : bool
+ True to print debugging messages
+ rename_dict : Optional[Dict[str, str]]
+ optional master cell renaming dictionary.
+ **kwargs : Any
+ parameters associated with the given output type.
+ """
+ supply_wrap_mode: SupplyWrapMode = kwargs.pop('supply_wrap_mode', SupplyWrapMode.NONE)
+ cv_info_list: List[PySchCellView] = kwargs.get('cv_info_list', [])
+ shell: bool = kwargs.get('shell', False)
+ exact_cell_names: Set[str] = kwargs.get('exact_cell_names', set())
+ prefix: str = kwargs.get('name_prefix', self._name_prefix)
+ suffix: str = kwargs.get('name_suffix', self._name_suffix)
+ empty_dict = {}
+
+ if cv_info_list:
+ # need to avoid name collision
+ cv_netlist_names = set((cv.cell_name for cv in cv_info_list))
+
+ # check that exact cell names won't collide with existing names in netlist
+ for name in exact_cell_names:
+ if name in cv_netlist_names or (SupplyWrapMode is not SupplyWrapMode.NONE and
+ (name + SUPPLY_SUFFIX) in cv_netlist_names):
+ raise ValueError(f'Cannot use name {name}, as it is already used by netlist.')
+
+ # get list of names already used by netlist, that we need to avoid
+ netlist_used_names = set(_netlist_used_names_iter(cv_netlist_names, prefix, suffix,
+ supply_wrap_mode))
+ else:
+ netlist_used_names = set()
+
+ # configure renaming dictionary. Verify that renaming dictionary is one-to-one.
+ rename: Dict[str, str] = {}
+ reverse_rename: Dict[str, str] = {}
+ if rename_dict:
+ # make sure user renaming won't cause conflicts
+ for key, val in rename_dict.items():
+ if key != val:
+ if val in reverse_rename:
+ # user renaming is not one-to-one
+ raise ValueError(f'Both {key} and {reverse_rename[val]} are '
+ f'renamed to {val}')
+ if val in netlist_used_names:
+ # user renaming will conflict with a name in the included netlist file
+ raise ValueError(f'Cannot rename {key} to {val}, name {val} used '
+ f'by the netlist.')
+ rename[key] = val
+ reverse_rename[val] = key
+
+ # compute names of generated blocks
+ top_list: List[str] = []
+ for m, name in info_list:
+ m_name = m.cell_name
+ if name and name != m_name:
+ # user wants to rename a generated block
+ if name in reverse_rename:
+ # we don't have one-to-one renaming
+ raise ValueError(f'Both {m_name} and {reverse_rename[name]} are '
+ f'renamed to {name}')
+ rename[m_name] = name
+ reverse_rename[name] = m_name
+ if name in netlist_used_names:
+ # user wants to rename to a name that will conflict with the netlist
+ raise ValueError(f'Cannot use name {name}, as it is already used by netlist.')
+
+ top_list.append(format_cell_name(name, empty_dict, prefix, suffix, exact_cell_names,
+ supply_wrap_mode))
+ if name in self._used_cell_names:
+ # name is an already used name, so we need to rename other blocks using
+ # this name to something else
+ name2 = get_new_name(name, self._used_cell_names, reverse_rename,
+ netlist_used_names)
+ rename[name] = name2
+ reverse_rename[name2] = name
+ else:
+ if m_name in netlist_used_names:
+ if name:
+ raise ValueError(f'Cannot use name {m_name}, '
+ f'as it is already used by netlist.')
+ else:
+ name2 = get_new_name(m_name, self._used_cell_names, reverse_rename,
+ netlist_used_names)
+ rename[m_name] = name2
+ reverse_rename[name2] = m_name
+ print(f'renaming {m_name} to {name2}')
+ top_list.append(format_cell_name(name2, empty_dict, prefix, suffix,
+ exact_cell_names, supply_wrap_mode))
+ else:
+ top_list.append(format_cell_name(m_name, empty_dict, prefix, suffix,
+ exact_cell_names, supply_wrap_mode))
+
+ if debug:
+ print('Retrieving master contents')
+
+ # use ordered dict so that children are created before parents.
+ info_dict = OrderedDict()
+ start = time.time()
+ for master, _ in info_list:
+ self._batch_output_helper(info_dict, master, rename, reverse_rename, netlist_used_names)
+ end = time.time()
+
+ content_list = [master.get_content(output, rename, prefix, suffix, shell,
+ exact_cell_names, supply_wrap_mode)
+ for master in info_dict.values()]
+
+ if debug:
+ print(f'master content retrieval took {end - start:.4g} seconds')
+
+ self.create_masters_in_db(output, self.lib_name, content_list, top_list,
+ supply_wrap_mode=supply_wrap_mode, debug=debug, **kwargs)
+
+    def _batch_output_helper(self, info_dict: Dict[str, DesignMaster], master: DesignMaster,
+                             rename: Dict[str, str], rev_rename: Dict[str, str],
+                             used_names: Set[str]) -> None:
+        """Helper method for batch_output(); collects masters depth-first (children first).
+
+        Parameters
+        ----------
+        info_dict : Dict[str, DesignMaster]
+            dictionary from existing master cell name to master objects.
+        master : DesignMaster
+            the master object to create.
+        rename : Dict[str, str]
+            renaming dictionary; updated in-place when a name conflict is found.
+        rev_rename : Dict[str, str]
+            the reverse of ``rename``; updated in-place.
+        used_names : Set[str]
+            names already taken by an included netlist; these must be avoided.
+        """
+        # get template master for all children
+        for master_key in master.children():
+            child_temp = self._master_lookup[master_key]
+            if child_temp.cell_name not in info_dict:
+                self._batch_output_helper(info_dict, child_temp, rename, rev_rename, used_names)
+
+        # get template master for this cell.
+        cur_name = master.cell_name
+        if cur_name not in rename and cur_name in used_names:
+            # netlist already uses this name; rename this master to a fresh name
+            name2 = get_new_name(cur_name, self._used_cell_names, rev_rename, used_names)
+            rename[cur_name] = name2
+            rev_rename[name2] = cur_name
+        info_dict[cur_name] = self._master_lookup[master.key]
+
+    def exclude_model(self, lib_name: str, cell_name: str) -> bool:
+        """True to exclude the given schematic generator when generating behavioral models."""
+        if self._prj is None:
+            raise ValueError('BagProject is not defined.')
+        return self._prj.exclude_model(lib_name, cell_name)
+
+
+def _netlist_used_names_iter(used_names: Set[str], prefix: str, suffix: str,
+                             sup_wrap_mode: SupplyWrapMode) -> Iterable[str]:
+    """Yield base cell names (prefix/suffix stripped) already taken by the netlist.
+
+    For every netlist name of the form ``prefix + base + suffix`` -- or
+    ``prefix + base + suffix + SUPPLY_SUFFIX`` when supply wrapping is enabled --
+    yields ``base``, so generated masters can avoid those base names.
+    """
+    pre_len = len(prefix)
+    suf_len = len(suffix)
+    sup_suffix = suffix + SUPPLY_SUFFIX
+    sup_len = len(sup_suffix)
+    for name in used_names:
+        # Error checking with exact_names
+        if name.startswith(prefix):
+            if name.endswith(suffix):
+                yield name[pre_len:len(name) - suf_len]
+            if sup_wrap_mode is not SupplyWrapMode.NONE and name.endswith(sup_suffix):
+                yield name[pre_len:len(name) - sup_len]
diff --git a/src/bag/util/immutable.py b/src/bag/util/immutable.py
new file mode 100644
index 0000000..0e5060d
--- /dev/null
+++ b/src/bag/util/immutable.py
@@ -0,0 +1,282 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines various immutable and hashable data types.
+"""
+
from __future__ import annotations

from typing import TypeVar, Any, Generic, Dict, Iterable, Tuple, Union, Optional, List, overload

import sys
import bisect
# The ABCs must come from collections.abc: importing them from collections has
# been deprecated since Python 3.3 and was removed in Python 3.10.
from collections.abc import Hashable, Mapping, Sequence
+
# Generic type parameters for the immutable containers defined below.
T = TypeVar('T')
U = TypeVar('U')
# Union of the kinds of values to_immutable() may return.
ImmutableType = Union[None, Hashable, Tuple[Hashable, ...]]
+
+
def combine_hash(a: int, b: int) -> int:
    """Combine the two given hash values.

    Parameters
    ----------
    a : int
        the first hash value.
    b : int
        the second hash value.

    Returns
    -------
    hash : int
        the combined hash value.
    """
    # algorithm taken from boost::hash_combine; masking with sys.maxsize keeps
    # the result inside the platform's non-negative hash range.
    return sys.maxsize & (a ^ (b + 0x9e3779b9 + (a << 6) + (a >> 2)))
+
+
class ImmutableList(Hashable, Sequence, Generic[T]):
    """An immutable homogeneous list.

    The hash is computed once at construction time, which requires that the
    underlying storage never changes afterwards.
    """

    def __init__(self, values: Optional[Sequence[T]] = None) -> None:
        if values is None:
            self._content = []
            self._hash = 0
        elif isinstance(values, ImmutableList):
            # share storage and cached hash; the source is already frozen.
            self._content = values._content
            self._hash = values._hash
        else:
            # Bug fix: make a defensive copy.  The original stored the caller's
            # sequence by reference, so mutating it afterwards would silently
            # invalidate the cached hash and break equality/lookup.
            self._content = list(values)
            self._hash = 0
            for v in self._content:
                self._hash = combine_hash(self._hash, 0 if v is None else hash(v))

    @classmethod
    def sequence_equal(cls, a: Sequence[T], b: Sequence[T]) -> bool:
        """Return True if the two sequences have equal lengths and elements."""
        if len(a) != len(b):
            return False
        for av, bv in zip(a, b):
            if av != bv:
                return False
        return True

    def __repr__(self) -> str:
        return repr(self._content)

    def __eq__(self, other: Any) -> bool:
        # hash comparison is a cheap reject before the element-wise scan.
        return (isinstance(other, ImmutableList) and self._hash == other._hash and
                self.sequence_equal(self._content, other._content))

    def __hash__(self) -> int:
        return self._hash

    def __bool__(self) -> bool:
        return len(self) > 0

    def __len__(self) -> int:
        return len(self._content)

    def __iter__(self) -> Iterable[T]:
        return iter(self._content)

    @overload
    def __getitem__(self, idx: int) -> T: ...
    @overload
    def __getitem__(self, idx: slice) -> ImmutableList[T]: ...

    def __getitem__(self, idx) -> T:
        if isinstance(idx, int):
            return self._content[idx]
        # slicing returns a new immutable list of the selected elements.
        return ImmutableList(self._content[idx])

    def __contains__(self, val: Any) -> bool:
        return val in self._content

    def index(self, *args, **kwargs) -> int:
        """Same contract as list.index()."""
        return self._content.index(*args, **kwargs)

    def to_list(self) -> List[Any]:
        """Return a mutable shallow copy of this list."""
        return list(self._content)

    def to_yaml(self) -> List[Any]:
        """Convert to plain lists/dicts suitable for YAML serialization."""
        ans = []
        for val in self._content:
            if isinstance(val, ImmutableList):
                ans.append(val.to_yaml())
            elif isinstance(val, ImmutableSortedDict):
                ans.append(val.to_yaml())
            else:
                ans.append(val)

        return ans
+
+
class ImmutableSortedDict(Hashable, Mapping, Generic[T, U]):
    """An immutable dictionary with sorted keys.

    Keys and values are stored in two parallel ImmutableList objects sorted by
    key, so lookups are binary searches and equal dictionaries hash equally
    regardless of the insertion order of the source mapping.
    """

    def __init__(self,
                 table: Optional[Mapping[T, Any]] = None) -> None:
        if table is not None:
            if isinstance(table, ImmutableSortedDict):
                # copy-construction: share storage and the cached hash.
                self._keys = table._keys
                self._vals = table._vals
                self._hash = table._hash
            else:
                # values are frozen recursively so the whole structure is hashable.
                self._keys = ImmutableList(sorted(table.keys()))
                self._vals = ImmutableList([to_immutable(table[k]) for k in self._keys])
                self._hash = combine_hash(hash(self._keys), hash(self._vals))
        else:
            self._keys = ImmutableList([])
            self._vals = ImmutableList([])
            self._hash = combine_hash(hash(self._keys), hash(self._vals))

    def __repr__(self) -> str:
        return repr(list(zip(self._keys, self._vals)))

    def __eq__(self, other: Any) -> bool:
        # hash comparison is a cheap reject before the element-wise checks.
        return (isinstance(other, ImmutableSortedDict) and
                self._hash == other._hash and
                self._keys == other._keys and
                self._vals == other._vals)

    def __hash__(self) -> int:
        return self._hash

    def __bool__(self) -> bool:
        return len(self) > 0

    def __len__(self) -> int:
        return len(self._keys)

    def __iter__(self) -> Iterable[T]:
        return iter(self._keys)

    def __contains__(self, item: Any) -> bool:
        # binary search on the sorted key list.
        idx = bisect.bisect_left(self._keys, item)
        return idx != len(self._keys) and self._keys[idx] == item

    def __getitem__(self, item: T) -> U:
        idx = bisect.bisect_left(self._keys, item)
        if idx == len(self._keys) or self._keys[idx] != item:
            raise KeyError('Key not found: {}'.format(item))
        return self._vals[idx]

    def get(self, item: T, default: Optional[U] = None) -> Optional[U]:
        """Like dict.get(): return the value for item, or default when missing."""
        idx = bisect.bisect_left(self._keys, item)
        if idx == len(self._keys) or self._keys[idx] != item:
            return default
        return self._vals[idx]

    def keys(self) -> Iterable[T]:
        return iter(self._keys)

    def values(self) -> Iterable[U]:
        return iter(self._vals)

    def items(self) -> Iterable[Tuple[T, U]]:
        return zip(self._keys, self._vals)

    def copy(self, append: Optional[Dict[T, Any]] = None,
             remove: Optional[Sequence[T]] = None) -> ImmutableSortedDict[T, U]:
        """Return a copy with the entries of append added/updated and remove deleted."""
        if append is None and remove is None:
            return ImmutableSortedDict(self)
        else:
            tmp = self.to_dict()
            if append is not None:
                tmp.update(append)
            if remove is not None:
                for key in remove:
                    # missing keys are ignored on purpose (best-effort removal).
                    tmp.pop(key, None)
            return ImmutableSortedDict(tmp)

    def to_dict(self) -> Dict[T, U]:
        """Return a mutable shallow copy as a plain dict."""
        return dict(zip(self._keys, self._vals))

    def to_yaml(self) -> Dict[str, Any]:
        """Convert to plain dicts/lists suitable for YAML serialization."""
        ans = {}
        for k, v in self.items():
            if isinstance(v, ImmutableSortedDict):
                ans[k] = v.to_yaml()
            elif isinstance(v, ImmutableList):
                ans[k] = v.to_yaml()
            else:
                ans[k] = v
        return ans
+
+
# Type alias: generator parameter dictionaries are immutable string-keyed dicts.
Param = ImmutableSortedDict[str, Any]
+
+
def to_immutable(obj: Any) -> ImmutableType:
    """Convert the given Python object into an immutable type.

    Objects that already hash successfully are returned unchanged; tuples,
    lists, sets, and dicts are converted recursively.  Anything else raises
    ValueError.
    """
    if obj is None:
        return obj
    if isinstance(obj, Hashable):
        # isinstance() alone is not enough: a tuple is Hashable even when it
        # contains un-hashable elements, so actually try hashing it.
        try:
            hash(obj)
        except TypeError:
            pass
        else:
            return obj
    if isinstance(obj, tuple):
        return tuple(to_immutable(v) for v in obj)
    if isinstance(obj, list):
        return ImmutableList([to_immutable(v) for v in obj])
    if isinstance(obj, set):
        # sort for a deterministic ordering so equal sets convert equally.
        return ImmutableList([to_immutable(v) for v in sorted(obj)])
    if isinstance(obj, dict):
        return ImmutableSortedDict(obj)

    raise ValueError('Cannot convert the following object to immutable type: {}'.format(obj))
+
+
def update_recursive(table, value, *args) -> None:
    """Set value at the nested key path given by args, copying each
    intermediate container along the path before mutating it.

    With no keys this is a no-op; with one key it is a plain assignment.
    """
    if not args:
        return
    cur = table
    for key in args[:-1]:
        # replace the intermediate container with a copy before descending.
        cur[key] = nxt = cur[key].copy()
        cur = nxt
    cur[args[-1]] = value
diff --git a/src/bag/util/importlib.py b/src/bag/util/importlib.py
new file mode 100644
index 0000000..b08519e
--- /dev/null
+++ b/src/bag/util/importlib.py
@@ -0,0 +1,73 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines various import helper methods.
+"""
+
+from typing import Type, Any, Union
+
+import importlib
+
+
def import_class(class_str: Union[Type[Any], str]) -> Any:
    """Given a Python class string, import and return that Python class.

    Parameters
    ----------
    class_str : Union[Type[Any], str]
        a fully qualified Python class string (e.g. ``'pkg.module.MyClass'``),
        or the class object itself.

    Returns
    -------
    py_class : Any
        the resolved Python class.  A non-string argument is returned as-is.
    """
    if not isinstance(class_str, str):
        # already a class object; nothing to import.
        return class_str

    module_str, _, obj_name = class_str.rpartition('.')
    module = importlib.import_module(module_str)
    return getattr(module, obj_name)
diff --git a/src/bag/util/interval.py b/src/bag/util/interval.py
new file mode 100644
index 0000000..9bff42c
--- /dev/null
+++ b/src/bag/util/interval.py
@@ -0,0 +1,48 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides data structure that keeps track of intervals.
+"""
+
+# noinspection PyUnresolvedReferences
+from pybag.core import PyDisjointIntervals as IntervalSet
diff --git a/src/bag/util/logging.py b/src/bag/util/logging.py
new file mode 100644
index 0000000..a6cf5ca
--- /dev/null
+++ b/src/bag/util/logging.py
@@ -0,0 +1,48 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pybag.enum import LogLevel
+from pybag.core import FileLogger
+
+
class LoggingBase:
    """Base class that logs through a pybag FileLogger.

    Subclasses report via log()/warn()/error(); error() both records the
    message and raises ValueError so failures are propagated.
    """

    def __init__(self, log_name: str, log_file: str, log_level: LogLevel = LogLevel.DEBUG) -> None:
        self._logger = FileLogger(log_name, log_file, log_level)
        # NOTE(review): the level is passed to the constructor and set again
        # here — presumably redundant; confirm against FileLogger's behavior.
        self._logger.set_level(log_level)

    @property
    def log_file(self) -> str:
        # base name of the file this logger writes to.
        return self._logger.log_basename

    @property
    def log_level(self) -> LogLevel:
        # the logger's current level.
        return self._logger.level

    @property
    def logger(self) -> FileLogger:
        # underlying logger, for callers needing direct access.
        return self._logger

    def log(self, msg: str, level: LogLevel = LogLevel.INFO) -> None:
        """Log msg at the given level (INFO by default)."""
        self._logger.log(level, msg)

    def error(self, msg: str) -> None:
        """Log msg at ERROR level, then raise ValueError(msg)."""
        self._logger.log(LogLevel.ERROR, msg)
        raise ValueError(msg)

    def warn(self, msg: str) -> None:
        """Log msg at WARN level."""
        self._logger.log(LogLevel.WARN, msg)

    def set_log_level(self, level: LogLevel) -> None:
        """Change the logger's level."""
        self._logger.set_level(level)
diff --git a/src/bag/util/math.py b/src/bag/util/math.py
new file mode 100644
index 0000000..bd640ec
--- /dev/null
+++ b/src/bag/util/math.py
@@ -0,0 +1,375 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from typing import Any, Mapping
+
+import ast
+import operator
+from math import trunc, ceil, floor
+from numbers import Integral, Real
+
+
class HalfInt(Integral):
    """A class that represents a half integer.

    Internally stores TWICE the represented value (``_val``) as a plain int,
    so every HalfInt is an exact multiple of 0.5 with no floating-point error.
    """

    def __init__(self, dbl_val: Any) -> None:
        # dbl_val is twice the represented value; use convert() for raw numbers.
        if isinstance(dbl_val, Integral):
            self._val = int(dbl_val)
        else:
            raise ValueError('HalfInt internal value must be an integer.')

    @classmethod
    def convert(cls, val: Any) -> HalfInt:
        """Convert a number to HalfInt; raises ValueError unless it is an
        exact multiple of 0.5."""
        if isinstance(val, HalfInt):
            return val
        elif isinstance(val, Integral):
            return HalfInt(2 * int(val))
        elif isinstance(val, Real):
            tmp = float(2 * val)
            if tmp.is_integer():
                return HalfInt(int(tmp))
        raise ValueError('Cannot convert {} type {} to HalfInt.'.format(val, type(val)))

    @property
    def value(self) -> float:
        # int when the value is whole, otherwise a float ending in .5
        q, r = divmod(self._val, 2)
        return q if r == 0 else q + 0.5

    @property
    def is_integer(self) -> bool:
        # True when the represented value is a whole integer.
        return self._val % 2 == 0

    @property
    def dbl_value(self) -> int:
        # the raw internal representation (twice the value).
        return self._val

    def div2(self, round_up: bool = False) -> HalfInt:
        """Divide the value by 2, rounding to the half-integer grid."""
        q, r = divmod(self._val, 2)
        # (r and round_up) is 1 only when there is a remainder and round_up is set.
        return HalfInt(q + (r and round_up))

    def to_string(self) -> str:
        """Plain decimal string, e.g. '3' or '3.5'."""
        q, r = divmod(self._val, 2)
        if r == 0:
            return '{:d}'.format(q)
        return '{:d}.5'.format(q)

    def up(self) -> HalfInt:
        # increase by 0.5
        return HalfInt(self._val + 1)

    def down(self) -> HalfInt:
        # decrease by 0.5
        return HalfInt(self._val - 1)

    def up_even(self, flag: bool) -> HalfInt:
        # when flag is set, round a .5 value up to the next integer
        # (self._val & flag adds the LSB only if flag is truthy).
        return HalfInt(self._val + (self._val & flag))

    def down_even(self, flag: bool) -> HalfInt:
        # when flag is set, round a .5 value down to the previous integer.
        return HalfInt(self._val - (self._val & flag))

    def __str__(self):
        return repr(self)

    def __repr__(self):
        return 'HalfInt({})'.format(self._val / 2)

    def __hash__(self):
        # hash the real value so it matches the equal int/float in dict lookups.
        return hash(self._val / 2)

    # --- comparisons: non-HalfInt operands are compared via their doubled value ---

    def __eq__(self, other):
        if isinstance(other, HalfInt):
            return self._val == other._val
        return self._val == 2 * other

    def __ne__(self, other):
        return not (self == other)

    def __le__(self, other):
        if isinstance(other, HalfInt):
            return self._val <= other._val
        return self._val <= 2 * other

    def __lt__(self, other):
        if isinstance(other, HalfInt):
            return self._val < other._val
        return self._val < 2 * other

    def __ge__(self, other):
        return not (self < other)

    def __gt__(self, other):
        return not (self <= other)

    # --- arithmetic: operations whose result falls off the half-integer grid
    #     raise ValueError instead of returning an inexact value ---

    def __add__(self, other):
        other = HalfInt.convert(other)
        return HalfInt(self._val + other._val)

    def __sub__(self, other):
        return self + (-other)

    def __mul__(self, other):
        other = HalfInt.convert(other)
        # product of doubled values is 4x the result; dividing by 2 leaves 2x.
        q, r = divmod(self._val * other._val, 2)
        if r == 0:
            return HalfInt(q)

        raise ValueError('result is not a HalfInt.')

    def __truediv__(self, other):
        other = HalfInt.convert(other)
        q, r = divmod(2 * self._val, other._val)
        if r == 0:
            return HalfInt(q)

        raise ValueError('result is not a HalfInt.')

    def __floordiv__(self, other):
        other = HalfInt.convert(other)
        # the quotient is dimensionless, so multiply by 2 to re-enter dbl units.
        return HalfInt(2 * (self._val // other._val))

    def __mod__(self, other):
        other = HalfInt.convert(other)
        # remainder of doubled values is already in dbl units.
        return HalfInt(self._val % other._val)

    def __divmod__(self, other):
        other = HalfInt.convert(other)
        q, r = divmod(self._val, other._val)
        return HalfInt(2 * q), HalfInt(r)

    def __pow__(self, other, modulus=None):
        # NOTE(review): the modulus argument is accepted but ignored — confirm
        # callers never pass a three-argument pow().
        other = HalfInt.convert(other)
        if self.is_integer and other.is_integer:
            return HalfInt(2 * (self._val // 2)**(other._val // 2))
        raise ValueError('result is not a HalfInt.')

    # --- bitwise operators are meaningless for half integers and always raise ---

    def __lshift__(self, other):
        raise TypeError('Cannot lshift HalfInt')

    def __rshift__(self, other):
        raise TypeError('Cannot rshift HalfInt')

    def __and__(self, other):
        raise TypeError('Cannot and HalfInt')

    def __xor__(self, other):
        raise TypeError('Cannot xor HalfInt')

    def __or__(self, other):
        raise TypeError('Cannot or HalfInt')

    # --- reflected operators: convert the left operand and delegate ---

    def __radd__(self, other):
        return self + other

    def __rsub__(self, other):
        return (-self) + other

    def __rmul__(self, other):
        return self * other

    def __rtruediv__(self, other):
        return HalfInt.convert(other) / self

    def __rfloordiv__(self, other):
        return HalfInt.convert(other) // self

    def __rmod__(self, other):
        return HalfInt.convert(other) % self

    def __rdivmod__(self, other):
        return HalfInt.convert(other).__divmod__(self)

    def __rpow__(self, other):
        return HalfInt.convert(other)**self

    def __rlshift__(self, other):
        raise TypeError('Cannot lshift HalfInt')

    def __rrshift__(self, other):
        raise TypeError('Cannot rshift HalfInt')

    def __rand__(self, other):
        raise TypeError('Cannot and HalfInt')

    def __rxor__(self, other):
        raise TypeError('Cannot xor HalfInt')

    def __ror__(self, other):
        raise TypeError('Cannot or HalfInt')

    # --- in-place operators return new objects (HalfInt is immutable) ---

    def __iadd__(self, other):
        return self + other

    def __isub__(self, other):
        return self - other

    def __imul__(self, other):
        return self * other

    def __itruediv__(self, other):
        return self / other

    def __ifloordiv__(self, other):
        return self // other

    def __imod__(self, other):
        return self % other

    def __ipow__(self, other):
        return self ** other

    def __ilshift__(self, other):
        raise TypeError('Cannot lshift HalfInt')

    def __irshift__(self, other):
        raise TypeError('Cannot rshift HalfInt')

    def __iand__(self, other):
        raise TypeError('Cannot and HalfInt')

    def __ixor__(self, other):
        raise TypeError('Cannot xor HalfInt')

    def __ior__(self, other):
        raise TypeError('Cannot or HalfInt')

    def __neg__(self):
        return HalfInt(-self._val)

    def __pos__(self):
        return HalfInt(self._val)

    def __abs__(self):
        return HalfInt(abs(self._val))

    def __invert__(self):
        # NOTE(review): returns the arithmetic negation, not a bitwise
        # inversion — presumably intentional for this numeric type; confirm.
        return -self

    def __complex__(self):
        raise TypeError('Cannot cast to complex')

    def __int__(self):
        # only whole values can be cast to int.
        if self._val % 2 == 1:
            raise ValueError('Not an integer.')
        return self._val // 2

    def __float__(self):
        return self._val / 2

    def __index__(self):
        return int(self)

    # --- rounding: all return HalfInt objects with integer values ---

    def __round__(self, ndigits=0):
        # NOTE(review): ndigits is ignored; always rounds to the nearest
        # integer (banker's rounding via round()) — confirm intended.
        if self.is_integer:
            return HalfInt(self._val)
        else:
            return HalfInt(round(self._val / 2) * 2)

    def __trunc__(self):
        if self.is_integer:
            return HalfInt(self._val)
        else:
            return HalfInt(trunc(self._val / 2) * 2)

    def __floor__(self):
        if self.is_integer:
            return HalfInt(self._val)
        else:
            return HalfInt(floor(self._val / 2) * 2)

    def __ceil__(self):
        if self.is_integer:
            return HalfInt(self._val)
        else:
            return HalfInt(ceil(self._val / 2) * 2)
+
+
# noinspection PyPep8Naming,PyMethodMayBeStatic
class Calculator(ast.NodeVisitor):
    """A simple calculator.

    Evaluates arithmetic expressions over a fixed variable namespace by
    walking the Python AST; only the operators in _OP_MAP, numeric literals,
    and plain variable names are supported.

    Modified from:
    https://stackoverflow.com/questions/33029168/how-to-calculate-an-equation-in-a-string-python

    user mgilson said in a comment that he agrees to distribute code with Apache 2.0 license.
    """
    # maps AST operator node types to the corresponding callables.
    _OP_MAP = {
        ast.Add: operator.add,
        ast.Sub: operator.sub,
        ast.Mult: operator.mul,
        ast.Div: operator.truediv,
        ast.Invert: operator.neg,
        ast.FloorDiv: operator.floordiv,
        # bug fix: unary operators were listed (Invert) but never dispatched
        # because visit_UnaryOp was missing, so '-x' silently evaluated to None.
        ast.USub: operator.neg,
        ast.UAdd: operator.pos,
    }

    def __init__(self, namespace: Mapping[str, Any]) -> None:
        super().__init__()
        self._calc_namespace = namespace

    def __getitem__(self, name: str) -> Any:
        return self._calc_namespace[name]

    @property
    def namespace(self) -> Mapping[str, Any]:
        """The variable namespace used when evaluating expressions."""
        return self._calc_namespace

    def visit_BinOp(self, node):
        left = self.visit(node.left)
        right = self.visit(node.right)
        return self._OP_MAP[type(node.op)](left, right)

    def visit_UnaryOp(self, node):
        # bug fix: handle unary expressions such as '-x', '+x', and '~x'.
        return self._OP_MAP[type(node.op)](self.visit(node.operand))

    def visit_Constant(self, node):
        # ast.Num is deprecated since Python 3.8; literals parse as Constant.
        return node.value

    def visit_Num(self, node):
        # kept for backwards compatibility with pre-3.8 ASTs.
        return node.n

    def visit_Expr(self, node):
        return self.visit(node.value)

    def visit_Name(self, node):
        return self._calc_namespace[node.id]

    def eval(self, expression: str):
        """Parse and evaluate the given expression string."""
        tree = ast.parse(expression)
        return self.visit(tree.body[0])

    @classmethod
    def evaluate(cls, expr: str, namespace: Mapping[str, Any]):
        """Convenience method: evaluate expr in the given namespace."""
        return cls(namespace).eval(expr)
diff --git a/src/bag/util/misc.py b/src/bag/util/misc.py
new file mode 100644
index 0000000..d1a835c
--- /dev/null
+++ b/src/bag/util/misc.py
@@ -0,0 +1,40 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+
def _info(etype, value, tb):
    """Exception hook that drops into pdb post-mortem on uncaught exceptions.

    Falls back to the default excepthook when running interactively or when
    stderr is not attached to a terminal (e.g. output redirected to a file).
    """
    if hasattr(sys, 'ps1') or not sys.stderr.isatty():
        # we are in interactive mode or we don't have a tty-like
        # device, so we call the default hook
        sys.__excepthook__(etype, value, tb)
    else:
        # deferred imports: only needed on the debugger path.
        import pdb
        import traceback
        import bdb

        if etype is bdb.BdbQuit:
            # if user activated debugger and quits, do not trap the quitting exception.
            return
        # we are NOT in interactive mode, print the exception...
        traceback.print_exception(etype, value, tb)
        print()
        # ...then start the debugger in post-mortem mode.
        pdb.post_mortem(tb)
+
+
def register_pdb_hook():
    """Install _info as the global excepthook so uncaught exceptions open pdb."""
    sys.excepthook = _info
diff --git a/src/bag/util/parse.py b/src/bag/util/parse.py
new file mode 100644
index 0000000..872a950
--- /dev/null
+++ b/src/bag/util/parse.py
@@ -0,0 +1,92 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines parsing utility methods.
+"""
+
+import ast
+
+
class ExprVarScanner(ast.NodeVisitor):
    """
    This node visitor collects all variable names found in the
    AST, and excludes names of functions. Variables having
    dotted names are not supported.
    """
    def __init__(self):
        # set of all variable names collected so far.
        self.varnames = set()

    # noinspection PyPep8Naming
    def visit_Name(self, node):
        self.varnames.add(node.id)

    # noinspection PyPep8Naming
    def visit_Call(self, node):
        # skip the callee when it is a plain name (function names are not
        # variables), but still scan computed callees like (f or g)(x).
        if not isinstance(node.func, ast.Name):
            self.visit(node.func)
        for arg in node.args:
            self.visit(arg)
        # bug fix: keyword-argument values were previously never visited,
        # so variables in calls like f(x=y) were silently dropped.
        for kwarg in node.keywords:
            self.visit(kwarg.value)

    # noinspection PyPep8Naming
    def visit_Attribute(self, node):
        # ignore attributes (dotted names are not supported).
        pass
+
+
def get_variables(expr):
    """Parse the given Python expression and return a list of all variables.

    Parameters
    ----------
    expr : str
        An expression string that we want to parse for variable names.

    Returns
    -------
    var_list : list[str]
        Names of variables from the given expression (unordered).
    """
    scanner = ExprVarScanner()
    scanner.visit(ast.parse(expr, mode='exec'))
    return list(scanner.varnames)
diff --git a/src/bag/util/search.py b/src/bag/util/search.py
new file mode 100644
index 0000000..2223ff8
--- /dev/null
+++ b/src/bag/util/search.py
@@ -0,0 +1,801 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides search related utilities.
+"""
+
+from typing import Optional, Callable, Any, Container, Iterable, List, Tuple, Dict, Union
+
+from sortedcontainers import SortedList
+import math
+from collections import namedtuple
+
# Result container for the minimize_cost_* searches below:
#   x    : minimum input satisfying f(x) >= vmin, or None if none was found.
#   xmax : input at which f attains its maximum (set only when x is None).
#   vmax : maximum observed value of f (set only when x is None).
#   nfev : total number of function evaluations performed.
MinCostResult = namedtuple('MinCostResult', ['x', 'xmax', 'vmax', 'nfev'])
+
+
class BinaryIterator:
    """A class that performs binary search over integers.

    This class supports both bounded or unbounded binary search, and
    you can also specify a step size.

    Parameters
    ----------
    low : int
        the lower bound (inclusive).
    high : Optional[int]
        the upper bound (exclusive). None for unbounded binary search.
    step : int
        the step size. All return values will be low + N * step
    search_step : int
        the unbounded binary search step size, in units of step.
        This is only used when trying to find the upper bound.
    """

    def __init__(self, low: int, high: Optional[int], step: int = 1, search_step: int = 1) -> None:
        if not isinstance(low, int) or not isinstance(step, int):
            raise ValueError('low and step must be integers.')

        # The search runs over the index N in value = low + N * step;
        # _low/_high/_current below are all in units of N.
        self._offset = low
        self._step = step
        self._low = 0
        # value recorded by save()/save_info(); None until first save
        self._save_marker: Optional[int] = None
        self._save_info = None
        # sorted (index, value) pairs used by _check_monotonicity()
        self._slist: SortedList = SortedList()
        self._search_step = search_step

        if high is None:
            # unbounded search; an upper bound is established later via down()
            self._high: Optional[int] = None
            self._current = 0
        else:
            if not isinstance(high, int):
                raise ValueError('high must be None or integer.')

            # number of steps needed to cover [low, high); round up so the
            # exclusive upper bound is still covered when (high - low) is
            # not a multiple of step.
            nmax = (high - low) // step
            if low + step * nmax < high:
                nmax += 1
            self._high: Optional[int] = nmax
            self._current = self._high // 2

    def set_current(self, val: int) -> None:
        """Set the value of the current marker.

        Parameters
        ----------
        val : int
            the new current value; must equal low + N * step for some N.

        Raises
        ------
        ValueError
            if val is not reachable with the configured step size.
        """
        if (val - self._offset) % self._step != 0:
            raise ValueError(f'value {val} is not multiple of step size.')
        self._current = (val - self._offset) // self._step

    def has_next(self) -> bool:
        """returns True if this iterator is not finished yet."""
        return self._high is None or self._low < self._high

    def get_next(self) -> int:
        """Returns the next value to look at."""
        return self._current * self._step + self._offset

    def up(self, val: Optional[float] = None, raise_exception: bool = True) -> None:
        """Increment this iterator.

        Parameters
        ----------
        val : Optional[float]
            if given, the function value observed at the current point;
            recorded to verify the observed values are monotonic.
        raise_exception : bool
            True to raise on a monotonicity violation; False to warn and
            prompt interactively instead.
        """
        if val is not None:
            self._check_monotonicity(val, raise_exception)

        self._low = self._current + 1

        if self._high is not None:
            self._current = (self._low + self._high) // 2
        else:
            # no upper bound known yet: grow geometrically (or take the
            # initial search_step when starting from 0).
            self._current = 2 * self._current if self._current > 0 else self._search_step

    def down(self, val: Optional[float] = None, raise_exception: bool = True) -> None:
        """Decrement this iterator.

        Parameters
        ----------
        val : Optional[float]
            if given, the function value observed at the current point;
            recorded to verify the observed values are monotonic.
        raise_exception : bool
            True to raise on a monotonicity violation; False to warn and
            prompt interactively instead.
        """
        if val is not None:
            self._check_monotonicity(val, raise_exception)

        # the current point becomes the (exclusive) upper bound
        self._high = self._current
        self._current = (self._low + self._high) // 2

    def save(self) -> None:
        """Save the current index."""
        self._save_marker = self.get_next()

    def save_info(self, info: Any) -> None:
        """Save current information."""
        self.save()
        self._save_info = info

    def get_last_save(self) -> Optional[int]:
        """Returns the last saved index."""
        return self._save_marker

    def get_last_save_info(self) -> Any:
        """Return last save information."""
        return self._save_info

    def _check_monotonicity(self, val: float, raise_exception: bool = True) -> None:
        """Record (index, value) and verify the observed values stay monotonic.

        Once three or more points are recorded, checks that the new value
        fits a monotonically non-decreasing or non-increasing sequence
        relative to its neighbors (points sorted by index).  On violation
        either raises RuntimeError or prints a warning and prompts the
        user, depending on raise_exception.
        """
        item = (self._current, val)
        self._slist.add(item)
        idx = self._slist.index(item)

        num_vals = len(self._slist)
        if num_vals >= 3:
            if idx == num_vals - 1:
                # new point is right-most; compare against the two before it
                mon_pos = (val >= self._slist[num_vals - 2][1] >= self._slist[num_vals - 3][1])
                mon_neg = (val <= self._slist[num_vals - 2][1] <= self._slist[num_vals - 3][1])
            elif idx == 0:
                # new point is left-most; compare against the two after it
                mon_pos = (val <= self._slist[1][1] <= self._slist[2][1])
                mon_neg = (val >= self._slist[1][1] >= self._slist[2][1])
            else:
                # interior point; must lie between its two neighbors
                mon_pos = (self._slist[idx - 1][1] <= val <= self._slist[idx + 1][1])
                mon_neg = (self._slist[idx - 1][1] >= val >= self._slist[idx + 1][1])

            if not (mon_pos or mon_neg):
                msg = 'WARNING: binary iterator observed non-monotonic values.'
                if raise_exception:
                    raise RuntimeError(msg)
                else:
                    print(msg)
                    print(f'Observed settings/values: {self._slist}')
                    txt = input('Press enter to continue, or "debug" to enter debugger: ')
                    if txt == 'debug':
                        breakpoint()
+
+
class FloatBinaryIterator:
    """A class that performs binary search over floating point numbers.

    This class supports both bounded or unbounded binary search, and terminates
    when we can guarantee the given error tolerance.

    Parameters
    ----------
    low : float
        the lower bound.
    high : Optional[float]
        the upper bound. None for unbounded binary search.
    tol : float
        we will guarantee that the final solution will be within this
        tolerance.
    search_step : float
        for unbounded binary search, this is the initial step size when
        searching for upper bound.
    max_err : float
        If unbounded binary search reached this value before finding an
        upper bound, raise an error.
    """

    def __init__(self, low: float, high: Optional[float],
                 tol: float = 1.0, search_step: float = 1.0, max_err: float = float('inf')) -> None:
        # all internal markers are relative to low
        self._offset = low
        self._tol = tol
        self._low = 0.0
        self._search_step = search_step
        self._max_err = max_err
        self._save_marker: Optional[float] = None
        self._save_info: Any = None

        if high is None:
            # unbounded: start at the lower edge, discover the bound via up()
            self._high: Optional[float] = None
            self._current = self._low
        else:
            self._high = high - low
            self._current = self._high / 2

    @property
    def low(self) -> float:
        return self._offset + self._low

    @property
    def high(self) -> float:
        if self._high is None:
            return float('inf')
        return self._offset + self._high

    def has_next(self) -> bool:
        """returns True if this iterator is not finished yet."""
        if self._high is None:
            return True
        return self._high - self._low > self._tol

    def get_next(self) -> float:
        """Returns the next value to look at."""
        return self._offset + self._current

    def up(self) -> None:
        """Increment this iterator."""
        self._low = self._current

        if self._high is None:
            # no upper bound yet: step, then grow geometrically
            if self._current == 0.0:
                self._current = self._search_step
            else:
                self._current *= 2
            if self._current > self._max_err:
                raise ValueError(f'Unbounded binary search '
                                 f'value = {self._current} > max_err = {self._max_err}')
        else:
            self._current = (self._low + self._high) / 2

    def down(self) -> None:
        """Decrement this iterator."""
        self._high = self._current
        self._current = (self._low + self._high) / 2

    def save(self) -> None:
        """Save the current index"""
        self._save_marker = self._current

    def save_info(self, info: Any) -> None:
        """Save current information."""
        self.save()
        self._save_info = info

    def get_last_save(self) -> Optional[float]:
        """Returns the last saved index."""
        if self._save_marker is None:
            return None
        return self._save_marker + self._offset

    def get_last_save_info(self) -> Any:
        """Return last save information."""
        return self._save_info
+
+
class FloatIntervalSearchHelper:
    """Computes the optimal number of sample points per search iteration.

    Given the overhead cost of one iteration (in units of the cost of
    evaluating a single point), this class memoizes, for each interval
    size (in units of tolerance), the number of evenly spaced interior
    points that minimizes the expected total search cost.
    """

    def __init__(self, overhead_factor: float) -> None:
        # _soln[s] = (optimal expected cost, optimal number of points)
        # for an interval of size s; sizes 0 and 1 need no work, size 2
        # needs exactly one point.
        self._soln: List[Tuple[float, int]] = [(0, 0), (0, 0), (overhead_factor + 1, 1)]
        self._k = overhead_factor

        # number of points used per iteration in the unbounded search phase
        self._num_unbnd = int(math.ceil(overhead_factor + 2))

    @property
    def num_unbound(self) -> int:
        return self._num_unbnd

    def get_num_points(self, size: float) -> int:
        """Return the optimal number of points for an interval of the given size."""
        key = int(math.ceil(size))
        # extend the memoized table in order up to the requested size
        while len(self._soln) <= key:
            self._soln.append(self._find_soln(len(self._soln)))
        return self._soln[key][1]

    def _find_soln(self, size: int) -> Tuple[float, int]:
        """Compute (optimal cost, optimal number of points) for an integer size."""
        k = self._k
        # baseline: evaluate every interior point in a single iteration
        best_time = k + size - 1
        best_pts = size - 1
        cand = 1
        while cand < size - 1:
            # cand points split the interval into sub-intervals of this size
            sub_size = int(math.ceil(size / (cand + 1)))
            cand_time = k + cand + self._soln[sub_size][0]
            if cand_time < best_time:
                best_time = cand_time
                best_pts = cand
            # jump to the next candidate that changes the sub-interval size
            cand = max(cand + 1, int(math.ceil(size / (sub_size - 1))))
        return best_time, best_pts
+
+
class FloatIntervalSearch:
    """Float interval search that can evaluate several points per iteration.

    Like a float binary search, but each iteration may sample a batch of
    evenly spaced points (see get_sweep_specs()), with the batch size
    chosen by FloatIntervalSearchHelper based on the per-iteration
    overhead.  An optional initial guess (point or range) is tried first.

    Parameters
    ----------
    low : float
        the lower bound of the search interval.
    high : Optional[float]
        the upper bound; None for unbounded search.
    overhead_factor : float
        per-iteration overhead, in units of single-point evaluation cost.
    tol : float
        the search terminates once the interval is no wider than this.
    search_step : float
        step size used to look for an upper bound when unbounded.
    max_err : float
        error out if an unbounded search exceeds this value without
        finding an upper bound.
    guess : Optional[Union[float, Tuple[float, float]]]
        optional initial guess, either a single point or a (lo, hi) range.
    """

    # helpers are cached per overhead factor, since the helper's solution
    # table depends only on that value.
    _helper_table: Dict[float, FloatIntervalSearchHelper] = {}

    def __init__(self, low: float, high: Optional[float], overhead_factor: float,
                 tol: float = 1.0, search_step: float = 1.0, max_err: float = float('inf'),
                 guess: Optional[Union[float, Tuple[float, float]]] = None) -> None:
        self._tol = tol
        self._low = low
        self._high: float = float('inf') if high is None else high
        self._search_step = search_step
        self._max_err = max_err

        helper = self._helper_table.get(overhead_factor, None)
        if helper is None:
            helper = FloatIntervalSearchHelper(overhead_factor)
            self._helper_table[overhead_factor] = helper
        self._helper = helper

        if guess is None:
            self._guess_range = None
        elif isinstance(guess, (float, int)):
            # point guess: expand to a +/- tol window, ignored if outside bounds
            if self._low < guess < self._high:
                self._guess_range = (max(self._low, guess - tol), min(self._high, guess + tol))
            else:
                self._guess_range = None
        else:
            # range guess: must be a finite, ordered (lo, hi) pair
            if guess[0] > guess[1] or guess[0] == float('inf') or guess[1] == float('inf'):
                raise ValueError(f'Invalid range: {guess}')
            self._guess_range = (max(self._low, guess[0] - tol), min(self._high, guess[1] + tol))

    @property
    def low(self) -> float:
        return self._low

    @property
    def high(self) -> float:
        return self._high

    def has_next(self) -> bool:
        """returns True if this iterator is not finished yet."""
        return self._high - self._low > self._tol

    def get_sweep_specs(self) -> Dict[str, Any]:
        """Return a linear sweep specification for the next batch of points.

        The returned dict has keys type/start/stop/num/endpoint and is
        presumably consumed as a parameter sweep specification by the
        caller (TODO: confirm against caller).
        """
        if self._guess_range is None:
            cur_high = self._high
            cur_low = self._low
        else:
            # search within the guess range first
            cur_low, cur_high = self._guess_range

        if cur_high == float('inf'):
            # unbounded: sweep a fixed ladder of points above cur_low,
            # clipped at max_err.
            val_min = cur_low + self._search_step
            val_max = min(self._max_err, cur_low + self._search_step * self._helper.num_unbound)
            num = int(math.floor((val_max - val_min) / self._search_step)) + 1
            if num < 1:
                raise ValueError(f'Unbounded binary search exceed max_err = {self._max_err}')
            val_max = val_min + self._search_step * num
            return dict(type='LINEAR', start=val_min, stop=val_max, num=num, endpoint=False)
        else:
            # bounded: the helper chooses the optimal number of interior points
            intv_len = cur_high - cur_low
            cur_size = intv_len / self._tol
            num_pts = self._helper.get_num_points(cur_size)
            delta = intv_len / (num_pts + 1)
            return dict(type='LINEAR', start=cur_low + delta, stop=cur_high,
                        num=num_pts, endpoint=False)

    def get_value(self) -> float:
        """Return the next single point to evaluate.

        Tries the edges of the guess range first (invalidating it once it
        no longer intersects the current interval), then falls back to the
        usual midpoint / step-out choice.
        """
        if self._guess_range is not None:
            low = self._guess_range[0] + self._tol
            if self._low < low < self._high:
                return low
            high = self._guess_range[1] - self._tol
            if self._low < high < self._high:
                return high

            # init_range is not valid anymore
            self._guess_range = None

        if self._high == float('inf'):
            return self._low + self._search_step
        else:
            return (self._low + self._high) / 2

    def set_interval(self, low: float, high: Optional[float] = float('inf')) -> None:
        """Update the current search interval to [low, high].

        Also reconciles the guess range with the new interval, and widens
        the unbounded search step when no upper bound has been found yet.
        """
        if high is None:
            high = float('inf')

        if self._guess_range is not None:
            tol = self._tol
            init_lo = self._guess_range[0] + tol
            init_hi = self._guess_range[1] - tol
            if (init_lo <= low and high <= init_hi) or init_hi <= low or high <= init_lo:
                # new interval either completely inside or completely outside guess range
                # done search within guess range
                self._guess_range = None
            else:
                # new interval partially overlap guess range, update guess range.
                self._guess_range = (max(low, init_lo - tol), min(high, init_hi + tol))

        self._low = low
        self._high = high
        if high == float('inf'):
            # double search step so it works with either get_points() or get_value().
            self._search_step *= 2
+
+
+def _contains(test_name: str, container_list: Iterable[Container[str]]) -> bool:
+ """Returns true if test_name is in any container."""
+ for container in container_list:
+ if test_name in container:
+ return True
+ return False
+
+
def get_new_name(base_name: str, *args: Container[str]) -> str:
    """Generate a new unique name.

    Appends ``_<index>`` to the given base name, choosing the smallest
    free index via binary search for logarithmic run time.

    Parameters
    ----------
    base_name : str
        the base name.
    *args : Container[str]
        a list of containers of used names.

    Returns
    -------
    new_name : str
        the unique name.
    """
    # the base name itself may already be free
    if not _contains(base_name, args):
        return base_name

    bin_iter = BinaryIterator(1, None)
    while bin_iter.has_next():
        cand = f'{base_name}_{bin_iter.get_next():d}'
        if not _contains(cand, args):
            # candidate is free; remember it and keep looking lower
            bin_iter.save_info(cand)
            bin_iter.down()
        else:
            bin_iter.up()

    new_name = bin_iter.get_last_save_info()
    assert new_name is not None, 'binary search should find a solution'
    return new_name
+
+
def minimize_cost_binary(f: Callable[[int], float],
                         vmin: float,
                         start: int = 0,
                         stop: Optional[int] = None,
                         step: int = 1,
                         save: Optional[int] = None,
                         nfev: int = 0,
                         ) -> MinCostResult:
    """Minimize cost given minimum output constraint using binary search.

    Given discrete function f, find the minimum integer x such that f(x) >= vmin using
    binary search.

    This algorithm only works if f is monotonically increasing, or if f monotonically increases
    then monotonically decreases, but stop is given and f(stop) >= vmin.

    Parameters
    ----------
    f : Callable[[int], float]
        a function that takes a single integer and output a scalar value. Must monotonically
        increase then monotonically decrease.
    vmin : float
        the minimum output value.
    start : int
        the input lower bound.
    stop : Optional[int]
        the input upper bound. Use None for unbounded binary search.
    step : int
        the input step. function will only be evaluated at the points start + step * N
    save : Optional[int]
        If not None, this value will be returned if no solution is found.
    nfev : int
        number of function calls already made.

    Returns
    -------
    result : MinCostResult
        the MinCostResult named tuple, with attributes:

        x : Optional[int]
            the minimum integer such that f(x) >= vmin. If no such x exists, this will be None.
        nfev : int
            total number of function calls made.
    """
    bin_iter = BinaryIterator(start, stop, step=step)
    while bin_iter.has_next():
        x_cur = bin_iter.get_next()
        v_cur = f(x_cur)
        nfev += 1

        if v_cur >= vmin:
            # constraint satisfied here; record it and search for a smaller x
            save = x_cur
            bin_iter.down()
        else:
            bin_iter.up()
    return MinCostResult(x=save, xmax=None, vmax=None, nfev=nfev)
+
+
def minimize_cost_golden(f, vmin, offset=0, step=1, maxiter=1000):
    # type: (Callable[[int], float], float, int, int, Optional[int]) -> MinCostResult
    """Minimize cost given minimum output constraint using golden section/binary search.

    Given discrete function f that monotonically increases then monotonically decreases,
    find the minimum integer x such that f(x) >= vmin.

    This method uses Fibonacci search to find the upper bound of x. If the upper bound
    is found, a binary search is performed in the interval to find the solution. If
    vmin is close to the maximum of f, a golden section search is performed to attempt
    to find x.

    Parameters
    ----------
    f : Callable[[int], float]
        a function that takes a single integer and output a scalar value. Must monotonically
        increase then monotonically decrease.
    vmin : float
        the minimum output value.
    offset : int
        the input lower bound. We will search for x in the range [offset, infinity).
    step : int
        the input step. function will only be evaluated at the points offset + step * N
    maxiter : Optional[int]
        maximum number of iterations to perform. If None, will run indefinitely.

    Returns
    -------
    result : MinCostResult
        the MinCostResult named tuple, with attributes:

        x : Optional[int]
            the minimum integer such that f(x) >= vmin. If no such x exists, this will be None.
        xmax : Optional[int]
            the value at which f achieves its maximum. This is set only if x is None
        vmax : Optional[float]
            the maximum value of f. This is set only if x is None.
        nfev : int
            total number of function calls made.
    """
    # fib2 < fib1 < fib0 are consecutive Fibonacci-like probe indices
    # (in units of step, relative to offset); f was evaluated at all of them.
    fib2 = fib1 = fib0 = 0
    cur_idx = 0
    nfev = 0
    # best (index, value) seen so far, and the value at the previous probe
    xmax = vmax = v_prev = None
    while maxiter is None or nfev < maxiter:
        v_cur = f(step * fib0 + offset)
        nfev += 1

        if v_cur >= vmin:
            # found upper bound, use binary search to find answer
            stop = step * fib0 + offset
            return minimize_cost_binary(f, vmin, start=step * (fib1 + 1) + offset,
                                        stop=stop, save=stop, step=step, nfev=nfev)
        else:
            if vmax is not None and v_cur <= vmax:
                # f stopped increasing: the maximum lies in [fib2, fib0]
                if cur_idx <= 3:
                    # special case: 0 <= xmax < 3, and we already checked all possibilities, so
                    # we know vmax < vmin. There is no solution and just return.
                    return MinCostResult(x=None, xmax=step * xmax + offset, vmax=vmax, nfev=nfev)
                else:
                    # we found the bracket that encloses maximum, perform golden section search
                    a, x, b = fib2, fib1, fib0
                    fx = v_prev
                    while x > a + 1 or b > x + 1:
                        # probe the point symmetric to x within [a, b]
                        u = a + b - x
                        fu = f(step * u + offset)
                        nfev += 1

                        if fu >= fx:
                            # u is the new best point; shrink the bracket toward it
                            if u > x:
                                a, x = x, u
                                fx = fu
                            else:
                                x, b = u, x
                                fx = fu

                            if fx >= vmin:
                                # found upper bound, use binary search to find answer
                                stop = step * x + offset
                                return minimize_cost_binary(f, vmin, start=step * (a + 1) + offset,
                                                            stop=stop, save=stop, step=step,
                                                            nfev=nfev)
                        else:
                            # x remains the best point; u becomes a bracket edge
                            if u > x:
                                b = u
                            else:
                                a = u

                    # golden section search terminated, the maximum is less than vmin
                    return MinCostResult(x=None, xmax=step * x + offset, vmax=fx, nfev=nfev)
            else:
                # still not close to maximum, continue searching
                vmax = v_prev = v_cur
                xmax = fib0
                cur_idx += 1
                if cur_idx <= 3:
                    # cover the first few indices exhaustively: 0, 1, 2, 3
                    fib2, fib1, fib0 = fib1, fib0, cur_idx
                else:
                    # afterwards grow the probe index Fibonacci-style
                    fib2, fib1, fib0 = fib1, fib0, fib1 + fib0

    raise ValueError('Maximum number of iteration achieved')
+
+
def minimize_cost_binary_float(f: Callable[[float], float],
                               vmin: float,
                               start: float,
                               stop: float,
                               tol: float = 1e-8,
                               save: Optional[float] = None,
                               nfev: int = 0,
                               ) -> MinCostResult:
    """Minimize cost given minimum output constraint using binary search.

    Given function f and an interval, find minimum input x such that f(x) >= vmin using
    binary search.

    This algorithm only works if f is monotonically increasing, or if f monotonically increases
    then monotonically decreases, and f(stop) >= vmin.

    Parameters
    ----------
    f : Callable[[float], float]
        a function that takes a single float and output a scalar value. Must monotonically
        increase then monotonically decrease.
    vmin : float
        the minimum output value.
    start : float
        the input lower bound.
    stop : float
        the input upper bound.
    tol : float
        output tolerance.
    save : Optional[float]
        If not None, this value will be returned if no solution is found.
    nfev : int
        number of function calls already made.

    Returns
    -------
    result : MinCostResult
        the MinCostResult named tuple, with attributes:

        x : Optional[float]
            the minimum x such that f(x) >= vmin. If no such x exists, this will be None.
        nfev : int
            total number of function calls made.
    """
    bin_iter = FloatBinaryIterator(start, stop, tol=tol)
    while bin_iter.has_next():
        x_cur = bin_iter.get_next()
        v_cur = f(x_cur)
        nfev += 1

        if v_cur >= vmin:
            # constraint satisfied here; record it and search for a smaller x
            save = x_cur
            bin_iter.down()
        else:
            bin_iter.up()
    return MinCostResult(x=save, xmax=None, vmax=None, nfev=nfev)
+
+
def minimize_cost_golden_float(f: Callable[[float], float], vmin: float, start: float,
                               stop: float, tol: float = 1e-8, maxiter: int = 1000
                               ) -> MinCostResult:
    """Minimize cost given minimum output constraint using golden section/binary search.

    Given function f on [start, stop] that monotonically increases then monotonically
    decreases, find the minimum input x such that f(x) >= vmin.

    The endpoints are checked first.  Otherwise a golden section search brackets
    the maximum of f, and as soon as a point with f >= vmin is found, a binary
    search on the rising side locates the smallest such x.

    Parameters
    ----------
    f : Callable[[float], float]
        a function that takes a single float and output a scalar value. Must monotonically
        increase then monotonically decrease.
    vmin : float
        the minimum output value.
    start : float
        the input lower bound.
    stop : float
        the input upper bound.
    tol : float
        the solution tolerance.
    maxiter : int
        maximum number of iterations to perform.

    Returns
    -------
    result : MinCostResult
        the MinCostResult named tuple, with attributes:

        x : Optional[float]
            the minimum x such that f(x) >= vmin. If no such x exists, this will be None.
        xmax : Optional[float]
            the value at which f achieves its maximum. This is set only if x is None
        vmax : Optional[float]
            the maximum value of f. This is set only if x is None.
        nfev : int
            total number of function calls made.
    """

    fa = f(start)
    if fa >= vmin:
        # solution found at start
        return MinCostResult(x=start, xmax=None, vmax=None, nfev=1)

    fb: Optional[float] = f(stop)
    if fb is None:
        raise TypeError("f(stop) returned None instead of float")
    if fb >= vmin:
        # found upper bound, use binary search to find answer
        return minimize_cost_binary_float(f, vmin, start, stop, tol=tol, save=stop, nfev=2)

    # solution is somewhere in middle
    gr = (5 ** 0.5 + 1) / 2  # golden ratio
    delta = (stop - start) / gr
    c = stop - delta
    d = start + delta

    fc: Optional[float] = f(c)
    if fc is None:
        raise TypeError("f(c) returned None instead of float")
    if fc >= vmin:
        # f(start) < vmin <= f(c): the smallest solution is in (start, c].
        # BUG FIX: fall back to c (known feasible) instead of stop (known infeasible).
        return minimize_cost_binary_float(f, vmin, start, c, tol=tol, save=c, nfev=3)

    fd: Optional[float] = f(d)
    if fd is None:
        raise TypeError("f(d) returned None instead of float")
    if fd >= vmin:
        # f(c) < vmin <= f(d): the smallest solution is in (c, d].
        # BUG FIX: the original searched (start, c], which cannot contain the
        # solution since f(c) < vmin; search (c, d] instead, with save=d.
        return minimize_cost_binary_float(f, vmin, c, d, tol=tol, save=d, nfev=4)

    # golden section search for the maximum; invariant: f < vmin at every
    # evaluated bracket point so far, and exactly one of fc/fd is unknown
    # after each bracket update below.
    if fc > fd:
        a, b, d = start, d, c
        c = b - (b - a) / gr
        fc, fd = None, fc
    else:
        a, b, c = c, stop, d
        d = a + (b - a) / gr
        fc, fd = fd, None

    nfev = 4
    while abs(b - a) > tol and nfev < maxiter:
        # evaluate whichever interior point is currently unknown
        if fc is None:
            fc = f(c)
        else:
            fd = f(d)
        assert fc is not None, 'Either fc or fd was None and the above should have set it'
        assert fd is not None, 'Either fc or fd was None and the above should have set it'
        nfev += 1
        if fc > fd:
            if fc >= vmin:
                # rising crossing is in (a, c]; save=c is known feasible
                return minimize_cost_binary_float(f, vmin, a, c, tol=tol, save=c, nfev=nfev)
            b, d = d, c
            c = b - (b - a) / gr
            fc, fd = None, fc
        else:
            if fd >= vmin:
                # rising crossing is in (a, d]; save=d is known feasible
                return minimize_cost_binary_float(f, vmin, a, d, tol=tol, save=d, nfev=nfev)
            a, c = c, d
            d = a + (b - a) / gr
            fc, fd = fd, None

    # bracket converged without meeting the constraint; report the maximum found
    test = (a + b) / 2
    vmax = f(test)
    nfev += 1
    if vmax >= vmin:
        return MinCostResult(x=test, xmax=test, vmax=vmax, nfev=nfev)
    else:
        return MinCostResult(x=None, xmax=test, vmax=vmax, nfev=nfev)
diff --git a/src/bag/verification/__init__.py b/src/bag/verification/__init__.py
new file mode 100644
index 0000000..f12738c
--- /dev/null
+++ b/src/bag/verification/__init__.py
@@ -0,0 +1,74 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This package contains LVS/RCX related verification methods.
+"""
+
+from typing import Any
+
+import importlib
+
+from .base import Checker
+
+__all__ = ['make_checker', 'Checker']
+
+
def make_checker(checker_cls, tmp_dir, **kwargs):
    # type: (str, str, **Any) -> Checker
    """Returns a checker object.

    Parameters
    -----------
    checker_cls : str
        the Checker class absolute path name.
    tmp_dir : str
        directory to save temporary files in.
    **kwargs : Any
        keyword arguments needed to create a Checker object.
    """
    # split "pkg.mod.ClassName" into module path and class name
    module_str, _, class_str = checker_cls.rpartition('.')
    cls = getattr(importlib.import_module(module_str), class_str)
    return cls(tmp_dir, **kwargs)
diff --git a/src/bag/verification/base.py b/src/bag/verification/base.py
new file mode 100644
index 0000000..69c6356
--- /dev/null
+++ b/src/bag/verification/base.py
@@ -0,0 +1,526 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module defines Checker, an abstract base class that handles LVS/RCX."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, List, Dict, Any, Tuple, Sequence, Optional, Union
+
+import abc
+from pathlib import Path
+
+from ..io.template import new_template_env
+from ..concurrent.core import SubProcessManager
+
+if TYPE_CHECKING:
+ from ..concurrent.core import FlowInfo, ProcInfo
+
+
class Checker(abc.ABC):
    """Abstract base class that handles DRC/LVS/RCX.

    Subclasses implement the tool-specific coroutines; this base class only
    stores the temporary directory and provides jinja template rendering
    helpers for generating tool input files.

    Parameters
    ----------
    tmp_dir : str
        temporary directory to save files in.
    """

    def __init__(self, tmp_dir: str) -> None:
        self.tmp_dir = tmp_dir
        # jinja environment used by the render_*_template helpers below
        self._tmp_env = new_template_env('bag.verification', 'templates')

    @abc.abstractmethod
    def get_rcx_netlists(self, lib_name: str, cell_name: str) -> List[str]:
        """Returns a list of generated extraction netlist file names.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell_name

        Returns
        -------
        netlists : List[str]
            a list of generated extraction netlist file names. The first index is the main netlist.
        """
        return []

    @abc.abstractmethod
    async def async_run_drc(self, lib_name: str, cell_name: str, lay_view: str = 'layout',
                            layout: str = '', params: Optional[Dict[str, Any]] = None,
                            run_dir: Union[str, Path] = '') -> Tuple[bool, str]:
        """A coroutine for running DRC.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        lay_view : str
            layout view name. Optional.
        layout : str
            the layout file name. If not empty, will not try to generate the layout file.
        params : Optional[Dict[str, Any]]
            optional DRC parameter values.
        run_dir : Union[str, Path]
            Defaults to empty string. The run directory, use empty string for default.

        Returns
        -------
        success : bool
            True if DRC succeeds.
        log_fname : str
            DRC log file name.
        """
        return False, ''

    @abc.abstractmethod
    async def async_run_lvs(self, lib_name: str, cell_name: str, sch_view: str = 'schematic',
                            lay_view: str = 'layout', layout: str = '', netlist: str = '',
                            params: Optional[Dict[str, Any]] = None, run_rcx: bool = False,
                            run_dir: Union[str, Path] = '') -> Tuple[bool, str]:
        """A coroutine for running LVS.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        sch_view : str
            schematic view name. Optional.
        lay_view : str
            layout view name. Optional.
        layout : str
            the layout file name. If not empty, will not try to generate the layout file.
        netlist : str
            the CDL netlist name. If provided, will not try to call tools to generate netlist.
        params : Optional[Dict[str, Any]]
            optional LVS parameter values.
        run_rcx : bool
            True if extraction will be ran after LVS.
        run_dir : Union[str, Path]
            Defaults to empty string. The run directory, use empty string for default.

        Returns
        -------
        success : bool
            True if LVS succeeds.
        log_fname : str
            LVS log file name.
        """
        return False, ''

    @abc.abstractmethod
    async def async_run_rcx(self, lib_name: str, cell_name: str,
                            params: Optional[Dict[str, Any]] = None,
                            run_dir: Union[str, Path] = '') -> Tuple[str, str]:
        """A coroutine for running RCX.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        params : Optional[Dict[str, Any]]
            optional RCX parameter values.
        run_dir : Union[str, Path]
            Defaults to empty string. The run directory, use empty string for default.

        Returns
        -------
        netlist : str
            The RCX netlist file name. empty if RCX failed.
        log_fname : str
            RCX log file name.
        """
        return '', ''

    @abc.abstractmethod
    async def async_export_layout(self, lib_name: str, cell_name: str, out_file: str,
                                  view_name: str = 'layout',
                                  params: Optional[Dict[str, Any]] = None) -> str:
        """A coroutine for exporting layout.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        view_name : str
            layout view name.
        out_file : str
            output file name.
        params : Optional[Dict[str, Any]]
            optional export parameter values.

        Returns
        -------
        log_fname : str
            log file name.
        """
        return ''

    @abc.abstractmethod
    async def async_export_schematic(self, lib_name: str, cell_name: str, out_file: str,
                                     view_name: str = 'schematic',
                                     params: Optional[Dict[str, Any]] = None) -> str:
        """A coroutine for exporting schematic.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        view_name : str
            schematic view name.
        out_file : str
            output file name.
        params : Optional[Dict[str, Any]]
            optional export parameter values.

        Returns
        -------
        log_fname : str
            log file name.
        """
        return ''

    def render_file_template(self, temp_name: str, params: Dict[str, Any]) -> str:
        """Returns the rendered content from the given template file.

        ``temp_name`` is looked up in this package's ``templates`` directory.
        """
        template = self._tmp_env.get_template(temp_name)
        return template.render(**params)

    def render_string_template(self, content: str, params: Dict[str, Any]) -> str:
        """Returns the rendered content from the given template string."""
        template = self._tmp_env.from_string(content)
        return template.render(**params)
+
+
class SubProcessChecker(Checker, abc.ABC):
    """An implementation of :class:`Checker` using :class:`SubProcessManager`.

    Subclasses only describe each job (the ``setup_*`` methods below); the
    corresponding ``async_*`` coroutines are implemented here by handing the
    descriptions to a :class:`SubProcessManager`.

    Parameters
    ----------
    tmp_dir : str
        temporary file directory.
    max_workers : int
        maximum number of parallel processes.
    cancel_timeout : float
        timeout for cancelling a subprocess.
    """

    def __init__(self, tmp_dir: str, max_workers: int, cancel_timeout: float) -> None:
        Checker.__init__(self, tmp_dir)
        self._manager = SubProcessManager(max_workers=max_workers, cancel_timeout=cancel_timeout)

    @abc.abstractmethod
    def setup_drc_flow(self, lib_name: str, cell_name: str, lay_view: str = 'layout',
                       layout: str = '', params: Optional[Dict[str, Any]] = None,
                       run_dir: Union[str, Path] = '') -> Sequence[FlowInfo]:
        """This method performs any setup necessary to configure a DRC subprocess flow.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        lay_view : str
            layout view name.
        layout : str
            the layout file name. If not empty, will not try to generate the layout file.
        params : Optional[Dict[str, Any]]
            optional DRC parameter values.
        run_dir : Union[str, Path]
            Defaults to empty string. The run directory, use empty string for default.

        Returns
        -------
        flow_info : Sequence[FlowInfo]
            the DRC flow information list. Each element is a tuple of:

            args : Union[str, Sequence[str]]
                command to run, as string or list of string arguments.
            log : str
                log file name.
            env : Optional[Dict[str, str]]
                environment variable dictionary. None to inherit from parent.
            cwd : Optional[str]
                working directory path. None to inherit from parent.
            vfun : Sequence[Callable[[Optional[int], str], Any]]
                a function to validate if it is ok to execute the next process. The output of the
                last function is returned. The first argument is the return code, the
                second argument is the log file name.
        """
        return []

    @abc.abstractmethod
    def setup_lvs_flow(self, lib_name: str, cell_name: str, sch_view: str = 'schematic',
                       lay_view: str = 'layout', layout: str = '', netlist: str = '',
                       params: Optional[Dict[str, Any]] = None, run_rcx: bool = False,
                       run_dir: Union[str, Path] = '') -> Sequence[FlowInfo]:
        """This method performs any setup necessary to configure a LVS subprocess flow.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        sch_view : str
            schematic view name.
        lay_view : str
            layout view name.
        layout : str
            the layout file name. If not empty, will not try to generate the layout file.
        netlist : str
            the CDL netlist name. If provided, will not try to call tools to generate netlist.
        params : Optional[Dict[str, Any]]
            optional LVS parameter values.
        run_rcx : bool
            True if extraction will follow LVS.
        run_dir : Union[str, Path]
            Defaults to empty string. The run directory, use empty string for default.

        Returns
        -------
        flow_info : Sequence[FlowInfo]
            the LVS flow information list. Each element is a tuple of:

            args : Union[str, Sequence[str]]
                command to run, as string or list of string arguments.
            log : str
                log file name.
            env : Optional[Dict[str, str]]
                environment variable dictionary. None to inherit from parent.
            cwd : Optional[str]
                working directory path. None to inherit from parent.
            vfun : Sequence[Callable[[Optional[int], str], Any]]
                a function to validate if it is ok to execute the next process. The output of the
                last function is returned. The first argument is the return code, the
                second argument is the log file name.
        """
        return []

    @abc.abstractmethod
    def setup_rcx_flow(self, lib_name: str, cell_name: str,
                       params: Optional[Dict[str, Any]] = None, run_dir: Union[str, Path] = ''
                       ) -> Sequence[FlowInfo]:
        """This method performs any setup necessary to configure a RCX subprocess flow.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        params : Optional[Dict[str, Any]]
            optional RCX parameter values.
        run_dir : Union[str, Path]
            Defaults to empty string. The run directory, use empty string for default.

        Returns
        -------
        flow_info : Sequence[FlowInfo]
            the RCX flow information list. Each element is a tuple of:

            args : Union[str, Sequence[str]]
                command to run, as string or list of string arguments.
            log : str
                log file name.
            env : Optional[Dict[str, str]]
                environment variable dictionary. None to inherit from parent.
            cwd : Optional[str]
                working directory path. None to inherit from parent.
            vfun : Sequence[Callable[[Optional[int], str], Any]]
                a function to validate if it is ok to execute the next process. The output of the
                last function is returned. The first argument is the return code, the
                second argument is the log file name.
        """
        return []

    @abc.abstractmethod
    def setup_export_layout(self, lib_name: str, cell_name: str, out_file: str,
                            view_name: str = 'layout', params: Optional[Dict[str, Any]] = None
                            ) -> ProcInfo:
        """This method performs any setup necessary to export layout.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        out_file : str
            output file name.
        view_name : str
            layout view name.
        params : Optional[Dict[str, Any]]
            optional export parameter values.

        Returns
        -------
        args : Union[str, Sequence[str]]
            command to run, as string or list of string arguments.
        log : str
            log file name.
        env : Optional[Dict[str, str]]
            environment variable dictionary. None to inherit from parent.
        cwd : Optional[str]
            working directory path. None to inherit from parent.
        """
        return '', '', None, None

    @abc.abstractmethod
    def setup_export_schematic(self, lib_name: str, cell_name: str, out_file: str,
                               view_name: str = 'schematic',
                               params: Optional[Dict[str, Any]] = None) -> ProcInfo:
        """This method performs any setup necessary to export schematic.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell name.
        out_file : str
            output file name.
        view_name : str
            schematic view name.
        params : Optional[Dict[str, Any]]
            optional export parameter values.

        Returns
        -------
        args : Union[str, Sequence[str]]
            command to run, as string or list of string arguments.
        log : str
            log file name.
        env : Optional[Dict[str, str]]
            environment variable dictionary. None to inherit from parent.
        cwd : Optional[str]
            working directory path. None to inherit from parent.
        """
        return '', '', None, None

    async def async_run_drc(self, lib_name: str, cell_name: str, lay_view: str = 'layout',
                            layout: str = '', params: Optional[Dict[str, Any]] = None,
                            run_dir: Union[str, Path] = '') -> Tuple[bool, str]:
        # delegate to the subclass-described flow; the last vfun's output is returned
        flow_info = self.setup_drc_flow(lib_name, cell_name, lay_view, layout, params, run_dir)
        return await self._manager.async_new_subprocess_flow(flow_info)

    async def async_run_lvs(self, lib_name: str, cell_name: str, sch_view: str = 'schematic',
                            lay_view: str = 'layout', layout: str = '', netlist: str = '',
                            params: Optional[Dict[str, Any]] = None, run_rcx: bool = False,
                            run_dir: Union[str, Path] = '') -> Tuple[bool, str]:
        flow_info = self.setup_lvs_flow(lib_name, cell_name, sch_view, lay_view, layout,
                                        netlist, params, run_rcx, run_dir)
        return await self._manager.async_new_subprocess_flow(flow_info)

    async def async_run_rcx(self, lib_name: str, cell_name: str,
                            params: Optional[Dict[str, Any]] = None,
                            run_dir: Union[str, Path] = '') -> Tuple[str, str]:
        flow_info = self.setup_rcx_flow(lib_name, cell_name, params, run_dir)
        return await self._manager.async_new_subprocess_flow(flow_info)

    async def async_export_layout(self, lib_name: str, cell_name: str,
                                  out_file: str, view_name: str = 'layout',
                                  params: Optional[Dict[str, Any]] = None) -> str:
        # ProcInfo is (args, log, env, cwd); index 1 is the log file name.
        proc_info = self.setup_export_layout(lib_name, cell_name, out_file, view_name, params)
        await self._manager.async_new_subprocess(*proc_info)
        return proc_info[1]

    async def async_export_schematic(self, lib_name: str, cell_name: str,
                                     out_file: str, view_name: str = 'schematic',
                                     params: Optional[Dict[str, Any]] = None) -> str:
        # ProcInfo is (args, log, env, cwd); index 1 is the log file name.
        proc_info = self.setup_export_schematic(lib_name, cell_name, out_file, view_name, params)
        await self._manager.async_new_subprocess(*proc_info)
        return proc_info[1]
+
+
def get_flow_config(root_dir: Dict[str, str], template: Dict[str, str],
                    env_vars: Dict[str, Dict[str, str]], link_files: Dict[str, List[str]],
                    params: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
    """Assemble per-mode flow configuration dictionaries.

    Builds a configuration entry for each of ``drc``/``lvs``/``rcx`` from the
    parallel input dictionaries, plus an ``lvs_rcx`` entry that falls back to
    the ``rcx`` run directory and the ``lvs`` settings when not given
    explicitly.
    """
    ans: Dict[str, Dict[str, Any]] = {}
    for mode in ['drc', 'lvs', 'rcx']:
        ans[mode] = dict(
            root_dir=Path(root_dir[mode]).resolve(),
            template=str(Path(template[mode]).resolve()),
            env_vars=env_vars[mode],
            link_files=_process_link_files(link_files[mode]),
            params=params[mode],
        )

    ext_lvs = 'lvs_rcx'
    ans[ext_lvs] = dict(
        root_dir=Path(root_dir.get(ext_lvs, root_dir['rcx'])).resolve(),
        template=str(Path(template.get(ext_lvs, template['lvs'])).resolve()),
        env_vars=env_vars.get(ext_lvs, env_vars['lvs']),
        params=params.get(ext_lvs, params['lvs']),
    )

    ext_links = link_files.get(ext_lvs)
    if ext_links is None:
        # reuse the already-processed lvs link file list
        ans[ext_lvs]['link_files'] = ans['lvs']['link_files']
    else:
        ans[ext_lvs]['link_files'] = _process_link_files(ext_links)

    return ans
+
+
+def _process_link_files(files_list: List[Union[str, List[str]]]) -> List[Tuple[Path, str]]:
+ ans = []
+ for info in files_list:
+ if isinstance(info, str):
+ cur_path = Path(info).resolve()
+ basename = cur_path.name
+ else:
+ cur_path = Path(info[0]).resolve()
+ basename = info[1]
+
+ ans.append((cur_path, basename))
+
+ return ans
diff --git a/src/bag/verification/calibre.py b/src/bag/verification/calibre.py
new file mode 100644
index 0000000..53d9194
--- /dev/null
+++ b/src/bag/verification/calibre.py
@@ -0,0 +1,249 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements LVS/RCX using Calibre and stream out from Virtuoso.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional, Union, List, Tuple, Dict, Any, Sequence, Callable
+
+from enum import Enum
+from pathlib import Path
+
+from ..io import write_file, read_file
+
+from .virtuoso import VirtuosoChecker, all_pass_callback
+
+if TYPE_CHECKING:
+ from .base import FlowInfo
+
+
class RCXMode(Enum):
    """Extraction engine selector (the ``rcx_program`` constructor argument)."""
    xrc = 0  # pex-style extraction; produces <cell>.pex.netlist outputs
    xact = 1  # pex-style extraction; produces <cell>.pex.netlist outputs
    qrc = 2  # runs the ``qrc`` command; produces a single <cell>.spf
+
+
class Calibre(VirtuosoChecker):
    """A subclass of VirtuosoChecker that uses Calibre for verification.

    Parameters
    ----------
    tmp_dir : str
        temporary directory to save files in.
    root_dir : Dict[str, str]
        dictionary of root run directories.
    template : Dict[str, str]
        dictionary of SVRF jinja template files.
    env_vars: Dict[str, Dict[str, str]]
        dictionary of environment variables.
    link_files : Dict[str, List[str]]
        dictionary of files to make available in each run directory
        (forwarded to :class:`VirtuosoChecker`).
    params : Dict[str, Dict[str, Any]]
        dictionary of default flow parameters.
    rcx_program : str
        the extraction program name.  Must be a valid :class:`RCXMode` name.
    max_workers : int
        maximum number of sub-processes BAG can launch.
    source_added_file : str
        the Calibre source.added file location. Environment variable is supported.
        If empty (default), this is not configured.
    cancel_timeout_ms : int
        cancel timeout in milliseconds.
    enable_color : bool
        True to enable coloring in GDS export.
    """

    def __init__(self, tmp_dir: str, root_dir: Dict[str, str], template: Dict[str, str],
                 env_vars: Dict[str, Dict[str, str]], link_files: Dict[str, List[str]],
                 params: Dict[str, Dict[str, Any]], rcx_program: str = 'pex', max_workers: int = 0,
                 source_added_file: str = '', cancel_timeout_ms: int = 10000,
                 enable_color: bool = False) -> None:
        VirtuosoChecker.__init__(self, tmp_dir, root_dir, template, env_vars, link_files,
                                 params, max_workers, source_added_file, cancel_timeout_ms,
                                 enable_color)

        # raises KeyError if rcx_program is not a valid RCXMode member name
        self._rcx_mode: RCXMode = RCXMode[rcx_program]

    def get_rcx_netlists(self, lib_name: str, cell_name: str) -> List[str]:
        """Returns a list of generated extraction netlist file names.

        Parameters
        ----------
        lib_name : str
            library name.
        cell_name : str
            cell_name

        Returns
        -------
        netlists : List[str]
            a list of generated extraction netlist file names. The first index is the main netlist.
        """
        # qrc produces a single SPF file; the pex-style modes produce the main
        # netlist plus its .pex/.pxi sidecar files.
        if self._rcx_mode is RCXMode.qrc:
            return [f'{cell_name}.spf']
        else:
            return [f'{cell_name}.pex.netlist',
                    f'{cell_name}.pex.netlist.pex',
                    f'{cell_name}.pex.netlist.{cell_name}.pxi',
                    ]

    def setup_drc_flow(self, lib_name: str, cell_name: str, lay_view: str = 'layout',
                       layout: str = '', params: Optional[Dict[str, Any]] = None,
                       run_dir: Union[str, Path] = '') -> Sequence[FlowInfo]:
        # trailing None is replaced with the generated control file path
        cmd = ['calibre', '-drc', '-hier', None]
        return self._setup_flow_helper(lib_name, cell_name, layout, None, lay_view,
                                       '', params, 'drc', cmd, _drc_passed_check, run_dir)

    def setup_lvs_flow(self, lib_name: str, cell_name: str, sch_view: str = 'schematic',
                       lay_view: str = 'layout', layout: str = '', netlist: str = '',
                       params: Optional[Dict[str, Any]] = None, run_rcx: bool = False,
                       run_dir: Union[str, Path] = '') -> Sequence[FlowInfo]:
        # trailing None is replaced with the generated control file path
        cmd = ['calibre', '-lvs', '-hier', None]
        mode = 'lvs_rcx' if run_rcx else 'lvs'
        return self._setup_flow_helper(lib_name, cell_name, layout, netlist, lay_view,
                                       sch_view, params, mode, cmd, _lvs_passed_check, run_dir)

    def setup_rcx_flow(self, lib_name: str, cell_name: str,
                       params: Optional[Dict[str, Any]] = None, run_dir: Union[str, Path] = ''
                       ) -> Sequence[FlowInfo]:
        # noinspection PyUnusedLocal
        def _rcx_passed_check(retcode: int, log_file: str) -> Tuple[str, str]:
            # RCX succeeds iff <cell>.spf exists next to the log file.
            fpath = Path(log_file).resolve()
            out_file: Path = fpath.parent
            out_file = out_file.joinpath(f'{cell_name}.spf')
            if not out_file.is_file():
                return '', ''

            return str(out_file), log_file

        # trailing None is replaced with the generated control file path
        cmd = ['qrc', '-64', '-cmd', None]
        flow_list = self._setup_flow_helper(lib_name, cell_name, None, None, '',
                                            '', params, 'rcx', cmd, _rcx_passed_check, run_dir)

        # add a Calibre svdb query step, reusing the env/cwd of the final run entry
        _, log_fname, env, dir_name, _ = flow_list[-1]
        query_log = Path(log_fname).with_name('bag_query.log')
        cmd = ['calibre', '-query_input', 'query.cmd', '-query', 'svdb']
        # NOTE(review): inserts the query step two entries from the end, i.e.
        # before the last two flow entries — confirm against setup_job's flow
        # ordering that this is the intended position.
        flow_list.insert(len(flow_list) - 2,
                         (cmd, str(query_log), env, dir_name, all_pass_callback))
        return flow_list

    def _setup_flow_helper(self, lib_name: str, cell_name: str, layout: Optional[str],
                           netlist: Optional[str], lay_view: str, sch_view: str,
                           user_params: Optional[Dict[str, Any]], mode: str, run_cmd: List[str],
                           check_fun: Callable[[Optional[int], str], Any],
                           run_dir_override: Union[str, Path]) -> List[FlowInfo]:
        """Common setup: configure the job, render the control file, and append
        the final tool invocation (with ``check_fun`` as its validator) to the flow."""
        tmp = self.setup_job(mode, lib_name, cell_name, layout, netlist, lay_view,
                             sch_view, user_params, run_dir_override)
        flow_list, run_dir, run_env, params, ctl_params = tmp

        # generate new control file; it becomes the last run_cmd argument
        ctl_path = self._make_control_file(mode, run_dir, ctl_params)
        run_cmd[-1] = str(ctl_path)

        log_path = run_dir / f'bag_{mode}.log'
        flow_list.append((run_cmd, str(log_path), run_env, str(run_dir), check_fun))

        return flow_list

    def _make_control_file(self, mode: str, run_dir: Path, ctl_params: Dict[str, str]) -> Path:
        """Render the jinja control template for ``mode`` into ``run_dir`` and
        return the written file's path."""
        ctl_path = run_dir / f'bag_{mode}.ctrl'
        temp = self.get_control_template(mode)
        content = temp.render(**ctl_params)
        write_file(ctl_path, content)

        return ctl_path
+
+
# noinspection PyUnusedLocal
def _drc_passed_check(retcode: int, log_file: str) -> Tuple[bool, str]:
    """Check if DRC passed by scanning the log for the zero-results marker.

    Parameters
    ----------
    retcode : int
        return code of the DRC process (unused; the log content is authoritative).
    log_file : str
        log file name.

    Returns
    -------
    success : bool
        True if DRC passed.
    log_file : str
        the log file name, or empty string when the log is missing.
    """
    log_path = Path(log_file)
    if not log_path.is_file():
        return False, ''

    passed = '--- TOTAL RESULTS GENERATED = 0 (0)' in read_file(log_path)
    return passed, log_file
+
+
# noinspection PyUnusedLocal
def _lvs_passed_check(retcode: int, log_file: str) -> Tuple[bool, str]:
    """Check if LVS passed by scanning the log for Calibre's CORRECT marker.

    Parameters
    ----------
    retcode : int
        return code of the LVS process (unused; the log content is authoritative).
    log_file : str
        log file name.

    Returns
    -------
    success : bool
        True if LVS passed.
    log_file : str
        the log file name, or empty string when the log is missing.
    """
    log_path = Path(log_file)
    if not log_path.is_file():
        return False, ''

    passed = 'LVS completed. CORRECT. See report file:' in read_file(log_path)
    return passed, log_file
diff --git a/src/bag/verification/icv.py b/src/bag/verification/icv.py
new file mode 100644
index 0000000..19468d2
--- /dev/null
+++ b/src/bag/verification/icv.py
@@ -0,0 +1,345 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements LVS/RCX using ICV and stream out from Virtuoso.
+"""
+
+from typing import TYPE_CHECKING, Optional, List, Tuple, Dict, Any, Sequence
+
+import os
+
+from .virtuoso import VirtuosoChecker
+from ..io import read_file, open_temp
+
+if TYPE_CHECKING:
+ from .base import FlowInfo
+
+
+# noinspection PyUnusedLocal
+def _all_pass(retcode: int, log_file: str) -> bool:
+ return True
+
+
# noinspection PyUnusedLocal
def lvs_passed(retcode: int, log_file: str) -> Tuple[bool, str]:
    """Check if LVS passed.

    Appends the per-cell ``<cell>.LVS_ERRORS`` report (when present) to the
    log file, then scans the combined log for the ICV pass marker.

    Parameters
    ----------
    retcode : int
        return code of the LVS process (unused; the log content is authoritative).
    log_file : str
        log file name.  Its containing directory is named after the cell.

    Returns
    -------
    success : bool
        True if LVS passed.
    log_file : str
        the log file name, or empty string if the log file does not exist.
    """
    # Check existence FIRST: the original opened the log in append mode before
    # this test, which created the file and made the check always succeed.
    if not os.path.isfile(log_file):
        return False, ''

    dirname = os.path.dirname(log_file)
    cell_name = os.path.basename(dirname)
    lvs_error_file = os.path.join(dirname, cell_name + '.LVS_ERRORS')

    # append error report at the end of log file, if the run produced one
    # (guard fixes a FileNotFoundError crash when no error file exists)
    if os.path.isfile(lvs_error_file):
        with open(log_file, 'a') as logf, open(lvs_error_file, 'r') as errf:
            for line in errf:
                logf.write(line)

    cmd_output = read_file(log_file)
    test_str = 'Final comparison result:PASS'

    return test_str in cmd_output, log_file
+
+
+class ICV(VirtuosoChecker):
+ """A subclass of VirtuosoChecker that uses ICV for verification.
+
+ Parameters
+ ----------
+ tmp_dir : string
+ temporary directory to save files in.
+ lvs_run_dir : str
+ the LVS run directory.
+ lvs_runset : str
+ the LVS runset filename.
+ rcx_run_dir : str
+ the RCX run directory.
+ rcx_runset : str
+ the RCX runset filename.
+ source_added_file : str
+ the source.added file location. Environment variable is supported.
+ Default value is '$DK/Calibre/lvs/source.added'.
+ rcx_mode : str
+ the RC extraction mode. Defaults to 'starrc'.
+ """
+
+ def __init__(self, tmp_dir: str, lvs_run_dir: str, lvs_runset: str, rcx_run_dir: str,
+ rcx_runset: str, source_added_file: str = '$DK/Calibre/lvs/source.added',
+ rcx_mode: str = 'pex', **kwargs):
+
+ max_workers = kwargs.get('max_workers', None)
+ cancel_timeout = kwargs.get('cancel_timeout_ms', None)
+ rcx_params = kwargs.get('rcx_params', {})
+ lvs_params = kwargs.get('lvs_params', {})
+ rcx_link_files = kwargs.get('rcx_link_files', None)
+ lvs_link_files = kwargs.get('lvs_link_files', None)
+
+ if cancel_timeout is not None:
+ cancel_timeout /= 1e3
+
+ VirtuosoChecker.__init__(self, tmp_dir, max_workers, cancel_timeout, source_added_file)
+
+ self.default_rcx_params = rcx_params
+ self.default_lvs_params = lvs_params
+ self.lvs_run_dir = os.path.abspath(lvs_run_dir)
+ self.lvs_runset = lvs_runset
+ self.lvs_link_files = lvs_link_files
+ self.rcx_run_dir = os.path.abspath(rcx_run_dir)
+ self.rcx_runset = rcx_runset
+ self.rcx_link_files = rcx_link_files
+ self.rcx_mode = rcx_mode
+
+ def get_rcx_netlists(self, lib_name: str, cell_name: str) -> List[str]:
+ """Returns a list of generated extraction netlist file names.
+
+ Parameters
+ ----------
+ lib_name : str
+ library name.
+ cell_name : str
+ cell_name
+
+ Returns
+ -------
+ netlists : List[str]
+ a list of generated extraction netlist file names. The first index is the main netlist.
+ """
+ # PVS generate schematic cellviews directly.
+ if self.rcx_mode == 'starrc':
+ return ['%s.spf' % cell_name]
+ else:
+ pass
+
+ def setup_lvs_flow(self, lib_name: str, cell_name: str, sch_view: str = 'schematic',
+ lay_view: str = 'layout', gds: str = '', netlist = '',
+ params: Optional[Dict[str, Any]] = None) -> Sequence[FlowInfo]:
+
+ if netlist:
+ netlist = os.path.abspath(netlist)
+
+ run_dir = os.path.join(self.lvs_run_dir, lib_name, cell_name)
+ os.makedirs(run_dir, exist_ok=True)
+
+ lay_file, sch_file = self._get_lay_sch_files(run_dir, netlist)
+
+ # add schematic/layout export to flow
+ flow_list = []
+ if not gds:
+ cmd, log, env, cwd = self.setup_export_layout(lib_name, cell_name, lay_file, lay_view,
+ None)
+ flow_list.append((cmd, log, env, cwd, _all_pass))
+ if not netlist:
+ cmd, log, env, cwd = self.setup_export_schematic(lib_name, cell_name, sch_file,
+ sch_view, None)
+ flow_list.append((cmd, log, env, cwd, _all_pass))
+
+ lvs_params_actual = self.default_lvs_params.copy()
+ if params is not None:
+ lvs_params_actual.update(params)
+
+ with open_temp(prefix='lvsLog', dir=run_dir, delete=False) as logf:
+ log_file = logf.name
+
+ # set _drPROCESS
+ dr_process_str = '_drPROCESS=' + lvs_params_actual['_drPROCESS']
+
+ cmd = ['icv', '-D', dr_process_str, '-i', lay_file, '-s', sch_file, '-sf', 'SPICE',
+ '-f', 'GDSII', '-c', cell_name, '-vue', '-I']
+        for f in self.lvs_link_files or []:
+ cmd.append(f)
+
+ flow_list.append((cmd, log_file, None, run_dir, lvs_passed))
+ return flow_list
+
+ def setup_rcx_flow(self, lib_name: str, cell_name: str, sch_view: str = 'schematic',
+ lay_view: str = 'layout', gds: str = '', netlist: str = '',
+ params: Optional[Dict[str, Any]] = None) -> Sequence[FlowInfo]:
+
+ # update default RCX parameters.
+ rcx_params_actual = self.default_rcx_params.copy()
+ if params is not None:
+ rcx_params_actual.update(params)
+
+ run_dir = os.path.join(self.rcx_run_dir, lib_name, cell_name)
+ os.makedirs(run_dir, exist_ok=True)
+
+ lay_file, sch_file = self._get_lay_sch_files(run_dir, netlist)
+ with open_temp(prefix='rcxLog', dir=run_dir, delete=False) as logf:
+ log_file = logf.name
+ flow_list = []
+ if not gds:
+ cmd, log, env, cwd = self.setup_export_layout(lib_name, cell_name, lay_file, lay_view,
+ None)
+ flow_list.append((cmd, log, env, cwd, _all_pass))
+ if not netlist:
+ cmd, log, env, cwd = self.setup_export_schematic(lib_name, cell_name, sch_file,
+ sch_view, None)
+ flow_list.append((cmd, log, env, cwd, _all_pass))
+
+ if self.rcx_mode == 'starrc':
+ # first: run Extraction LVS
+ lvs_params_actual = self.default_lvs_params.copy()
+
+ dr_process_str = '_drPROCESS=' + lvs_params_actual['_drPROCESS']
+
+ cmd = ['icv', '-D', '_drRCextract', '-D', dr_process_str, '-D', '_drICFOAlayers',
+ '-i', lay_file, '-s', sch_file, '-sf', 'SPICE', '-f', 'GDSII',
+ '-c', cell_name, '-I']
+                for f in self.lvs_link_files or []:
+ cmd.append(f)
+
+ # hack the environment variables to make sure $PWD is the same as current working directory
+ env_copy = os.environ.copy()
+ env_copy['PWD'] = run_dir
+ flow_list.append((cmd, log_file, env_copy, run_dir, lvs_passed))
+
+ # second: setup CCP
+ # make symlinks
+ if self.rcx_link_files:
+ for source_file in self.rcx_link_files:
+ targ_file = os.path.join(run_dir, os.path.basename(source_file))
+ if not os.path.exists(targ_file):
+ os.symlink(source_file, targ_file)
+
+ # generate new cmd for StarXtract
+ cmd_content, result = self.modify_starrc_cmd(run_dir, lib_name, cell_name,
+ rcx_params_actual, sch_file)
+
+ # save cmd for StarXtract
+ with open_temp(dir=run_dir, delete=False) as cmd_file:
+ cmd_fname = cmd_file.name
+ cmd_file.write(cmd_content)
+
+ cmd = ['StarXtract', '-clean', cmd_fname]
+ else:
+ cmd = []
+
+ # noinspection PyUnusedLocal
+ def rcx_passed(retcode, log_fname):
+ dirname = os.path.dirname(log_fname)
+ cell_name = os.path.basename(dirname)
+ results_file = os.path.join(dirname, cell_name + '.RESULTS')
+
+            # check existence first: open(..., 'a') below would create the log
+            if not os.path.isfile(log_fname):
+                return None, ''
+            # append error file at the end of log file
+            with open(log_fname, 'a') as logf:
+                with open(results_file, 'r') as errf:
+                    for line in errf:
+                        logf.write(line)
+
+ cmd_output = read_file(log_fname)
+ test_str = 'DRC and Extraction Results: CLEAN'
+
+ if test_str in cmd_output:
+ return results_file, log_fname
+ else:
+ return None, log_fname
+
+ flow_list.append((cmd, log_file, None, run_dir, rcx_passed))
+ return flow_list
+
+ @classmethod
+ def _get_lay_sch_files(cls, run_dir, netlist=''):
+ lay_file = os.path.join(run_dir, 'layout.gds')
+ sch_file = netlist if netlist else os.path.join(run_dir, 'schematic.net')
+ return lay_file, sch_file
+
+ def modify_starrc_cmd(self, run_dir, lib_name, cell_name, starrc_params, sch_file):
+ # type: (str, str, str, Dict[str, Any], str) -> Tuple[str, str]
+ """Modify the cmd file.
+
+ Parameters
+ ----------
+ run_dir : str
+ the run directory.
+ lib_name : str
+ the library name.
+ cell_name : str
+ the cell name.
+ starrc_params : Dict[str, Any]
+ override StarRC parameters.
+ sch_file : str
+ the schematic netlist
+
+ Returns
+ -------
+ starrc_cmd : str
+ the new StarXtract cmd file.
+ output_name : str
+ the extracted netlist file.
+ """
+ output_name = '%s.spf' % cell_name
+ if 'CDSLIBPATH' in os.environ:
+ cds_lib_path = os.path.abspath(os.path.join(os.environ['CDSLIBPATH'], 'cds.lib'))
+ else:
+ cds_lib_path = os.path.abspath('./cds.lib')
+ content = self.render_string_template(read_file(self.rcx_runset),
+ dict(
+ cell_name=cell_name,
+ extract_type=starrc_params['extract'].get('type'),
+ netlist_format=starrc_params.get('netlist_format',
+ 'SPF'),
+ sch_file=sch_file,
+ cds_lib=cds_lib_path,
+ lib_name=lib_name,
+ run_dir=run_dir,
+ ))
+ return content, os.path.join(run_dir, output_name)
diff --git a/src/bag/verification/pvs.py b/src/bag/verification/pvs.py
new file mode 100644
index 0000000..fdda52a
--- /dev/null
+++ b/src/bag/verification/pvs.py
@@ -0,0 +1,168 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module implements LVS/RCX using PVS/QRC and stream out from Virtuoso.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional, List, Dict, Any, Sequence, Tuple, Union
+
+from pathlib import Path
+
+from ..io import read_file, write_file
+
+from .virtuoso import VirtuosoChecker
+
+if TYPE_CHECKING:
+ from .base import FlowInfo
+
+
+class PVS(VirtuosoChecker):
+ """A subclass of VirtuosoChecker that uses PVS/QRC for verification.
+
+ Parameters
+ ----------
+ tmp_dir : str
+ temporary directory to save files in.
+ root_dir : Dict[str, str]
+ dictionary of root run directories.
+ template : Dict[str, str]
+ dictionary of SVRF jinja template files.
+ env_vars: Dict[str, Dict[str, str]]
+ dictionary of environment variables.
+ params : Dict[str, Dict[str, Any]]
+ dictionary of default flow parameters.
+ lvs_cmd : str
+ the lvs command.
+ max_workers : int
+ maximum number of sub-processes BAG can launch.
+ source_added_file : str
+ the Calibre source.added file location. Environment variable is supported.
+ If empty (default), this is not configured.
+ cancel_timeout_ms : int
+ cancel timeout in milliseconds.
+ enable_color : bool
+ True to enable coloring in GDS export.
+ """
+
+ def __init__(self, tmp_dir: str, root_dir: Dict[str, str], template: Dict[str, str],
+ env_vars: Dict[str, Dict[str, str]], link_files: Dict[str, List[str]],
+ params: Dict[str, Dict[str, Any]],
+ lvs_cmd: str = 'pvs', max_workers: int = 0, source_added_file: str = '',
+ cancel_timeout_ms: int = 10000, enable_color: bool = False) -> None:
+ VirtuosoChecker.__init__(self, tmp_dir, root_dir, template, env_vars, link_files,
+ params, max_workers, source_added_file, cancel_timeout_ms,
+ enable_color)
+
+ self._lvs_cmd = lvs_cmd
+
+ def get_rcx_netlists(self, lib_name: str, cell_name: str) -> List[str]:
+ # PVS generate schematic cellviews directly.
+ return []
+
+ def setup_drc_flow(self, lib_name: str, cell_name: str, lay_view: str = 'layout',
+ layout: str = '', params: Optional[Dict[str, Any]] = None,
+ run_dir: Union[str, Path] = '') -> Sequence[FlowInfo]:
+ raise NotImplementedError('Not supported yet.')
+
+ def setup_lvs_flow(self, lib_name: str, cell_name: str, sch_view: str = 'schematic',
+ lay_view: str = 'layout', layout: str = '', netlist: str = '',
+ params: Optional[Dict[str, Any]] = None, run_rcx: bool = False,
+ run_dir: Union[str, Path] = '') -> Sequence[FlowInfo]:
+ mode = 'lvs_rcx' if run_rcx else 'lvs'
+
+ tmp = self.setup_job(mode, lib_name, cell_name, layout, netlist, lay_view,
+ sch_view, params, run_dir)
+ flow_list, run_dir_path, run_env, params, ctl_params = tmp
+
+ if ctl_params['layout_type'] != 'GDSII':
+ raise ValueError('Only LVS with gds file is supported.')
+
+ # generate new control file
+ ctl_path = run_dir_path / f'bag_{mode}.ctrl'
+ temp = self.get_control_template(mode)
+ content = temp.render(**ctl_params)
+ write_file(ctl_path, content)
+
+ cmd = [self._lvs_cmd, '-perc', '-lvs', '-control', str(ctl_path),
+ '-gds', ctl_params['layout_file'], '-layout_top_cell', cell_name,
+ '-source_cdl', ctl_params['netlist_file'], '-source_top_cell', cell_name,
+ 'pvs_rules']
+
+ log_path = run_dir_path / f'bag_{mode}.log'
+ flow_list.append((cmd, str(log_path), run_env, str(run_dir_path), _lvs_passed_check))
+
+ return flow_list
+
+ def setup_rcx_flow(self, lib_name: str, cell_name: str,
+ params: Optional[Dict[str, Any]] = None, run_dir: Union[str, Path] = ''
+ ) -> Sequence[FlowInfo]:
+ raise NotImplementedError('Not supported yet.')
+
+
+# noinspection PyUnusedLocal
+def _lvs_passed_check(retcode: int, log_file: str) -> Tuple[bool, str]:
+ """Check if LVS passed
+
+ Parameters
+ ----------
+ retcode : int
+ return code of the LVS subprocess.
+ log_file : str
+ log file name.
+
+ Returns
+ -------
+ success : bool
+ True if LVS passed.
+ log_file : str
+ the log file name.
+ """
+ fpath = Path(log_file)
+ if not fpath.is_file():
+ return False, ''
+
+ cmd_output = read_file(fpath)
+ test_str = '# Run Result : MATCH'
+ return test_str in cmd_output, log_file
diff --git a/src/bag/verification/templates/gds_export_config.txt b/src/bag/verification/templates/gds_export_config.txt
new file mode 100644
index 0000000..7ef5669
--- /dev/null
+++ b/src/bag/verification/templates/gds_export_config.txt
@@ -0,0 +1,50 @@
+case "preserve"
+cellListFile ""
+cellMap ""
+cellNamePrefix ""
+cellNameSuffix ""
+convertDot "node"
+convertPin "geometry"
+#doNotPreservePcellPins
+#flattenPcells
+#flattenVias
+fontMap ""
+#ignoreLines
+#ignorePcellEvalFail
+labelCase "preserve"
+labelDepth "1"
+labelMap ""
+layerMap ""
+library "{{lib_name}}"
+logFile ""
+maxVertices "200"
+#mergePathSegsToPath
+#noConvertHalfWidthPath
+noInfo ""
+#noObjectProp
+#noOutputTextDisplays
+#noOutputUnplacedInst
+noWarn ""
+objectMap ""
+outputDir "{{run_dir}}"
+#pathToPolygon
+pinAttNum "0"
+propMap ""
+#propValueOnly
+#rectToBox
+refLibList ""
+replaceBusBitChar "{{square_bracket}}"
+enableColoring "{{enable_color}}"
+#reportPrecisionLoss
+#respectGDSIINameLimit
+runDir "{{run_dir}}"
+#snapToGrid
+strmFile "{{output_name}}"
+strmVersion "5"
+summaryFile ""
+techLib ""
+topCell "{{cell_name}}"
+userSkillFile ""
+viaMap ""
+view "{{view_name}}"
+warnToErr ""
diff --git a/src/bag/verification/templates/oasis_export_config.txt b/src/bag/verification/templates/oasis_export_config.txt
new file mode 100644
index 0000000..8898dab
--- /dev/null
+++ b/src/bag/verification/templates/oasis_export_config.txt
@@ -0,0 +1,69 @@
+arrayInstToScalar "false"
+backupOasisLogFiles "false"
+case "preserve"
+cellListFile ""
+cellMap ""
+cellNamePrefix ""
+cellNameSuffix ""
+checkPolygon "false"
+circleToPolygon "false"
+compress "false"
+#compressLevel "1"
+convertDot "circle"
+convertPcellPin ""
+convertPin "geometry"
+dbuPerUU ""
+diagonalPathToPolygon "false"
+#doNotPreservePcellPins "false"
+donutNumSides "64"
+ellipseNumSides "64"
+enableColoring "{{enable_color}}"
+flattenPcells "false"
+flattenVias "false"
+#flattenViaShapesToViaPurp "false"
+GDSIICompatible "false"
+hierDepth "32767"
+ignoreLines "false"
+ignoreMissingCells "false"
+ignoreObjectMapFromTech "false"
+ignorePcellEvalFail "false"
+ignoreRoutes "false"
+ignoreZeroWidthPathSeg "false"
+infoToWarn ""
+labelDepth "1"
+labelMap ""
+layerMap ""
+library "{{lib_name}}"
+logFile ""
+mergePathSegsToPath "false"
+noConvertHalfWidthPath "false"
+noInfo ""
+noObjectProp "false"
+noOutputTextDisplays "false"
+noOutputUnplacedInst "false"
+noWarn ""
+oasisFile "{{output_name}}"
+objectMap ""
+outputDir ""
+pathToPolygon "false"
+preservePinAtt "false"
+preserveTextAtt "false"
+propMap ""
+refLibList ""
+replaceBusBitChar "{{square_bracket}}"
+runDir "{{run_dir}}"
+showCompletionMsgBox "true"
+snapToGrid "false"
+subMasterSeparator "_CDNS_"
+summaryFile ""
+techLib ""
+textCase "preserve"
+topCell "{{cell_name}}"
+translateUnmappedLPP "false"
+userSkillFile ""
+verbose "false"
+viaMap ""
+view "{{view_name}}"
+virtualMemory "false"
+warnToErr ""
+wildCardInCellMap "false"
diff --git a/src/bag/verification/templates/si_env.txt b/src/bag/verification/templates/si_env.txt
new file mode 100644
index 0000000..7bc5972
--- /dev/null
+++ b/src/bag/verification/templates/si_env.txt
@@ -0,0 +1,27 @@
+simStopList = '("auCdl")
+simViewList = '("auCdl" "schematic")
+globalGndSig = ""
+globalPowerSig = ""
+shrinkFACTOR = 0
+checkScale = "meter"
+diodeCheck = "none"
+capacitorCheck = "none"
+resistorCheck = "none"
+resistorModel = ""
+shortRES = 2000
+simNetlistHier = 't
+pinMAP = 'nil
+displayPININFO = 't
+checkLDD = 'nil
+connects = ""
+setEQUIV = ""
+simRunDir = "{{run_dir}}"
+hnlNetlistFileName = "{{output_name}}"
+simSimulator = "auCdl"
+simViewName = "{{view_name}}"
+simCellName = "{{cell_name}}"
+simLibName = "{{lib_name}}"
+incFILE = "{{source_added_file}}"
+cdlSimViewList = '("auCdl" "schematic")
+cdlSimStopList = '("auCdl")
+
diff --git a/src/bag/verification/virtuoso.py b/src/bag/verification/virtuoso.py
new file mode 100644
index 0000000..497aa4e
--- /dev/null
+++ b/src/bag/verification/virtuoso.py
@@ -0,0 +1,265 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module handles exporting schematic/layout from Virtuoso.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional, Dict, Any, List, Tuple, Union
+
+import os
+import shutil
+from abc import ABC
+from pathlib import Path
+
+from jinja2 import Template
+
+from pybag.enum import DesignOutput
+
+from ..io import write_file
+from ..io.template import new_template_env_fs
+from ..env import get_bag_work_dir
+from .base import SubProcessChecker, get_flow_config
+
+if TYPE_CHECKING:
+ from .base import ProcInfo, FlowInfo
+
+
+class VirtuosoChecker(SubProcessChecker, ABC):
+ """the base Checker class for Virtuoso.
+
+ This class implement layout/schematic export procedures.
+
+ Parameters
+ ----------
+ tmp_dir : str
+ temporary directory to save files in.
+ root_dir : Dict[str, str]
+ dictionary of root run directories.
+ template : Dict[str, str]
+ dictionary of SVRF jinja template files.
+ env_vars: Dict[str, Dict[str, str]]
+ dictionary of environment variables.
+ params : Dict[str, Dict[str, Any]]
+ dictionary of default flow parameters.
+ max_workers : int
+ maximum number of sub-processes BAG can launch.
+ source_added_file : str
+ the Calibre source.added file location. Environment variable is supported.
+ If empty (default), this is not configured.
+ cancel_timeout_ms : int
+ cancel timeout in milliseconds.
+ enable_color : bool
+ True to enable coloring in GDS export.
+ """
+
+ def __init__(self, tmp_dir: str, root_dir: Dict[str, str], template: Dict[str, str],
+ env_vars: Dict[str, Dict[str, str]], link_files: Dict[str, List[str]],
+ params: Dict[str, Dict[str, Any]], max_workers: int = 0,
+ source_added_file: str = '', cancel_timeout_ms: int = 10000,
+ enable_color: bool = False) -> None:
+
+ cancel_timeout = cancel_timeout_ms / 1e3
+ SubProcessChecker.__init__(self, tmp_dir, max_workers, cancel_timeout)
+
+ self._flow_config = get_flow_config(root_dir, template, env_vars, link_files, params)
+ self._source_added_file = source_added_file
+ self._bag_work_dir = get_bag_work_dir()
+ self._strm_params = dict(enable_color=enable_color)
+ self._temp_env_ctrl = new_template_env_fs()
+
+ @property
+ def bag_work_dir(self) -> str:
+ return self._bag_work_dir
+
+ def get_config(self, mode: str) -> Dict[str, Any]:
+ return self._flow_config[mode]
+
+ def get_control_template(self, mode: str) -> Template:
+ template: str = self.get_config(mode)['template']
+ return self._temp_env_ctrl.get_template(template)
+
+ def setup_job(self, mode: str, lib_name: str, cell_name: str,
+ layout: Optional[str], netlist: Optional[str], lay_view: str, sch_view: str,
+ user_params: Optional[Dict[str, Any]], run_dir_override: Union[str, Path]
+ ) -> Tuple[List[FlowInfo], Path, Optional[Dict[str, str]],
+ Dict[str, Any], Dict[str, Any]]:
+ config = self.get_config(mode)
+ root_dir: Path = config['root_dir']
+ link_files: List[Tuple[Path, Path]] = config['link_files']
+ env_vars: Dict[str, str] = config['env_vars']
+ params: Dict[str, Any] = config['params']
+
+ if isinstance(run_dir_override, str):
+ if run_dir_override:
+ run_dir = Path(run_dir_override).resolve()
+ else:
+ run_dir = root_dir.joinpath(lib_name, cell_name)
+ else:
+ run_dir = run_dir_override.resolve()
+
+ run_dir.mkdir(parents=True, exist_ok=True)
+
+ for fpath, basename in link_files:
+ link = run_dir / basename
+ if not link.exists():
+ link.symlink_to(fpath)
+
+ flow_list = []
+ sch_path = run_dir / 'netlist.cdl'
+ ctl_params = dict(cell_name=cell_name)
+ if layout is not None:
+ if layout:
+ ext = Path(layout).suffix[1:]
+ lay_path = run_dir / f'layout.{ext}'
+ shutil.copy(layout, str(lay_path))
+ else:
+ ext = 'gds'
+ lay_path = run_dir / 'layout.gds'
+ info = self.setup_export_layout(lib_name, cell_name, str(lay_path), lay_view)
+ flow_list.append((info[0], info[1], info[2], info[3], all_pass_callback))
+
+ if ext == DesignOutput.GDS.extension:
+ ctl_params['layout_type'] = 'GDSII'
+ elif ext == DesignOutput.OASIS.extension:
+ ctl_params['layout_type'] = 'OASIS'
+ else:
+ raise ValueError(f'Cannot determine layout type from layout file name: {lay_path}')
+ ctl_params['layout_file'] = str(lay_path)
+
+ if netlist is not None:
+ if netlist:
+ shutil.copy(netlist, str(sch_path))
+ else:
+ info = self.setup_export_schematic(lib_name, cell_name, str(sch_path), sch_view)
+ flow_list.append((info[0], info[1], info[2], info[3], all_pass_callback))
+
+ ctl_params['netlist_file'] = str(sch_path)
+
+ params_actual = params.copy()
+ if user_params is not None:
+            params_actual.update(user_params)
+
+ if env_vars:
+ run_env = dict(**os.environ)
+ run_env.update(env_vars)
+ else:
+ run_env = None
+
+ return flow_list, run_dir, run_env, params_actual, ctl_params
+
+ def setup_export_layout(self, lib_name: str, cell_name: str, out_file: str,
+ view_name: str = 'layout', params: Optional[Dict[str, Any]] = None
+ ) -> ProcInfo:
+ if params is None:
+ params = {}
+
+ enable_color: bool = params.get('enable_color',
+ self._strm_params.get('enable_color', False))
+ square_bracket: bool = params.get('square_bracket', False)
+ output_type: DesignOutput = params.get('output_type', DesignOutput.GDS)
+
+ if output_type is DesignOutput.GDS:
+ template_name = 'gds_export_config.txt'
+ cmd_str = 'strmout'
+ elif output_type is DesignOutput.OASIS:
+ template_name = 'oasis_export_config.txt'
+ cmd_str = 'oasisout'
+ else:
+ raise ValueError(f'Unknown layout export format: {output_type.name}')
+
+ out_path = Path(out_file).resolve()
+ run_dir = out_path.parent
+ out_name = out_path.name
+ log_file = str(run_dir / 'layout_export.log')
+
+ run_dir.mkdir(parents=True, exist_ok=True)
+
+ # fill in stream out configuration file.
+ content = self.render_file_template(template_name,
+ dict(lib_name=lib_name,
+ cell_name=cell_name,
+ view_name=view_name,
+ output_name=out_name,
+ run_dir=str(run_dir),
+ enable_color=str(enable_color).lower(),
+ square_bracket=str(square_bracket).lower(),
+ ))
+ # run strmOut
+ ctrl_file = run_dir / 'stream_template'
+ write_file(ctrl_file, content)
+ cmd = [cmd_str, '-templateFile', str(ctrl_file)]
+ return cmd, log_file, None, self._bag_work_dir
+
+ def setup_export_schematic(self, lib_name: str, cell_name: str, out_file: str,
+ view_name: str = 'schematic',
+ params: Optional[Dict[str, Any]] = None) -> ProcInfo:
+ out_path = Path(out_file).resolve()
+ run_dir = out_path.parent
+ out_name = out_path.name
+ log_file = str(run_dir / 'schematic_export.log')
+
+ run_dir.mkdir(parents=True, exist_ok=True)
+
+ # fill in stream out configuration file.
+ content = self.render_file_template('si_env.txt',
+ dict(
+ lib_name=lib_name,
+ cell_name=cell_name,
+ view_name=view_name,
+ output_name=out_name,
+ source_added_file=self._source_added_file,
+ run_dir=run_dir,
+ ))
+
+ # run command
+ write_file(run_dir / 'si.env', content)
+ cmd = ['si', str(run_dir), '-batch', '-command', 'netlist']
+
+ return cmd, log_file, None, self._bag_work_dir
+
+
+# noinspection PyUnusedLocal
+def all_pass_callback(retcode: int, log_file: str) -> bool:
+ return True
diff --git a/src/bag/virtuoso.py b/src/bag/virtuoso.py
new file mode 100644
index 0000000..3ed2c7e
--- /dev/null
+++ b/src/bag/virtuoso.py
@@ -0,0 +1,153 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module provides functions needed to get Virtuoso to work with BAG.
+"""
+
+import os
+import sys
+import argparse
+
+import bag.interface
+import bag.io
+
+
+def run_skill_server(args):
+ """Run the BAG/Virtuoso server."""
+ error_msg = ''
+ server = None
+ port_file = None
+ port_number = None
+
+ try:
+ # process command line arguments
+ min_port = args.min_port
+ max_port = args.max_port
+ # remove directory from port file name
+ port_file = os.path.basename(args.port_file)
+ log_file = args.log_file
+
+ # create log file directory, and remove old log.
+ if log_file is not None:
+ log_file = os.path.abspath(log_file)
+ log_dir = os.path.dirname(log_file)
+ if not os.path.exists(log_dir):
+ os.makedirs(log_dir)
+ elif os.path.exists(log_file):
+ os.remove(log_file)
+
+ # determine port file name
+ if 'BAG_WORK_DIR' not in os.environ:
+ raise Exception('Environment variable BAG_WORK_DIR not defined')
+ work_dir = os.environ['BAG_WORK_DIR']
+ if not os.path.isdir(work_dir):
+ raise Exception('$BAG_WORK_DIR = %s is not a directory' % work_dir)
+
+ port_file = os.path.join(work_dir, port_file)
+
+ # determine temp directory
+ tmp_dir = None
+ if 'BAG_TEMP_DIR' in os.environ:
+ tmp_dir = os.environ['BAG_TEMP_DIR']
+ if not os.path.isdir(tmp_dir):
+ if os.path.exists(tmp_dir):
+ raise Exception('$BAG_TEMP_DIR = %s is not a directory' % tmp_dir)
+ else:
+ os.makedirs(tmp_dir)
+
+ # attempt to open port and start server
+ router = bag.interface.ZMQRouter(min_port=min_port, max_port=max_port, log_file=log_file)
+ server = bag.interface.SkillServer(router, sys.stdout, sys.stdin, tmpdir=tmp_dir)
+ port_number = router.get_port()
+ except Exception as ex:
+ error_msg = 'bag server process error:\n%s\n' % str(ex)
+
+ if not error_msg:
+ bag.io.write_file(port_file, '%r\n' % port_number)
+
+ # TODO: somehow this is a bug??!! figure it out.
+ # make sure port_file is removed at exit
+ # def exit_handler():
+ # if os.path.exists(port_file):
+ # os.remove(port_file)
+
+ # atexit.register(exit_handler)
+ # signal.signal(signal.SIGTERM, exit_handler)
+
+ try:
+ sys.stdout.write('BAG skill server has started. Yay!\n')
+ sys.stdout.flush()
+ server.run()
+ except Exception as ex:
+ error_msg = 'bag server process error:\n%s\n' % str(ex)
+
+ if error_msg:
+ sys.stderr.write(error_msg)
+ sys.stderr.flush()
+
+
+def parse_command_line_arguments():
+ """Parse command line arguments, then run the corresponding function."""
+
+ desc = 'A Python program that performs tasks for virtuoso.'
+ parser = argparse.ArgumentParser(description=desc)
+ desc = ('Valid commands. Supply -h/--help flag after '
+ 'the command name to learn more about the command.')
+ sub_parsers = parser.add_subparsers(title='Commands', description=desc, help='command name.')
+
+ desc = 'Run BAG skill server.'
+ par2 = sub_parsers.add_parser('run_skill_server', description=desc, help=desc)
+
+ par2.add_argument('min_port', type=int, help='minimum socket port number.')
+ par2.add_argument('max_port', type=int, help='maximum socket port number.')
+ par2.add_argument('port_file', type=str, help='file to write the port number to.')
+ par2.add_argument('log_file', type=str, nargs='?', default=None,
+ help='log file name.')
+ par2.set_defaults(func=run_skill_server)
+
+ args = parser.parse_args()
+ args.func(args)
+
+
+if __name__ == '__main__':
+ parse_command_line_arguments()
diff --git a/src/bag_test/__init__.py b/src/bag_test/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/bag_test/layout/__init__.py b/src/bag_test/layout/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/bag_test/layout/fill.py b/src/bag_test/layout/fill.py
new file mode 100644
index 0000000..9fc600d
--- /dev/null
+++ b/src/bag_test/layout/fill.py
@@ -0,0 +1,277 @@
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This script demonstrates how to add substrate contact in transistor row."""
+
+from typing import Any, Dict
+
+from pybag.enum import RoundMode
+from pybag.core import BBox
+
+from bag.util.immutable import Param
+from bag.layout.template import TemplateDB, TemplateBase
+
+
+class FillEdgeTest(TemplateBase):
+    """Layout test that places short wire stubs on an edge track with
+    varying gaps, then runs the maximum-space fill on the block.
+    """
+
+    def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
+        TemplateBase.__init__(self, temp_db, params, **kwargs)
+
+    @classmethod
+    def get_params_info(cls) -> Dict[str, str]:
+        # parameter name -> human-readable description
+        return dict(
+            w='width.',
+            h='height.',
+            fill_layer='fill layer ID.',
+        )
+
+    def draw_layout(self):
+        w: int = self.params['w']
+        h: int = self.params['h']
+        fill_layer: int = self.params['fill_layer']
+
+        grid = self.grid
+        tech_info = grid.tech_info
+        fill_info = tech_info.get_max_space_fill_info(fill_layer)
+
+        self.set_size_from_bound_box(fill_layer, BBox(0, 0, w, h), round_up=True)
+        bbox = self.bound_box
+
+        # tdir: direction along the fill-layer tracks; pdir: perpendicular to it.
+        tdir = grid.get_direction(fill_layer)
+        pdir = tdir.perpendicular()
+        margin = fill_info.get_margin(pdir)
+        margin_le = fill_info.get_margin(tdir)
+        sp_le = fill_info.get_space(tdir)
+        dim = bbox.get_dim(pdir)
+        dim_le = bbox.get_dim(tdir)
+
+        # outermost track indices that still lie inside the fill margins
+        tidxl = grid.coord_to_track(fill_layer, margin, mode=RoundMode.LESS_EQ)
+        tidxr = grid.coord_to_track(fill_layer, dim - margin, mode=RoundMode.GREATER_EQ)
+        wlen = grid.get_min_cont_length(fill_layer, 1)
+
+        # fill inner
+        self.add_wires(fill_layer, tidxl + 1, margin_le, dim_le - margin_le, num=tidxr - tidxl - 1)
+
+        # short stubs on the left edge track, separated by slightly different
+        # gaps (sp_le, sp_le + 2, 2 * sp_le + 2 * wlen) — presumably chosen to
+        # hit distinct edge-fill cases; confirm against do_max_space_fill.
+        lower = margin_le
+        self.add_wires(fill_layer, tidxl, lower, lower + wlen)
+        lower += wlen + sp_le
+        self.add_wires(fill_layer, tidxl, lower, lower + wlen)
+        lower += wlen + sp_le + 2
+        self.add_wires(fill_layer, tidxl, lower, lower + wlen)
+        lower += wlen + 2 * sp_le + wlen * 2
+        self.add_wires(fill_layer, tidxl, lower, lower + wlen)
+
+        self.do_max_space_fill(fill_layer, bbox)
+
+
+class FillEdgeCenterTest(TemplateBase):
+    """Layout test that sizes its own block from the fill margins and places
+    wires on the tracks just inside both edges before running max-space fill.
+
+    Unlike the other tests, width/height are computed from the fill info
+    rather than taken as parameters.
+    """
+
+    def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
+        TemplateBase.__init__(self, temp_db, params, **kwargs)
+
+    @classmethod
+    def get_params_info(cls) -> Dict[str, str]:
+        # parameter name -> human-readable description
+        return dict(
+            fill_layer='fill layer ID.',
+        )
+
+    def draw_layout(self):
+        fill_layer: int = self.params['fill_layer']
+
+        grid = self.grid
+        tech_info = grid.tech_info
+        fill_info = tech_info.get_max_space_fill_info(fill_layer)
+
+        # tdir: direction along the fill-layer tracks; pdir: perpendicular to it.
+        tdir = grid.get_direction(fill_layer)
+        pdir = tdir.perpendicular()
+        margin = fill_info.get_margin(pdir)
+        margin_le = fill_info.get_margin(tdir)
+        sp_le = fill_info.get_space(tdir)
+
+        # dim_q: block-size quantum perpendicular to the tracks
+        blk_arr = grid.get_block_size(fill_layer, half_blk_x=False, half_blk_y=False)
+        dim_q = blk_arr[pdir.value]
+
+        # width = 1.5 margins rounded down to the block quantum;
+        # height sized to hold the wire stubs drawn below plus both margins
+        w = (int(round(margin * 1.5)) // dim_q) * dim_q
+        wlen = grid.get_min_cont_length(fill_layer, 1)
+        h = 2 * margin_le + 7 * wlen + 5 * sp_le
+
+        self.set_size_from_bound_box(fill_layer, BBox(0, 0, w, h), round_up=True,
+                                     half_blk_x=False, half_blk_y=False)
+        bbox = self.bound_box
+
+        dim = bbox.get_dim(pdir)
+
+        # tidx0/tidx1: tracks just inside the right/left fill margins
+        tidx0 = grid.find_next_track(fill_layer, dim - margin, mode=RoundMode.LESS)
+        tidx1 = grid.find_next_track(fill_layer, margin, mode=RoundMode.GREATER)
+        tidx_l = tidx0 + 1
+        tidx_r = tidx1 - 1
+
+        # wire stubs alternating between the two edge-adjacent tracks
+        lower = margin_le
+        self.add_wires(fill_layer, tidx0, lower, lower + wlen)
+        self.add_wires(fill_layer, tidx1, lower, lower + wlen)
+        lower += wlen + sp_le
+        self.add_wires(fill_layer, tidx1, lower, lower + wlen)
+        lower += wlen + sp_le
+        self.add_wires(fill_layer, tidx0, lower, lower + wlen)
+        lower += 2 * wlen + 2 * sp_le
+        self.add_wires(fill_layer, tidx_l, lower, lower + wlen)
+        lower += 2 * wlen + sp_le
+        self.add_wires(fill_layer, tidx_r, lower, lower + wlen)
+
+        self.do_max_space_fill(fill_layer, bbox)
+
+
+class FillEndTest(TemplateBase):
+    """Layout test that leaves a longitudinal gap at both wire ends and
+    covers both transverse edges, then runs the maximum-space fill.
+    """
+
+    def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
+        TemplateBase.__init__(self, temp_db, params, **kwargs)
+
+    @classmethod
+    def get_params_info(cls) -> Dict[str, str]:
+        # parameter name -> human-readable description
+        return dict(
+            w='width.',
+            h='height.',
+            fill_layer='fill layer ID.',
+        )
+
+    def draw_layout(self):
+        w: int = self.params['w']
+        h: int = self.params['h']
+        fill_layer: int = self.params['fill_layer']
+
+        grid = self.grid
+        tech_info = grid.tech_info
+        fill_info = tech_info.get_max_space_fill_info(fill_layer)
+
+        self.set_size_from_bound_box(fill_layer, BBox(0, 0, w, h), round_up=True)
+        bbox = self.bound_box
+
+        # tdir: direction along the fill-layer tracks; pdir: perpendicular to it.
+        tdir = grid.get_direction(fill_layer)
+        pdir = tdir.perpendicular()
+        margin = fill_info.get_margin(pdir)
+        margin_le = fill_info.get_margin(tdir)
+        sp_le = fill_info.get_space(tdir)
+        dim = bbox.get_dim(pdir)
+        dim_le = bbox.get_dim(tdir)
+
+        # inner track span plus the last track before the far edge
+        tidxl = grid.coord_to_track(fill_layer, margin, mode=RoundMode.LESS_EQ)
+        tidxr = grid.coord_to_track(fill_layer, dim - margin, mode=RoundMode.GREATER_EQ)
+        tidx_end = grid.coord_to_track(fill_layer, dim, mode=RoundMode.LESS)
+
+        wlen = grid.get_min_cont_length(fill_layer, 1)
+
+        # fill inner and transverse edges
+        gap = (margin_le + wlen) // 2 + sp_le
+        self.add_wires(fill_layer, tidxl, gap, dim_le - gap, num=tidxr - tidxl + 1)
+        self.add_wires(fill_layer, 0, margin_le, dim_le - margin_le)
+        self.add_wires(fill_layer, tidx_end, margin_le, dim_le - margin_le)
+
+        self.do_max_space_fill(fill_layer, bbox)
+
+
+class FillCenterTest(TemplateBase):
+    """Layout test that covers all four edges of the block with wires,
+    leaving only the center empty for the maximum-space fill to handle.
+    """
+
+    def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
+        TemplateBase.__init__(self, temp_db, params, **kwargs)
+
+    @classmethod
+    def get_params_info(cls) -> Dict[str, str]:
+        # parameter name -> human-readable description
+        return dict(
+            w='width.',
+            h='height.',
+            fill_layer='fill layer ID.',
+        )
+
+    def draw_layout(self):
+        w: int = self.params['w']
+        h: int = self.params['h']
+        fill_layer: int = self.params['fill_layer']
+
+        grid = self.grid
+        tech_info = grid.tech_info
+        fill_info = tech_info.get_max_space_fill_info(fill_layer)
+
+        self.set_size_from_bound_box(fill_layer, BBox(0, 0, w, h), round_up=True)
+        bbox = self.bound_box
+
+        # tdir: direction along the fill-layer tracks; pdir: perpendicular to it.
+        tdir = grid.get_direction(fill_layer)
+        pdir = tdir.perpendicular()
+        margin = fill_info.get_margin(pdir)
+        margin_le = fill_info.get_margin(tdir)
+        dim = bbox.get_dim(pdir)
+        dim_le = bbox.get_dim(tdir)
+
+        wlen = grid.get_min_cont_length(fill_layer, 1)
+
+        # fill edges and ends
+        tidxl = grid.coord_to_track(fill_layer, margin, mode=RoundMode.LESS_EQ, even=True)
+        tidxr = grid.coord_to_track(fill_layer, dim - margin, mode=RoundMode.GREATER_EQ, even=True)
+
+        # two full-length edge wires, plus short stubs at both ends of every
+        # inner track, so only the center region remains to be filled
+        tcoord_u = dim_le - margin_le
+        num = tidxr - tidxl - 1
+        self.add_wires(fill_layer, tidxl, margin_le, tcoord_u, num=2, pitch=tidxr - tidxl)
+        self.add_wires(fill_layer, tidxl + 1, margin_le, margin_le + wlen, num=num, pitch=1)
+        self.add_wires(fill_layer, tidxl + 1, tcoord_u - wlen, tcoord_u, num=num, pitch=1)
+
+        self.do_max_space_fill(fill_layer, bbox)
+
+
+class FillCenterTest2(TemplateBase):
+
+ def __init__(self, temp_db: TemplateDB, params: Param, **kwargs: Any) -> None:
+ TemplateBase.__init__(self, temp_db, params, **kwargs)
+
+ @classmethod
+ def get_params_info(cls) -> Dict[str, str]:
+ return dict(
+ w='width.',
+ h='height.',
+ fill_layer='fill layer ID.',
+ )
+
+ def draw_layout(self):
+ w: int = self.params['w']
+ h: int = self.params['h']
+ fill_layer: int = self.params['fill_layer']
+
+ grid = self.grid
+ tech_info = grid.tech_info
+ fill_info = tech_info.get_max_space_fill_info(fill_layer)
+
+ self.set_size_from_bound_box(fill_layer, BBox(0, 0, w, h), round_up=True)
+ bbox = self.bound_box
+
+ tdir = grid.get_direction(fill_layer)
+ pdir = tdir.perpendicular()
+ margin = fill_info.get_margin(pdir)
+ margin_le = fill_info.get_margin(tdir)
+ sp_le = fill_info.get_space(tdir)
+ dim = bbox.get_dim(pdir)
+ dim_le = bbox.get_dim(tdir)
+
+ tidxl = grid.coord_to_track(fill_layer, margin, mode=RoundMode.LESS_EQ)
+ tidxr = grid.coord_to_track(fill_layer, dim - margin, mode=RoundMode.GREATER_EQ)
+ tidx_end = grid.coord_to_track(fill_layer, dim, mode=RoundMode.LESS)
+
+ wlen = grid.get_min_cont_length(fill_layer, 1)
+
+ # fill inner and transverse edges
+ gap = margin_le + wlen + sp_le
+ with open('debug.txt', 'w') as f:
+ print(margin_le, wlen, sp_le, dim_le, file=f)
+ self.add_wires(fill_layer, tidxl, gap, dim_le - gap, num=tidxr - tidxl + 1)
+ self.add_wires(fill_layer, 0, margin_le, dim_le - margin_le)
+ self.add_wires(fill_layer, tidx_end, margin_le, dim_le - margin_le)
+
+ self.do_max_space_fill(fill_layer, bbox)
diff --git a/src/bag_test/schematic/__init__.py b/src/bag_test/schematic/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/bag_test/schematic/net_bus.py b/src/bag_test/schematic/net_bus.py
new file mode 100644
index 0000000..52a7156
--- /dev/null
+++ b/src/bag_test/schematic/net_bus.py
@@ -0,0 +1,64 @@
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, Any
+
+import os
+import pkg_resources
+
+from bag.design.module import Module
+from bag.design.database import ModuleDB
+from bag.util.immutable import Param
+
+
+# noinspection PyPep8Naming
+class bag_test__net_bus(Module):
+    """Module for library bag_test cell net_bus.
+
+    Chains two ``pin_array_0`` instances (X0 -> X1) in series; the net
+    between them is an intermediate bus ``mid`` whose width is set by the
+    ``mult`` parameter.
+    """
+
+    # schematic netlist description shipped with the package
+    yaml_file = pkg_resources.resource_filename(__name__,
+                                                os.path.join('netlist_info',
+                                                             'net_bus.yaml'))
+
+    def __init__(self, database: ModuleDB, params: Param, **kwargs: Any) -> None:
+        Module.__init__(self, self.yaml_file, database, params, **kwargs)
+
+    @classmethod
+    def get_params_info(cls) -> Dict[str, str]:
+        """Returns a dictionary from parameter names to descriptions.
+
+        Returns
+        -------
+        param_info : Optional[Dict[str, str]]
+            dictionary from parameter names to descriptions.
+        """
+        return dict(
+            mult='number of bits in the intermediate bus'
+        )
+
+    def design(self, mult: int) -> None:
+        """Design this schematic: size the two sub-instances and, for
+        mult > 1, widen the intermediate net into a bus.
+        """
+        # X0 instances are 1-bit each; X1 takes the full bus width.
+        self.instances['X0'].design(mult=1)
+        self.instances['X1'].design(mult=mult)
+
+        if mult > 1:
+            bus_name = f'<{mult - 1}:0>'
+            mid_name = f'mid{bus_name}'
+
+            self.rename_pin('mid', mid_name)
+
+            # array X0 across the bus and reconnect X1's bus input to mid
+            self.rename_instance('X0', f'X0{bus_name}', [('vout', mid_name)])
+            self.reconnect_instance_terminal('X1', f'vin{bus_name}', mid_name)
diff --git a/src/bag_test/schematic/netlist_info/net_bus.symbol.yaml b/src/bag_test/schematic/netlist_info/net_bus.symbol.yaml
new file mode 100644
index 0000000..255c627
--- /dev/null
+++ b/src/bag_test/schematic/netlist_info/net_bus.symbol.yaml
@@ -0,0 +1,298 @@
+lib_name: bag_test
+cell_name: net_bus
+view_name: symbol
+bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ mid:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -44
+ - 284
+ - -36
+ stype: 0
+ ttype: 1
+ vin:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: "[@instanceName]"
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: "[@partName]"
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - -40
+ -
+ - 240
+ - -40
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - -40
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: mid
+instances:
+ {}
+props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1559712693
+ partName:
+ - 3
+ - net_bus
+ pin#:
+ - 0
+ - 5
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("mid" "vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 99
+ _dbvCvTimeStamp:
+ - 0
+ - 99
+ cdbRevision:
+ - 0
+ - 227612
diff --git a/src/bag_test/schematic/netlist_info/net_bus.yaml b/src/bag_test/schematic/netlist_info/net_bus.yaml
new file mode 100644
index 0000000..803d203
--- /dev/null
+++ b/src/bag_test/schematic/netlist_info/net_bus.yaml
@@ -0,0 +1,479 @@
+lib_name: bag_test
+cell_name: net_bus
+view_name: schematic
+bbox:
+ - -220
+ - -600
+ - 768
+ - -210
+terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -220
+ - R0
+ bbox:
+ - -217
+ - -246
+ - -160
+ - -210
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -220
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -240
+ - R0
+ bbox:
+ - -217
+ - -266
+ - -160
+ - -230
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -240
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ mid:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - -120
+ - -320
+ - R0
+ bbox:
+ - -120
+ - -346
+ - -63
+ - -310
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -95
+ - -320
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ vin:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -280
+ - R0
+ bbox:
+ - -217
+ - -306
+ - -160
+ - -270
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -280
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - -120
+ - -280
+ - R0
+ bbox:
+ - -120
+ - -306
+ - -63
+ - -270
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -95
+ - -280
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+shapes:
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 140
+ - -600
+ -
+ - 140
+ - -560
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 133
+ - -564
+ alignment: 8
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 140
+ - -370
+ -
+ - 140
+ - -330
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 133
+ - -366
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: mid
+ origin:
+ - 340
+ - -453
+ alignment: 5
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: mid
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - -40
+ - -460
+ -
+ - 0
+ - -460
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - -4
+ - -453
+ alignment: 8
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 540
+ - -600
+ -
+ - 540
+ - -560
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 533
+ - -564
+ alignment: 8
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 540
+ - -370
+ -
+ - 540
+ - -330
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 533
+ - -366
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 680
+ - -460
+ -
+ - 720
+ - -460
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 684
+ - -453
+ alignment: 2
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: mid
+ points:
+ -
+ - 280
+ - -460
+ -
+ - 400
+ - -460
+instances:
+ X0:
+ lib_name: bag_test
+ cell_name: pin_array_0
+ view_name: symbol
+ xform:
+ - 0
+ - -460
+ - R0
+ bbox:
+ - -4
+ - -564
+ - 368
+ - -366
+ connections:
+ VDD: VDD
+ VSS: VSS
+ vin: vin
+ vout: mid
+ params:
+ {}
+ is_primitive: false
+ X1:
+ lib_name: bag_test
+ cell_name: pin_array_0
+ view_name: symbol
+ xform:
+ - 400
+ - -460
+ - R0
+ bbox:
+ - 396
+ - -564
+ - 768
+ - -366
+ connections:
+ VDD: VDD
+ VSS: VSS
+ vin: mid
+ vout: vout
+ params:
+ {}
+ is_primitive: false
+props:
+ connectivityLastUpdated:
+ - 0
+ - 1113
+ instance#:
+ - 0
+ - 2
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1559712708
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 5
+ schGeometryLastUpdated:
+ - 0
+ - 1113
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 1113
+ _dbvCvTimeStamp:
+ - 0
+ - 1113
+ cdbRevision:
+ - 0
+ - 227612
diff --git a/src/bag_test/schematic/netlist_info/pin_array_0.symbol.yaml b/src/bag_test/schematic/netlist_info/pin_array_0.symbol.yaml
new file mode 100644
index 0000000..bda113f
--- /dev/null
+++ b/src/bag_test/schematic/netlist_info/pin_array_0.symbol.yaml
@@ -0,0 +1,257 @@
+lib_name: bag_test
+cell_name: pin_array_0
+view_name: symbol
+bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ vin:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: "[@instanceName]"
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: "[@partName]"
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+instances:
+ {}
+props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1557496184
+ partName:
+ - 3
+ - pin_array_0
+ pin#:
+ - 0
+ - 4
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 71
+ _dbvCvTimeStamp:
+ - 0
+ - 71
+ cdbRevision:
+ - 0
+ - 227612
diff --git a/src/bag_test/schematic/netlist_info/pin_array_0.yaml b/src/bag_test/schematic/netlist_info/pin_array_0.yaml
new file mode 100644
index 0000000..37e3395
--- /dev/null
+++ b/src/bag_test/schematic/netlist_info/pin_array_0.yaml
@@ -0,0 +1,386 @@
+lib_name: bag_test
+cell_name: pin_array_0
+view_name: schematic
+bbox:
+ - -80
+ - -730
+ - 210
+ - -490
+terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -500
+ - R0
+ bbox:
+ - -77
+ - -526
+ - -20
+ - -490
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -500
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -520
+ - R0
+ bbox:
+ - -77
+ - -546
+ - -20
+ - -510
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -520
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ vin:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -560
+ - R0
+ bbox:
+ - -77
+ - -586
+ - -20
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -560
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - 20
+ - -560
+ - R0
+ bbox:
+ - 20
+ - -586
+ - 77
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 45
+ - -560
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+shapes:
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 193
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 200
+ - -680
+ -
+ - 200
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - 140
+ - -680
+ -
+ - 140
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 73
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 20
+ - -680
+ -
+ - 20
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 80
+ - -680
+ -
+ - 80
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 13
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - 133
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+instances:
+ XIN:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 140
+ - -680
+ - R0
+ bbox:
+ - 130
+ - -730
+ - 150
+ - -676
+ connections:
+ noConn: vin
+ params:
+ {}
+ is_primitive: true
+ XOUT:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 200
+ - -680
+ - R0
+ bbox:
+ - 190
+ - -730
+ - 210
+ - -676
+ connections:
+ noConn: vout
+ params:
+ {}
+ is_primitive: true
+ XVDD:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 20
+ - -680
+ - R0
+ bbox:
+ - 10
+ - -730
+ - 30
+ - -676
+ connections:
+ noConn: VDD
+ params:
+ {}
+ is_primitive: true
+ XVSS:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 80
+ - -680
+ - R0
+ bbox:
+ - 70
+ - -730
+ - 90
+ - -676
+ connections:
+ noConn: VSS
+ params:
+ {}
+ is_primitive: true
+props:
+ connectivityLastUpdated:
+ - 0
+ - 743
+ instance#:
+ - 0
+ - 4
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1557499961
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 4
+ schGeometryLastUpdated:
+ - 0
+ - 743
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 743
+ _dbvCvTimeStamp:
+ - 0
+ - 743
+ cdbRevision:
+ - 0
+ - 227612
diff --git a/src/bag_test/schematic/pin_array_0.py b/src/bag_test/schematic/pin_array_0.py
new file mode 100644
index 0000000..b96857a
--- /dev/null
+++ b/src/bag_test/schematic/pin_array_0.py
@@ -0,0 +1,57 @@
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, Any
+
+import os
+import pkg_resources
+
+from bag.design.module import Module
+from bag.design.database import ModuleDB
+from bag.util.immutable import Param
+
+
+# noinspection PyPep8Naming
+class bag_test__pin_array_0(Module):
+    """Module for library bag_test cell pin_array_0.
+
+    A pass-through cell whose pins terminate in no-connect instances;
+    when ``mult`` > 1 the ``vin`` pin becomes a bus and the XIN
+    no-connect instance is arrayed to match.
+    """
+
+    # schematic netlist description shipped with the package
+    yaml_file = pkg_resources.resource_filename(__name__,
+                                                os.path.join('netlist_info',
+                                                             'pin_array_0.yaml'))
+
+    def __init__(self, database: ModuleDB, params: Param, **kwargs: Any) -> None:
+        Module.__init__(self, self.yaml_file, database, params, **kwargs)
+
+    @classmethod
+    def get_params_info(cls) -> Dict[str, str]:
+        """Returns a dictionary from parameter names to descriptions.
+
+        Returns
+        -------
+        param_info : Optional[Dict[str, str]]
+            dictionary from parameter names to descriptions.
+        """
+        return dict(
+            mult='number of pins in parallel',
+        )
+
+    def design(self, mult: int) -> None:
+        """Design this schematic: widen vin into a bus when mult > 1."""
+        if mult > 1:
+            bus_name = f'<{mult - 1}:0>'
+            in_name = f'vin{bus_name}'
+            self.rename_pin('vin', in_name)
+            # array XIN so each bus bit still terminates in a no-connect
+            self.rename_instance('XIN', f'XIN{bus_name}', [('noConn', in_name)])
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..5573caf
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,14 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..b1bdd13
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,56 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from bag.env import create_tech_info, create_routing_grid
+
+
+@pytest.fixture(scope='session')
+def tech_info():
+ return create_tech_info()
+
+
+@pytest.fixture(scope='session')
+def routing_grid(tech_info):
+ return create_routing_grid(tech_info=tech_info)
diff --git a/tests/io/__init__.py b/tests/io/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/io/test_file.py b/tests/io/test_file.py
new file mode 100644
index 0000000..2876241
--- /dev/null
+++ b/tests/io/test_file.py
@@ -0,0 +1,64 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from bag.io.file import make_temp_dir
+
+
+def test_make_temp_dir():
+ """Check that make_temp_dir creates a temporary directory."""
+ # check prefix is correct
+ prefix = 'foobar'
+ dirname = make_temp_dir(prefix)
+ assert os.path.basename(dirname).startswith(prefix)
+ assert os.path.isdir(dirname)
+
+ # check make_temp_dir will create parent directory if it's not there
+ tmp_p1 = make_temp_dir('tmp_parent')
+ parent = os.path.join(tmp_p1, 'parent')
+ assert not os.path.isdir(parent)
+ dirname = make_temp_dir(prefix, parent_dir=parent)
+ assert os.path.isdir(parent)
+ assert os.path.basename(dirname).startswith(prefix)
+ assert os.path.isdir(dirname)
diff --git a/tests/io/test_string.py b/tests/io/test_string.py
new file mode 100644
index 0000000..547053e
--- /dev/null
+++ b/tests/io/test_string.py
@@ -0,0 +1,55 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from bag.io.string import to_yaml_str
+
+
+@pytest.mark.parametrize("arg, expect", [
+ ([1, 2], '[1, 2]\n'),
+ ({'a': 3, 'b': 'hi'}, '{a: 3, b: hi}\n'),
+])
+def test_to_yaml_str(arg, expect):
+ """Check that to_yaml_str() converts Python objects to YAML string correctly."""
+ assert to_yaml_str(arg) == expect
diff --git a/tests/layout/__init__.py b/tests/layout/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/layout/conftest.py b/tests/layout/conftest.py
new file mode 100644
index 0000000..3dbc26c
--- /dev/null
+++ b/tests/layout/conftest.py
@@ -0,0 +1,51 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from bag.layout.template import TemplateDB
+
+
+@pytest.fixture
+def temp_db(routing_grid):
+ return TemplateDB(routing_grid, 'PYTEST')
diff --git a/tests/layout/routing/__init__.py b/tests/layout/routing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/layout/routing/test_fill.py b/tests/layout/routing/test_fill.py
new file mode 100644
index 0000000..087a87a
--- /dev/null
+++ b/tests/layout/routing/test_fill.py
@@ -0,0 +1,266 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from bag.layout.routing.fill import fill_symmetric_helper
+
+
+def check_disjoint_union(outer_list, inner_list, start, stop):
+ # test outer list has 1 more element than inner list
+ assert len(outer_list) == len(inner_list) + 1
+
+ sintv, eintv = outer_list[0], outer_list[-1]
+ if inner_list:
+ # test outer list covers more range than inner list
+ assert sintv[0] <= inner_list[0][0] and eintv[1] >= inner_list[-1][1]
+ # test outer list touches both boundaries
+ assert sintv[0] == start and eintv[1] == stop
+
+ # test intervals are disjoint and union is equal to given interval
+ for idx in range(len(outer_list)):
+ intv1 = outer_list[idx]
+ # test interval is non-negative
+ assert intv1[0] <= intv1[1]
+ if idx < len(inner_list):
+ intv2 = inner_list[idx]
+ # test interval is non-negative
+ assert intv2[0] <= intv2[1]
+ # test interval abuts
+ assert intv1[1] == intv2[0]
+ assert intv2[1] == outer_list[idx + 1][0]
+
+
+def check_symmetric(intv_list, start, stop):
+ # test given interval list is symmetric
+ flip_list = [(stop + start - b, stop + start - a) for a, b in reversed(intv_list)]
+ for i1, i2 in zip(intv_list, flip_list):
+ assert i1[0] == i2[0] and i1[1] == i2[1]
+
+
+def check_props(fill_list, space_list, num_diff_sp1, num_diff_sp2, n, tot_intv, inc_sp, sp,
+ eq_sp_parity, num_diff_sp_max, num_fill, fill_first, start, stop, n_flen_max,
+ sp_edge_tweak=False):
+ # check num_diff_sp is the same
+ assert num_diff_sp1 == num_diff_sp2
+ if n % 2 == eq_sp_parity and not sp_edge_tweak:
+ # check all spaces are the same
+ assert num_diff_sp1 == 0
+ else:
+ # check num_diff_sp is less than or equal to 1
+ assert num_diff_sp1 <= num_diff_sp_max
+ # test we get correct number of fill
+ assert len(fill_list) == num_fill
+ # test fill and space are disjoint and union is correct
+ if fill_first:
+ check_disjoint_union(fill_list, space_list, start, stop)
+ else:
+ check_disjoint_union(space_list, fill_list, start, stop)
+ # check symmetry
+ check_symmetric(fill_list, tot_intv[0], tot_intv[1])
+ check_symmetric(space_list, tot_intv[0], tot_intv[1])
+ # check fill has only two lengths, and they differ by 1
+ len_list = sorted(set((b - a) for a, b in fill_list))
+ assert len(len_list) <= n_flen_max
+ assert (len_list[-1] - len_list[0]) <= n_flen_max - 1
+
+ if space_list:
+ # check space has only two lengths, and they differ by 1
+ len_list = sorted(set((b - a) for a, b in space_list))
+ assert len(len_list) <= (2 if num_diff_sp1 > 0 else 1)
+ assert (len_list[-1] - len_list[0]) <= 1
+ # check that space is the right values
+ if len(len_list) == 1:
+ # if only one space, check that it is sp + inc only if num_diff_sp > 0
+ if num_diff_sp1 > 0:
+ sp_correct = sp + 1 if inc_sp else sp - 1
+ else:
+ sp_correct = sp
+ assert len_list[0] == sp_correct
+ else:
+ # check it has space sp and sp + inc_sp
+ if inc_sp:
+ assert len_list[0] == sp
+ else:
+ assert len_list[-1] == sp
+
+
+@pytest.mark.parametrize('sp', [3, 4, 5])
+@pytest.mark.parametrize('inc_sp', [True, False])
+@pytest.mark.parametrize('offset', [0, 4, 7])
+@pytest.mark.parametrize('foe', [True, False])
+def test_fill_symmetric_non_cyclic(sp, inc_sp, offset, foe):
+ # test fill symmetric for non-cyclic
+ area_max = 50
+ for area in range(sp + 1, area_max + 1):
+ tot_intv = offset, offset + area
+ for nfill in range(1, area - sp + 1):
+ nsp = nfill - 1 if foe else nfill + 1
+ # compute minimum possible footprint
+ if nfill % 2 == 1 or inc_sp:
+ # minimum possible footprint
+ min_footprint = nfill * 1 + nsp * sp
+ else:
+ # if we have even fill and we can decrease space, then decrease middle space by 1
+ min_footprint = nfill * 1 + nsp * sp - 1
+ if min_footprint > area:
+ # test exception when drawing with no solution
+ # we have no solution when minimum possible footprint > area
+ with pytest.raises(ValueError):
+ fill_symmetric_helper(area, nfill, sp, offset=offset, inc_sp=inc_sp,
+ invert=False, fill_on_edge=foe, cyclic=False)
+ with pytest.raises(ValueError):
+ fill_symmetric_helper(area, nfill, sp, offset=offset, inc_sp=inc_sp,
+ invert=True, fill_on_edge=foe, cyclic=False)
+ else:
+ # get fill and space list
+ fill_list, num_diff_sp1 = fill_symmetric_helper(area, nfill, sp, offset=offset,
+ inc_sp=inc_sp,
+ invert=False, fill_on_edge=foe,
+ cyclic=False)
+ space_list, num_diff_sp2 = fill_symmetric_helper(area, nfill, sp, offset=offset,
+ inc_sp=inc_sp,
+ invert=True, fill_on_edge=foe,
+ cyclic=False)
+
+ check_props(fill_list, space_list, num_diff_sp1, num_diff_sp2, nfill, tot_intv,
+ inc_sp, sp,
+ 1, 1, nfill, foe, tot_intv[0], tot_intv[1], 2)
+
+
+@pytest.mark.parametrize('sp', [3, 4, 5])
+@pytest.mark.parametrize('inc_sp', [True, False])
+@pytest.mark.parametrize('offset', [0, 4, 7])
+def test_fill_symmetric_cyclic_edge_fill(sp, inc_sp, offset):
+ # test fill symmetric for cyclic, fill on edge
+ area_max = 50
+ for area in range(sp + 1, area_max + 1):
+ tot_intv = offset, offset + area
+ for nfill in range(1, area - sp + 1):
+ nsp = nfill
+ if nfill % 2 == 0 or inc_sp:
+ # minimum possible footprint. Edge fill block must be even (hence the + 1)
+ min_footprint = nfill * 1 + 1 + nsp * sp
+ else:
+ # if we have odd fill and we can decrease space, then decrease middle space by 1
+ min_footprint = nfill * 1 + 1 + nsp * sp - 1
+ if min_footprint > area:
+ # test exception when drawing with no solution
+ # we have no solution when minimum possible footprint > area
+ with pytest.raises(ValueError):
+ fill_symmetric_helper(area, nfill, sp, offset=offset, inc_sp=inc_sp,
+ invert=False, fill_on_edge=True, cyclic=True)
+ with pytest.raises(ValueError):
+ fill_symmetric_helper(area, nfill, sp, offset=offset, inc_sp=inc_sp,
+ invert=True, fill_on_edge=True, cyclic=True)
+ else:
+ # get fill and space list
+ fill_list, num_diff_sp1 = fill_symmetric_helper(area, nfill, sp, offset=offset,
+ inc_sp=inc_sp,
+ invert=False, fill_on_edge=True,
+ cyclic=True)
+ space_list, num_diff_sp2 = fill_symmetric_helper(area, nfill, sp, offset=offset,
+ inc_sp=inc_sp,
+ invert=True, fill_on_edge=True,
+ cyclic=True)
+ # test boundary fills centers on edge
+ sintv, eintv = fill_list[0], fill_list[-1]
+ assert (sintv[1] + sintv[0]) % 2 == 0 and (eintv[1] + eintv[0]) % 2 == 0
+ assert ((sintv[1] + sintv[0]) // 2 == tot_intv[0] and
+ (eintv[1] + eintv[0]) // 2 == tot_intv[1])
+ # test other properties
+ check_props(fill_list, space_list, num_diff_sp1, num_diff_sp2, nfill, tot_intv,
+ inc_sp, sp,
+ 0, 1, nfill + 1, True, sintv[0], eintv[1], 3)
+
+
+@pytest.mark.parametrize('sp', [3, 4, 5])
+@pytest.mark.parametrize('inc_sp', [True, False])
+@pytest.mark.parametrize('offset', [0, 4, 7])
+def test_fill_symmetric_cyclic_edge_space(sp, inc_sp, offset):
+ # test fill symmetric for cyclic, space on edge
+ area_max = 50
+ for area in range(sp + 1, area_max + 1):
+ tot_intv = offset, offset + area
+ for nfill in range(1, area - sp + 1):
+ nsp = nfill
+ adj_sp = 1 if inc_sp else -1
+ sp_edge_tweak = sp % 2 == 1
+ if sp_edge_tweak:
+ # minimum possible footprint. Edge space block must be even (hence the + adj_sp)
+ min_footprint = nfill * 1 + nsp * sp + adj_sp
+ else:
+ min_footprint = nfill * 1 + nsp * sp
+ if nfill % 2 == 0 and not inc_sp:
+ # if we have middle space block, we can subtract one more from middle.
+ min_footprint -= 1
+ if min_footprint > area:
+ # test exception when drawing with no solution
+ # we have no solution when minimum possible footprint > area
+ with pytest.raises(ValueError):
+ fill_symmetric_helper(area, nfill, sp, offset=offset, inc_sp=inc_sp,
+ invert=False, fill_on_edge=False, cyclic=True)
+                    # (removed stray debug print; line kept blank-equivalent for patch alignment)
+ with pytest.raises(ValueError):
+ fill_symmetric_helper(area, nfill, sp, offset=offset, inc_sp=inc_sp,
+ invert=True, fill_on_edge=False, cyclic=True)
+ else:
+ # get fill and space list
+ fill_list, num_diff_sp1 = fill_symmetric_helper(area, nfill, sp, offset=offset,
+ inc_sp=inc_sp,
+ invert=False, fill_on_edge=False,
+ cyclic=True)
+ space_list, num_diff_sp2 = fill_symmetric_helper(area, nfill, sp, offset=offset,
+ inc_sp=inc_sp,
+ invert=True, fill_on_edge=False,
+ cyclic=True)
+
+ # test boundary space centers on edge
+ sintv, eintv = space_list[0], space_list[-1]
+ assert (sintv[1] + sintv[0]) % 2 == 0 and (eintv[1] + eintv[0]) % 2 == 0
+ assert ((sintv[1] + sintv[0]) // 2 == tot_intv[0] and
+ (eintv[1] + eintv[0]) // 2 == tot_intv[1])
+ # test other properties
+ check_props(fill_list, space_list, num_diff_sp1, num_diff_sp2, nfill, tot_intv,
+ inc_sp, sp,
+ 1, 2, nfill, False, sintv[0], eintv[1], 2, sp_edge_tweak)
diff --git a/tests/layout/routing/test_grid.py b/tests/layout/routing/test_grid.py
new file mode 100644
index 0000000..8a6e080
--- /dev/null
+++ b/tests/layout/routing/test_grid.py
@@ -0,0 +1,62 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Union
+
+import pytest
+
+from pybag.enum import RoundMode
+
+from bag.util.math import HalfInt
+from bag.layout.routing import RoutingGrid
+
+
+@pytest.mark.parametrize("lay, coord, w_ntr, mode, half_track, expect", [
+ (4, 720, 1, RoundMode.LESS_EQ, True, HalfInt(10)),
+])
+def test_find_next_track(routing_grid: RoutingGrid, lay: int, coord: int, w_ntr: int,
+ mode: Union[RoundMode, int], half_track: bool, expect: HalfInt) -> None:
+    """Check that find_next_track() works properly."""
+ ans = routing_grid.find_next_track(lay, coord, tr_width=w_ntr, half_track=half_track, mode=mode)
+ assert ans == expect
+ assert isinstance(ans, HalfInt)
diff --git a/tests/util/__init__.py b/tests/util/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/util/test_param.py b/tests/util/test_param.py
new file mode 100644
index 0000000..494c1c3
--- /dev/null
+++ b/tests/util/test_param.py
@@ -0,0 +1,61 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from bag.util.immutable import Param
+
+
+@pytest.mark.parametrize("val", [
+ 2,
+ 3.5,
+ 'hi',
+ (1, 2, 'bye'),
+ [1, 2, 'foo'],
+ {1: 'fi', 3.5: 'bar'},
+ (1, [1, 2], 'lol'),
+])
+def test_get_hash(val):
+    """Check that get_hash() works properly on supported datatypes."""
+ ans = Param.get_hash(val)
+ assert isinstance(ans, int)
diff --git a/tests_data/bag_test/layout/50_fill_edge/out.gds b/tests_data/bag_test/layout/50_fill_edge/out.gds
new file mode 100644
index 0000000..370c081
Binary files /dev/null and b/tests_data/bag_test/layout/50_fill_edge/out.gds differ
diff --git a/tests_data/bag_test/layout/50_fill_edge/params.yaml b/tests_data/bag_test/layout/50_fill_edge/params.yaml
new file mode 100644
index 0000000..22c7455
--- /dev/null
+++ b/tests_data/bag_test/layout/50_fill_edge/params.yaml
@@ -0,0 +1,6 @@
+module: bag_test.layout.fill
+class: FillEdgeTest
+params:
+ w: 12000
+ h: 15000
+ fill_layer: 3
diff --git a/tests_data/bag_test/layout/51_fill_edge_center/out.gds b/tests_data/bag_test/layout/51_fill_edge_center/out.gds
new file mode 100644
index 0000000..3b1291f
Binary files /dev/null and b/tests_data/bag_test/layout/51_fill_edge_center/out.gds differ
diff --git a/tests_data/bag_test/layout/51_fill_edge_center/params.yaml b/tests_data/bag_test/layout/51_fill_edge_center/params.yaml
new file mode 100644
index 0000000..93a578c
--- /dev/null
+++ b/tests_data/bag_test/layout/51_fill_edge_center/params.yaml
@@ -0,0 +1,4 @@
+module: bag_test.layout.fill
+class: FillEdgeCenterTest
+params:
+ fill_layer: 3
diff --git a/tests_data/bag_test/layout/52_fill_end/out.gds b/tests_data/bag_test/layout/52_fill_end/out.gds
new file mode 100644
index 0000000..6f73124
Binary files /dev/null and b/tests_data/bag_test/layout/52_fill_end/out.gds differ
diff --git a/tests_data/bag_test/layout/52_fill_end/params.yaml b/tests_data/bag_test/layout/52_fill_end/params.yaml
new file mode 100644
index 0000000..5a9368d
--- /dev/null
+++ b/tests_data/bag_test/layout/52_fill_end/params.yaml
@@ -0,0 +1,6 @@
+module: bag_test.layout.fill
+class: FillEndTest
+params:
+ w: 12000
+ h: 15000
+ fill_layer: 3
diff --git a/tests_data/bag_test/layout/53_fill_center/out.gds b/tests_data/bag_test/layout/53_fill_center/out.gds
new file mode 100644
index 0000000..4e2cf96
Binary files /dev/null and b/tests_data/bag_test/layout/53_fill_center/out.gds differ
diff --git a/tests_data/bag_test/layout/53_fill_center/params.yaml b/tests_data/bag_test/layout/53_fill_center/params.yaml
new file mode 100644
index 0000000..9640a78
--- /dev/null
+++ b/tests_data/bag_test/layout/53_fill_center/params.yaml
@@ -0,0 +1,6 @@
+module: bag_test.layout.fill
+class: FillCenterTest
+params:
+ w: 12000
+ h: 15000
+ fill_layer: 3
diff --git a/tests_data/bag_test/layout/54_fill_center2/out.gds b/tests_data/bag_test/layout/54_fill_center2/out.gds
new file mode 100644
index 0000000..deb5683
Binary files /dev/null and b/tests_data/bag_test/layout/54_fill_center2/out.gds differ
diff --git a/tests_data/bag_test/layout/54_fill_center2/params.yaml b/tests_data/bag_test/layout/54_fill_center2/params.yaml
new file mode 100644
index 0000000..924501d
--- /dev/null
+++ b/tests_data/bag_test/layout/54_fill_center2/params.yaml
@@ -0,0 +1,6 @@
+module: bag_test.layout.fill
+class: FillCenterTest2
+params:
+ w: 12000
+ h: 15000
+ fill_layer: 3
diff --git a/tests_data/bag_test/schematic/net_bus_00/out.yaml b/tests_data/bag_test/schematic/net_bus_00/out.yaml
new file mode 100644
index 0000000..c192142
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_00/out.yaml
@@ -0,0 +1,2063 @@
+pin_array_0_1:
+ lib_name: PYTEST
+ cell_name: pin_array_0_1
+ view_name: schematic
+ bbox:
+ - -80
+ - -730
+ - 210
+ - -490
+ terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -500
+ - R0
+ bbox:
+ - -77
+ - -526
+ - -20
+ - -490
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -500
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -520
+ - R0
+ bbox:
+ - -77
+ - -546
+ - -20
+ - -510
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -520
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ vin<3:0>:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -560
+ - R0
+ bbox:
+ - -77
+ - -586
+ - -20
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -560
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - 20
+ - -560
+ - R0
+ bbox:
+ - 20
+ - -586
+ - 77
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 45
+ - -560
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 193
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 200
+ - -680
+ -
+ - 200
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - 140
+ - -680
+ -
+ - 140
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 73
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 20
+ - -680
+ -
+ - 20
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 80
+ - -680
+ -
+ - 80
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 13
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - 133
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ instances:
+ XIN<3:0>:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 140
+ - -680
+ - R0
+ bbox:
+ - 130
+ - -730
+ - 150
+ - -676
+ connections:
+ noConn: vin<3:0>
+ params:
+ {}
+ is_primitive: true
+ XOUT:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 200
+ - -680
+ - R0
+ bbox:
+ - 190
+ - -730
+ - 210
+ - -676
+ connections:
+ noConn: vout
+ params:
+ {}
+ is_primitive: true
+ XVDD:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 20
+ - -680
+ - R0
+ bbox:
+ - 10
+ - -730
+ - 30
+ - -676
+ connections:
+ noConn: VDD
+ params:
+ {}
+ is_primitive: true
+ XVSS:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 80
+ - -680
+ - R0
+ bbox:
+ - 70
+ - -730
+ - 90
+ - -676
+ connections:
+ noConn: VSS
+ params:
+ {}
+ is_primitive: true
+ props:
+ connectivityLastUpdated:
+ - 0
+ - 743
+ instance#:
+ - 0
+ - 4
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1557499961
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 4
+ schGeometryLastUpdated:
+ - 0
+ - 743
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 743
+ _dbvCvTimeStamp:
+ - 0
+ - 743
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0_1__symbol:
+ lib_name: bag_test
+ cell_name: pin_array_0
+ view_name: symbol
+ bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+ terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ vin<3:0>:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin<3:0>
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ instances:
+ {}
+ props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1557496184
+ partName:
+ - 3
+ - pin_array_0
+ pin#:
+ - 0
+ - 4
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 71
+ _dbvCvTimeStamp:
+ - 0
+ - 71
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0:
+ lib_name: PYTEST
+ cell_name: pin_array_0
+ view_name: schematic
+ bbox:
+ - -80
+ - -730
+ - 210
+ - -490
+ terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -500
+ - R0
+ bbox:
+ - -77
+ - -526
+ - -20
+ - -490
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -500
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -520
+ - R0
+ bbox:
+ - -77
+ - -546
+ - -20
+ - -510
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -520
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ vin:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -560
+ - R0
+ bbox:
+ - -77
+ - -586
+ - -20
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -560
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - 20
+ - -560
+ - R0
+ bbox:
+ - 20
+ - -586
+ - 77
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 45
+ - -560
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 193
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 200
+ - -680
+ -
+ - 200
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - 140
+ - -680
+ -
+ - 140
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 73
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 20
+ - -680
+ -
+ - 20
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 80
+ - -680
+ -
+ - 80
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 13
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - 133
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ instances:
+ XIN:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 140
+ - -680
+ - R0
+ bbox:
+ - 130
+ - -730
+ - 150
+ - -676
+ connections:
+ noConn: vin
+ params:
+ {}
+ is_primitive: true
+ XOUT:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 200
+ - -680
+ - R0
+ bbox:
+ - 190
+ - -730
+ - 210
+ - -676
+ connections:
+ noConn: vout
+ params:
+ {}
+ is_primitive: true
+ XVDD:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 20
+ - -680
+ - R0
+ bbox:
+ - 10
+ - -730
+ - 30
+ - -676
+ connections:
+ noConn: VDD
+ params:
+ {}
+ is_primitive: true
+ XVSS:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 80
+ - -680
+ - R0
+ bbox:
+ - 70
+ - -730
+ - 90
+ - -676
+ connections:
+ noConn: VSS
+ params:
+ {}
+ is_primitive: true
+ props:
+ connectivityLastUpdated:
+ - 0
+ - 743
+ instance#:
+ - 0
+ - 4
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1557499961
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 4
+ schGeometryLastUpdated:
+ - 0
+ - 743
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 743
+ _dbvCvTimeStamp:
+ - 0
+ - 743
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0__symbol:
+ lib_name: bag_test
+ cell_name: pin_array_0
+ view_name: symbol
+ bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+ terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ vin:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ instances:
+ {}
+ props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1557496184
+ partName:
+ - 3
+ - pin_array_0
+ pin#:
+ - 0
+ - 4
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 71
+ _dbvCvTimeStamp:
+ - 0
+ - 71
+ cdbRevision:
+ - 0
+ - 227612
+PYTEST:
+ lib_name: PYTEST
+ cell_name: net_bus
+ view_name: schematic
+ bbox:
+ - -220
+ - -600
+ - 768
+ - -210
+ terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -220
+ - R0
+ bbox:
+ - -217
+ - -246
+ - -160
+ - -210
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -220
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -240
+ - R0
+ bbox:
+ - -217
+ - -266
+ - -160
+ - -230
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -240
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ mid<3:0>:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - -120
+ - -320
+ - R0
+ bbox:
+ - -120
+ - -346
+ - -63
+ - -310
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -95
+ - -320
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ vin:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -280
+ - R0
+ bbox:
+ - -217
+ - -306
+ - -160
+ - -270
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -280
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - -120
+ - -280
+ - R0
+ bbox:
+ - -120
+ - -306
+ - -63
+ - -270
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -95
+ - -280
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 140
+ - -600
+ -
+ - 140
+ - -560
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 133
+ - -564
+ alignment: 8
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 140
+ - -370
+ -
+ - 140
+ - -330
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 133
+ - -366
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: mid
+ origin:
+ - 340
+ - -453
+ alignment: 5
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: mid
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - -40
+ - -460
+ -
+ - 0
+ - -460
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - -4
+ - -453
+ alignment: 8
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 540
+ - -600
+ -
+ - 540
+ - -560
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 533
+ - -564
+ alignment: 8
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 540
+ - -370
+ -
+ - 540
+ - -330
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 533
+ - -366
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 680
+ - -460
+ -
+ - 720
+ - -460
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 684
+ - -453
+ alignment: 2
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: mid
+ points:
+ -
+ - 280
+ - -460
+ -
+ - 400
+ - -460
+ instances:
+ X0<3:0>:
+ lib_name: PYTEST
+ cell_name: pin_array_0
+ view_name: symbol
+ xform:
+ - 0
+ - -460
+ - R0
+ bbox:
+ - -4
+ - -564
+ - 368
+ - -366
+ connections:
+ VDD: <*4>VDD
+ VSS: <*4>VSS
+ vin: <*4>vin
+ vout: mid<3:0>
+ params:
+ {}
+ is_primitive: false
+ X1:
+ lib_name: PYTEST
+ cell_name: pin_array_0_1
+ view_name: symbol
+ xform:
+ - 400
+ - -460
+ - R0
+ bbox:
+ - 396
+ - -564
+ - 768
+ - -366
+ connections:
+ VDD: VDD
+ VSS: VSS
+ vin<3:0>: mid<3:0>
+ vout: vout
+ params:
+ {}
+ is_primitive: false
+ props:
+ connectivityLastUpdated:
+ - 0
+ - 1113
+ instance#:
+ - 0
+ - 2
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1559712708
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 5
+ schGeometryLastUpdated:
+ - 0
+ - 1113
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 1113
+ _dbvCvTimeStamp:
+ - 0
+ - 1113
+ cdbRevision:
+ - 0
+ - 227612
+PYTEST__symbol:
+ lib_name: bag_test
+ cell_name: net_bus
+ view_name: symbol
+ bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+ terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ mid<3:0>:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -44
+ - 284
+ - -36
+ stype: 0
+ ttype: 1
+ vin:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - -40
+ -
+ - 240
+ - -40
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - -40
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: mid<3:0>
+ instances:
+ {}
+ props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1559712693
+ partName:
+ - 3
+ - net_bus
+ pin#:
+ - 0
+ - 5
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("mid" "vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 99
+ _dbvCvTimeStamp:
+ - 0
+ - 99
+ cdbRevision:
+ - 0
+ - 227612
diff --git a/tests_data/bag_test/schematic/net_bus_00/out_0.cdl b/tests_data/bag_test/schematic/net_bus_00/out_0.cdl
new file mode 100644
index 0000000..809dfee
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_00/out_0.cdl
@@ -0,0 +1,142 @@
+*.BIPOLAR
+*.RESI = 2000
+*.SCALE METER
+*.MEGA
+*.RESVAL
+*.CAPVAL
+*.DIOPERI
+*.DIOAREA
+*.EQUATION
+.PARAM
+
+
+
+.SUBCKT nmos4_18 B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n2svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_svt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_lvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_hvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_standard B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_fast B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_low_power B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_18 B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p2svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_svt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_lvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_hvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_standard B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_fast B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_low_power B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT res_metal_1 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm1] l=l w=w r=0.0736*l/w
+.ENDS
+
+.SUBCKT res_metal_2 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm2] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_3 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm3] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_4 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm4] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_5 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm5] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_6 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm6] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_7 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm7] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_8 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resmt] l=l w=w r=0.0214*l/w
+.ENDS
+
+
+.SUBCKT pin_array_0_1 VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+*.PININFO VDD:I VSS:I vin<3>:I vin<2>:I vin<1>:I vin<0>:I vout:O
+.ENDS
+
+
+.SUBCKT pin_array_0 VDD VSS vin vout
+*.PININFO VDD:I VSS:I vin:I vout:O
+.ENDS
+
+
+.SUBCKT PYTEST VDD VSS vin mid<3> mid<2> mid<1> mid<0> vout
+*.PININFO VDD:I VSS:I vin:I mid<3>:O mid<2>:O mid<1>:O mid<0>:O vout:O
+X0_3 VDD VSS vin mid<3> / pin_array_0
+X0_2 VDD VSS vin mid<2> / pin_array_0
+X0_1 VDD VSS vin mid<1> / pin_array_0
+X0_0 VDD VSS vin mid<0> / pin_array_0
+X1 VDD VSS mid<3> mid<2> mid<1> mid<0> vout / pin_array_0_1
+.ENDS
diff --git a/tests_data/bag_test/schematic/net_bus_00/out_0.scs b/tests_data/bag_test/schematic/net_bus_00/out_0.scs
new file mode 100644
index 0000000..fa9dedc
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_00/out_0.scs
@@ -0,0 +1,132 @@
+simulator lang=spectre
+include "/mnt/tools/projects/erichang/cds_ff_mpt/ddr_cds_ff_mpt/cds_ff_mpt/netlist_setup/spectre_prim.scs"
+
+
+
+
+subckt nmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n2svt l=l nfin=w nf=nf m=1
+ends nmos4_18
+
+subckt nmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_svt
+
+subckt nmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_lvt
+
+subckt nmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_hvt
+
+subckt nmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_standard
+
+subckt nmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_fast
+
+subckt nmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_low_power
+
+subckt pmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p2svt l=l nfin=w nf=nf m=1
+ends pmos4_18
+
+subckt pmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_svt
+
+subckt pmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_lvt
+
+subckt pmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_hvt
+
+subckt pmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_standard
+
+subckt pmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_fast
+
+subckt pmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_low_power
+
+subckt res_metal_1 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm1 l=l w=w
+ends res_metal_1
+
+subckt res_metal_2 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm2 l=l w=w
+ends res_metal_2
+
+subckt res_metal_3 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm3 l=l w=w
+ends res_metal_3
+
+subckt res_metal_4 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm4 l=l w=w
+ends res_metal_4
+
+subckt res_metal_5 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm5 l=l w=w
+ends res_metal_5
+
+subckt res_metal_6 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm6 l=l w=w
+ends res_metal_6
+
+subckt res_metal_7 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm7 l=l w=w
+ends res_metal_7
+
+subckt res_metal_8 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm8 l=l w=w
+ends res_metal_8
+
+
+subckt pin_array_0_1 VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+ends pin_array_0_1
+
+
+subckt pin_array_0 VDD VSS vin vout
+ends pin_array_0
+
+
+subckt PYTEST VDD VSS vin mid<3> mid<2> mid<1> mid<0> vout
+X0_3 VDD VSS vin mid<3> pin_array_0
+X0_2 VDD VSS vin mid<2> pin_array_0
+X0_1 VDD VSS vin mid<1> pin_array_0
+X0_0 VDD VSS vin mid<0> pin_array_0
+X1 VDD VSS mid<3> mid<2> mid<1> mid<0> vout pin_array_0_1
+ends PYTEST
diff --git a/tests_data/bag_test/schematic/net_bus_00/out_0.v b/tests_data/bag_test/schematic/net_bus_00/out_0.v
new file mode 100644
index 0000000..48c1019
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_00/out_0.v
@@ -0,0 +1,180 @@
+
+
+
+module nmos4_18(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_svt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_lvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_hvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_standard(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_fast(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_low_power(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_18(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_svt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_lvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_hvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_standard(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_fast(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_low_power(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+
+module pin_array_0_1(
+ input wire VDD,
+ input wire VSS,
+ input wire [3:0] vin,
+ output wire vout
+);
+
+endmodule
+
+
+module pin_array_0(
+ input wire VDD,
+ input wire VSS,
+ input wire vin,
+ output wire vout
+);
+
+endmodule
+
+
+module PYTEST(
+ input wire VDD,
+ input wire VSS,
+ input wire vin,
+ output wire [3:0] mid,
+ output wire vout
+);
+
+pin_array_0 X0_3 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[3] )
+);
+
+pin_array_0 X0_2 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[2] )
+);
+
+pin_array_0 X0_1 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[1] )
+);
+
+pin_array_0 X0_0 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[0] )
+);
+
+pin_array_0_1 X1 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( mid[3:0] ),
+ .vout( vout )
+);
+
+endmodule
diff --git a/tests_data/bag_test/schematic/net_bus_00/out_1.v b/tests_data/bag_test/schematic/net_bus_00/out_1.v
new file mode 100644
index 0000000..3d2dd26
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_00/out_1.v
@@ -0,0 +1,11 @@
+
+
+module PYTEST(
+ input wire VDD,
+ input wire VSS,
+ input wire vin,
+ output wire [3:0] mid,
+ output wire vout
+);
+
+endmodule
diff --git a/tests_data/bag_test/schematic/net_bus_00/out_2.scs b/tests_data/bag_test/schematic/net_bus_00/out_2.scs
new file mode 100644
index 0000000..097afc7
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_00/out_2.scs
@@ -0,0 +1,131 @@
+simulator lang=spectre
+include "/mnt/tools/projects/erichang/cds_ff_mpt/ddr_cds_ff_mpt/cds_ff_mpt/netlist_setup/spectre_prim.scs"
+
+
+
+
+subckt nmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n2svt l=l nfin=w nf=nf m=1
+ends nmos4_18
+
+subckt nmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_svt
+
+subckt nmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_lvt
+
+subckt nmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_hvt
+
+subckt nmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_standard
+
+subckt nmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_fast
+
+subckt nmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_low_power
+
+subckt pmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p2svt l=l nfin=w nf=nf m=1
+ends pmos4_18
+
+subckt pmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_svt
+
+subckt pmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_lvt
+
+subckt pmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_hvt
+
+subckt pmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_standard
+
+subckt pmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_fast
+
+subckt pmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_low_power
+
+subckt res_metal_1 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm1 l=l w=w
+ends res_metal_1
+
+subckt res_metal_2 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm2 l=l w=w
+ends res_metal_2
+
+subckt res_metal_3 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm3 l=l w=w
+ends res_metal_3
+
+subckt res_metal_4 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm4 l=l w=w
+ends res_metal_4
+
+subckt res_metal_5 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm5 l=l w=w
+ends res_metal_5
+
+subckt res_metal_6 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm6 l=l w=w
+ends res_metal_6
+
+subckt res_metal_7 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm7 l=l w=w
+ends res_metal_7
+
+subckt res_metal_8 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm8 l=l w=w
+ends res_metal_8
+
+
+subckt pin_array_0_1 VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+ends pin_array_0_1
+
+
+subckt pin_array_0 VDD VSS vin vout
+ends pin_array_0
+
+
+X0_3 VDD VSS vin mid<3> pin_array_0
+X0_2 VDD VSS vin mid<2> pin_array_0
+X0_1 VDD VSS vin mid<1> pin_array_0
+X0_0 VDD VSS vin mid<0> pin_array_0
+X1 VDD VSS mid<3> mid<2> mid<1> mid<0> vout pin_array_0_1
+
diff --git a/tests_data/bag_test/schematic/net_bus_00/params.yaml b/tests_data/bag_test/schematic/net_bus_00/params.yaml
new file mode 100644
index 0000000..9f71df6
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_00/params.yaml
@@ -0,0 +1,2 @@
+params:
+ mult: 4
diff --git a/tests_data/bag_test/schematic/net_bus_01/out.yaml b/tests_data/bag_test/schematic/net_bus_01/out.yaml
new file mode 100644
index 0000000..5fedc03
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_01/out.yaml
@@ -0,0 +1,2063 @@
+pin_array_0_1:
+ lib_name: PYTEST
+ cell_name: pin_array_0_1
+ view_name: schematic
+ bbox:
+ - -80
+ - -730
+ - 210
+ - -490
+ terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -500
+ - R0
+ bbox:
+ - -77
+ - -526
+ - -20
+ - -490
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -500
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -520
+ - R0
+ bbox:
+ - -77
+ - -546
+ - -20
+ - -510
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -520
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ vin<3:0>:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -560
+ - R0
+ bbox:
+ - -77
+ - -586
+ - -20
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -560
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - 20
+ - -560
+ - R0
+ bbox:
+ - 20
+ - -586
+ - 77
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 45
+ - -560
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 193
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 200
+ - -680
+ -
+ - 200
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - 140
+ - -680
+ -
+ - 140
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 73
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 20
+ - -680
+ -
+ - 20
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 80
+ - -680
+ -
+ - 80
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 13
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - 133
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ instances:
+ XIN<3:0>:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 140
+ - -680
+ - R0
+ bbox:
+ - 130
+ - -730
+ - 150
+ - -676
+ connections:
+ noConn: vin<3:0>
+ params:
+ {}
+ is_primitive: true
+ XOUT:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 200
+ - -680
+ - R0
+ bbox:
+ - 190
+ - -730
+ - 210
+ - -676
+ connections:
+ noConn: vout
+ params:
+ {}
+ is_primitive: true
+ XVDD:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 20
+ - -680
+ - R0
+ bbox:
+ - 10
+ - -730
+ - 30
+ - -676
+ connections:
+ noConn: VDD
+ params:
+ {}
+ is_primitive: true
+ XVSS:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 80
+ - -680
+ - R0
+ bbox:
+ - 70
+ - -730
+ - 90
+ - -676
+ connections:
+ noConn: VSS
+ params:
+ {}
+ is_primitive: true
+ props:
+ connectivityLastUpdated:
+ - 0
+ - 743
+ instance#:
+ - 0
+ - 4
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1557499961
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 4
+ schGeometryLastUpdated:
+ - 0
+ - 743
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 743
+ _dbvCvTimeStamp:
+ - 0
+ - 743
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0_1__symbol:
+ lib_name: bag_test
+ cell_name: pin_array_0
+ view_name: symbol
+ bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+ terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ vin<3:0>:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin<3:0>
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ instances:
+ {}
+ props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1557496184
+ partName:
+ - 3
+ - pin_array_0
+ pin#:
+ - 0
+ - 4
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 71
+ _dbvCvTimeStamp:
+ - 0
+ - 71
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0_2:
+ lib_name: PYTEST
+ cell_name: pin_array_0
+ view_name: schematic
+ bbox:
+ - -80
+ - -730
+ - 210
+ - -490
+ terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -500
+ - R0
+ bbox:
+ - -77
+ - -526
+ - -20
+ - -490
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -500
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -520
+ - R0
+ bbox:
+ - -77
+ - -546
+ - -20
+ - -510
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -520
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ vin:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -560
+ - R0
+ bbox:
+ - -77
+ - -586
+ - -20
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -560
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - 20
+ - -560
+ - R0
+ bbox:
+ - 20
+ - -586
+ - 77
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 45
+ - -560
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 193
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 200
+ - -680
+ -
+ - 200
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - 140
+ - -680
+ -
+ - 140
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 73
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 20
+ - -680
+ -
+ - 20
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 80
+ - -680
+ -
+ - 80
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 13
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - 133
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ instances:
+ XIN:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 140
+ - -680
+ - R0
+ bbox:
+ - 130
+ - -730
+ - 150
+ - -676
+ connections:
+ noConn: vin
+ params:
+ {}
+ is_primitive: true
+ XOUT:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 200
+ - -680
+ - R0
+ bbox:
+ - 190
+ - -730
+ - 210
+ - -676
+ connections:
+ noConn: vout
+ params:
+ {}
+ is_primitive: true
+ XVDD:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 20
+ - -680
+ - R0
+ bbox:
+ - 10
+ - -730
+ - 30
+ - -676
+ connections:
+ noConn: VDD
+ params:
+ {}
+ is_primitive: true
+ XVSS:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 80
+ - -680
+ - R0
+ bbox:
+ - 70
+ - -730
+ - 90
+ - -676
+ connections:
+ noConn: VSS
+ params:
+ {}
+ is_primitive: true
+ props:
+ connectivityLastUpdated:
+ - 0
+ - 743
+ instance#:
+ - 0
+ - 4
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1557499961
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 4
+ schGeometryLastUpdated:
+ - 0
+ - 743
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 743
+ _dbvCvTimeStamp:
+ - 0
+ - 743
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0_2__symbol:
+ lib_name: bag_test
+ cell_name: pin_array_0
+ view_name: symbol
+ bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+ terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ vin:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ instances:
+ {}
+ props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1557496184
+ partName:
+ - 3
+ - pin_array_0
+ pin#:
+ - 0
+ - 4
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 71
+ _dbvCvTimeStamp:
+ - 0
+ - 71
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0:
+ lib_name: PYTEST
+ cell_name: net_bus
+ view_name: schematic
+ bbox:
+ - -220
+ - -600
+ - 768
+ - -210
+ terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -220
+ - R0
+ bbox:
+ - -217
+ - -246
+ - -160
+ - -210
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -220
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -240
+ - R0
+ bbox:
+ - -217
+ - -266
+ - -160
+ - -230
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -240
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ mid<3:0>:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - -120
+ - -320
+ - R0
+ bbox:
+ - -120
+ - -346
+ - -63
+ - -310
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -95
+ - -320
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ vin:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -160
+ - -280
+ - R0
+ bbox:
+ - -217
+ - -306
+ - -160
+ - -270
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -190
+ - -280
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - -120
+ - -280
+ - R0
+ bbox:
+ - -120
+ - -306
+ - -63
+ - -270
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -95
+ - -280
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 140
+ - -600
+ -
+ - 140
+ - -560
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 133
+ - -564
+ alignment: 8
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 140
+ - -370
+ -
+ - 140
+ - -330
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 133
+ - -366
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: mid
+ origin:
+ - 340
+ - -453
+ alignment: 5
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: mid
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - -40
+ - -460
+ -
+ - 0
+ - -460
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - -4
+ - -453
+ alignment: 8
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 540
+ - -600
+ -
+ - 540
+ - -560
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 533
+ - -564
+ alignment: 8
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 540
+ - -370
+ -
+ - 540
+ - -330
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 533
+ - -366
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 680
+ - -460
+ -
+ - 720
+ - -460
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 684
+ - -453
+ alignment: 2
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: mid
+ points:
+ -
+ - 280
+ - -460
+ -
+ - 400
+ - -460
+ instances:
+ X0<3:0>:
+ lib_name: PYTEST
+ cell_name: pin_array_0
+ view_name: symbol
+ xform:
+ - 0
+ - -460
+ - R0
+ bbox:
+ - -4
+ - -564
+ - 368
+ - -366
+ connections:
+ VDD: <*4>VDD
+ VSS: <*4>VSS
+ vin: <*4>vin
+ vout: mid<3:0>
+ params:
+ {}
+ is_primitive: false
+ X1:
+ lib_name: PYTEST
+ cell_name: pin_array_0_1
+ view_name: symbol
+ xform:
+ - 400
+ - -460
+ - R0
+ bbox:
+ - 396
+ - -564
+ - 768
+ - -366
+ connections:
+ VDD: VDD
+ VSS: VSS
+ vin<3:0>: mid<3:0>
+ vout: vout
+ params:
+ {}
+ is_primitive: false
+ props:
+ connectivityLastUpdated:
+ - 0
+ - 1113
+ instance#:
+ - 0
+ - 2
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1559712708
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 5
+ schGeometryLastUpdated:
+ - 0
+ - 1113
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 1113
+ _dbvCvTimeStamp:
+ - 0
+ - 1113
+ cdbRevision:
+ - 0
+ - 227612
+pin_array_0__symbol:
+ lib_name: bag_test
+ cell_name: net_bus
+ view_name: symbol
+ bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+ terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ mid<3:0>:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -44
+ - 284
+ - -36
+ stype: 0
+ ttype: 1
+ vin:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - -40
+ -
+ - 240
+ - -40
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - -40
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: mid<3:0>
+ instances:
+ {}
+ props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1559712693
+ partName:
+ - 3
+ - net_bus
+ pin#:
+ - 0
+ - 5
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("mid" "vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 99
+ _dbvCvTimeStamp:
+ - 0
+ - 99
+ cdbRevision:
+ - 0
+ - 227612
diff --git a/tests_data/bag_test/schematic/net_bus_01/out_0.cdl b/tests_data/bag_test/schematic/net_bus_01/out_0.cdl
new file mode 100644
index 0000000..5207ce7
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_01/out_0.cdl
@@ -0,0 +1,142 @@
+*.BIPOLAR
+*.RESI = 2000
+*.SCALE METER
+*.MEGA
+*.RESVAL
+*.CAPVAL
+*.DIOPERI
+*.DIOAREA
+*.EQUATION
+.PARAM
+
+
+
+.SUBCKT nmos4_18 B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n2svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_svt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_lvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_hvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_standard B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_fast B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_low_power B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_18 B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p2svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_svt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_lvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_hvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_standard B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_fast B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_low_power B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT res_metal_1 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm1] l=l w=w r=0.0736*l/w
+.ENDS
+
+.SUBCKT res_metal_2 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm2] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_3 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm3] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_4 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm4] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_5 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm5] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_6 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm6] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_7 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm7] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_8 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resmt] l=l w=w r=0.0214*l/w
+.ENDS
+
+
+.SUBCKT pin_array_0_1 VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+*.PININFO VDD:I VSS:I vin<3>:I vin<2>:I vin<1>:I vin<0>:I vout:O
+.ENDS
+
+
+.SUBCKT pin_array_0_2 VDD VSS vin vout
+*.PININFO VDD:I VSS:I vin:I vout:O
+.ENDS
+
+
+.SUBCKT pin_array_0 VDD VSS vin mid<3> mid<2> mid<1> mid<0> vout
+*.PININFO VDD:I VSS:I vin:I mid<3>:O mid<2>:O mid<1>:O mid<0>:O vout:O
+X0_3 VDD VSS vin mid<3> / pin_array_0_2
+X0_2 VDD VSS vin mid<2> / pin_array_0_2
+X0_1 VDD VSS vin mid<1> / pin_array_0_2
+X0_0 VDD VSS vin mid<0> / pin_array_0_2
+X1 VDD VSS mid<3> mid<2> mid<1> mid<0> vout / pin_array_0_1
+.ENDS
diff --git a/tests_data/bag_test/schematic/net_bus_01/out_0.scs b/tests_data/bag_test/schematic/net_bus_01/out_0.scs
new file mode 100644
index 0000000..6ea5891
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_01/out_0.scs
@@ -0,0 +1,132 @@
+simulator lang=spectre
+include "/scratch/projects/erichang/cds_ff_mpt/ddr_cds_ff_mpt/cds_ff_mpt/netlist_setup/spectre_prim.scs"
+
+
+
+
+subckt nmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n2svt l=l nfin=w nf=nf m=1
+ends nmos4_18
+
+subckt nmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_svt
+
+subckt nmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_lvt
+
+subckt nmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_hvt
+
+subckt nmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_standard
+
+subckt nmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_fast
+
+subckt nmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_low_power
+
+subckt pmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p2svt l=l nfin=w nf=nf m=1
+ends pmos4_18
+
+subckt pmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_svt
+
+subckt pmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_lvt
+
+subckt pmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_hvt
+
+subckt pmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_standard
+
+subckt pmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_fast
+
+subckt pmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_low_power
+
+subckt res_metal_1 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm1 l=l w=w
+ends res_metal_1
+
+subckt res_metal_2 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm2 l=l w=w
+ends res_metal_2
+
+subckt res_metal_3 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm3 l=l w=w
+ends res_metal_3
+
+subckt res_metal_4 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm4 l=l w=w
+ends res_metal_4
+
+subckt res_metal_5 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm5 l=l w=w
+ends res_metal_5
+
+subckt res_metal_6 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm6 l=l w=w
+ends res_metal_6
+
+subckt res_metal_7 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm7 l=l w=w
+ends res_metal_7
+
+subckt res_metal_8 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm8 l=l w=w
+ends res_metal_8
+
+
+subckt pin_array_0_1 VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+ends pin_array_0_1
+
+
+subckt pin_array_0_2 VDD VSS vin vout
+ends pin_array_0_2
+
+
+subckt pin_array_0 VDD VSS vin mid<3> mid<2> mid<1> mid<0> vout
+X0_3 VDD VSS vin mid<3> pin_array_0_2
+X0_2 VDD VSS vin mid<2> pin_array_0_2
+X0_1 VDD VSS vin mid<1> pin_array_0_2
+X0_0 VDD VSS vin mid<0> pin_array_0_2
+X1 VDD VSS mid<3> mid<2> mid<1> mid<0> vout pin_array_0_1
+ends pin_array_0
diff --git a/tests_data/bag_test/schematic/net_bus_01/out_0.v b/tests_data/bag_test/schematic/net_bus_01/out_0.v
new file mode 100644
index 0000000..399a75e
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_01/out_0.v
@@ -0,0 +1,180 @@
+
+
+
+module nmos4_18(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_svt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_lvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_hvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_standard(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_fast(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_low_power(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_18(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_svt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_lvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_hvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_standard(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_fast(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_low_power(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+
+module pin_array_0_1(
+ input wire VDD,
+ input wire VSS,
+ input wire [3:0] vin,
+ output wire vout
+);
+
+endmodule
+
+
+module pin_array_0_2(
+ input wire VDD,
+ input wire VSS,
+ input wire vin,
+ output wire vout
+);
+
+endmodule
+
+
+module pin_array_0(
+ input wire VDD,
+ input wire VSS,
+ input wire vin,
+ output wire [3:0] mid,
+ output wire vout
+);
+
+pin_array_0_2 X0_3 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[3] )
+);
+
+pin_array_0_2 X0_2 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[2] )
+);
+
+pin_array_0_2 X0_1 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[1] )
+);
+
+pin_array_0_2 X0_0 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( vin ),
+ .vout( mid[0] )
+);
+
+pin_array_0_1 X1 (
+ .VDD( VDD ),
+ .VSS( VSS ),
+ .vin( mid[3:0] ),
+ .vout( vout )
+);
+
+endmodule
diff --git a/tests_data/bag_test/schematic/net_bus_01/out_1.v b/tests_data/bag_test/schematic/net_bus_01/out_1.v
new file mode 100644
index 0000000..f9d2659
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_01/out_1.v
@@ -0,0 +1,11 @@
+
+
+module pin_array_0(
+ input wire VDD,
+ input wire VSS,
+ input wire vin,
+ output wire [3:0] mid,
+ output wire vout
+);
+
+endmodule
diff --git a/tests_data/bag_test/schematic/net_bus_01/out_2.scs b/tests_data/bag_test/schematic/net_bus_01/out_2.scs
new file mode 100644
index 0000000..3fb1c2e
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_01/out_2.scs
@@ -0,0 +1,131 @@
+simulator lang=spectre
+include "/scratch/projects/erichang/cds_ff_mpt/ddr_cds_ff_mpt/cds_ff_mpt/netlist_setup/spectre_prim.scs"
+
+
+
+
+subckt nmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n2svt l=l nfin=w nf=nf m=1
+ends nmos4_18
+
+subckt nmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_svt
+
+subckt nmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_lvt
+
+subckt nmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_hvt
+
+subckt nmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_standard
+
+subckt nmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_fast
+
+subckt nmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_low_power
+
+subckt pmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p2svt l=l nfin=w nf=nf m=1
+ends pmos4_18
+
+subckt pmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_svt
+
+subckt pmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_lvt
+
+subckt pmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_hvt
+
+subckt pmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_standard
+
+subckt pmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_fast
+
+subckt pmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_low_power
+
+subckt res_metal_1 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm1 l=l w=w
+ends res_metal_1
+
+subckt res_metal_2 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm2 l=l w=w
+ends res_metal_2
+
+subckt res_metal_3 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm3 l=l w=w
+ends res_metal_3
+
+subckt res_metal_4 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm4 l=l w=w
+ends res_metal_4
+
+subckt res_metal_5 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm5 l=l w=w
+ends res_metal_5
+
+subckt res_metal_6 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm6 l=l w=w
+ends res_metal_6
+
+subckt res_metal_7 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm7 l=l w=w
+ends res_metal_7
+
+subckt res_metal_8 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm8 l=l w=w
+ends res_metal_8
+
+
+subckt pin_array_0_1 VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+ends pin_array_0_1
+
+
+subckt pin_array_0_2 VDD VSS vin vout
+ends pin_array_0_2
+
+
+X0_3 VDD VSS vin mid<3> pin_array_0_2
+X0_2 VDD VSS vin mid<2> pin_array_0_2
+X0_1 VDD VSS vin mid<1> pin_array_0_2
+X0_0 VDD VSS vin mid<0> pin_array_0_2
+X1 VDD VSS mid<3> mid<2> mid<1> mid<0> vout pin_array_0_1
+
diff --git a/tests_data/bag_test/schematic/net_bus_01/params.yaml b/tests_data/bag_test/schematic/net_bus_01/params.yaml
new file mode 100644
index 0000000..a720ec2
--- /dev/null
+++ b/tests_data/bag_test/schematic/net_bus_01/params.yaml
@@ -0,0 +1,4 @@
+# tests the case where top_cell_name is equal to the generator cell name of an instance.
+top_cell_name: pin_array_0
+params:
+ mult: 4
diff --git a/tests_data/bag_test/schematic/pin_array_0_00/out.yaml b/tests_data/bag_test/schematic/pin_array_0_00/out.yaml
new file mode 100644
index 0000000..f36095f
--- /dev/null
+++ b/tests_data/bag_test/schematic/pin_array_0_00/out.yaml
@@ -0,0 +1,643 @@
+PYTEST:
+ lib_name: PYTEST
+ cell_name: pin_array_0
+ view_name: schematic
+ bbox:
+ - -80
+ - -730
+ - 210
+ - -490
+ terminals:
+ VDD:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -500
+ - R0
+ bbox:
+ - -77
+ - -526
+ - -20
+ - -490
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -500
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -520
+ - R0
+ bbox:
+ - -77
+ - -546
+ - -20
+ - -510
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -520
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 2
+ ttype: 0
+ vin<3:0>:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: ipin
+ view_name: symbol
+ xform:
+ - -20
+ - -560
+ - R0
+ bbox:
+ - -77
+ - -586
+ - -20
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - -50
+ - -560
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 1
+ - inst:
+ lib_name: basic
+ cell_name: opin
+ view_name: symbol
+ xform:
+ - 20
+ - -560
+ - R0
+ bbox:
+ - 20
+ - -586
+ - 77
+ - -550
+ connections:
+ {}
+ params:
+ {}
+ is_primitive: true
+ attr:
+ layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 45
+ - -560
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ attr_type: 0
+ format: 1
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vout
+ origin:
+ - 193
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vout
+ points:
+ -
+ - 200
+ - -680
+ -
+ - 200
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: vin
+ points:
+ -
+ - 140
+ - -680
+ -
+ - 140
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VSS
+ origin:
+ - 73
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VDD
+ points:
+ -
+ - 20
+ - -680
+ -
+ - 20
+ - -640
+ -
+ - 5
+ - layer: 228
+ purpose: 4294967295
+ net: VSS
+ points:
+ -
+ - 80
+ - -680
+ -
+ - 80
+ - -640
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: VDD
+ origin:
+ - 13
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 7
+ - layer: 228
+ purpose: 237
+ net: vin
+ origin:
+ - 133
+ - -676
+ alignment: 2
+ orient: R90
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin
+ instances:
+ XIN<3:0>:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 140
+ - -680
+ - R0
+ bbox:
+ - 130
+ - -730
+ - 150
+ - -676
+ connections:
+ noConn: vin<3:0>
+ params:
+ {}
+ is_primitive: true
+ XOUT:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 200
+ - -680
+ - R0
+ bbox:
+ - 190
+ - -730
+ - 210
+ - -676
+ connections:
+ noConn: vout
+ params:
+ {}
+ is_primitive: true
+ XVDD:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 20
+ - -680
+ - R0
+ bbox:
+ - 10
+ - -730
+ - 30
+ - -676
+ connections:
+ noConn: VDD
+ params:
+ {}
+ is_primitive: true
+ XVSS:
+ lib_name: basic
+ cell_name: noConn
+ view_name: symbol
+ xform:
+ - 80
+ - -680
+ - R0
+ bbox:
+ - 70
+ - -730
+ - 90
+ - -676
+ connections:
+ noConn: VSS
+ params:
+ {}
+ is_primitive: true
+ props:
+ connectivityLastUpdated:
+ - 0
+ - 743
+ instance#:
+ - 0
+ - 4
+ lastSchematicExtraction:
+ - 4
+ - time_val: 1557499961
+ net#:
+ - 0
+ - 0
+ pin#:
+ - 0
+ - 4
+ schGeometryLastUpdated:
+ - 0
+ - 743
+ schGeometryVersion:
+ - 3
+ - sch.ds.gm.1.4
+ schXtrVersion:
+ - 3
+ - sch.10.0
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 743
+ _dbvCvTimeStamp:
+ - 0
+ - 743
+ cdbRevision:
+ - 0
+ - 227612
+PYTEST__symbol:
+ lib_name: bag_test
+ cell_name: pin_array_0
+ view_name: symbol
+ bbox:
+ - -4
+ - -104
+ - 368
+ - 94
+ terminals:
+ VDD:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - 86
+ - 144
+ - 94
+ stype: 1
+ ttype: 0
+ VSS:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 136
+ - -104
+ - 144
+ - -96
+ stype: 2
+ ttype: 0
+ vin<3:0>:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - -4
+ - -4
+ - 4
+ - 4
+ stype: 0
+ ttype: 0
+ vout:
+ obj:
+ - 0
+ - layer: 229
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 276
+ - -4
+ - 284
+ - 4
+ stype: 0
+ ttype: 1
+ shapes:
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 0
+ - 0
+ -
+ - 40
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 46
+ - 0
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vin<3:0>
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 280
+ - 0
+ -
+ - 240
+ - 0
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 234
+ - 0
+ alignment: 7
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: vout
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - 90
+ -
+ - 140
+ - 50
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - 44
+ alignment: 1
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VDD
+ -
+ - 5
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ points:
+ -
+ - 140
+ - -100
+ -
+ - 140
+ - -60
+ -
+ - 7
+ - layer: 229
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -54
+ alignment: 7
+ orient: R270
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ text: VSS
+ -
+ - 0
+ - layer: 236
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 0
+ - -100
+ - 280
+ - 90
+ -
+ - 8
+ - layer: 236
+ purpose: 237
+ net: ""
+ origin:
+ - 240
+ - 50
+ alignment: 1
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 8
+ - layer: 231
+ purpose: 237
+ net: ""
+ origin:
+ - 140
+ - -5
+ alignment: 4
+ orient: R0
+ font: 5
+ height: 10
+ overbar: false
+ visible: true
+ drafting: true
+ evaluator: cdsNLPEvalText
+ -
+ - 0
+ - layer: 231
+ purpose: 4294967295
+ net: ""
+ bbox:
+ - 40
+ - -60
+ - 240
+ - 50
+ instances:
+ {}
+ props:
+ interfaceLastChanged:
+ - 4
+ - time_val: 1557496184
+ partName:
+ - 3
+ - pin_array_0
+ pin#:
+ - 0
+ - 4
+ portOrder:
+ - 5
+ - name: ILList
+ bin_val: ("vout" "VDD" "VSS" "vin")
+ vendorName:
+ - 3
+ - ""
+ app_defs:
+ _dbLastSavedCounter:
+ - 0
+ - 71
+ _dbvCvTimeStamp:
+ - 0
+ - 71
+ cdbRevision:
+ - 0
+ - 227612
diff --git a/tests_data/bag_test/schematic/pin_array_0_00/out_0.cdl b/tests_data/bag_test/schematic/pin_array_0_00/out_0.cdl
new file mode 100644
index 0000000..b3c4abb
--- /dev/null
+++ b/tests_data/bag_test/schematic/pin_array_0_00/out_0.cdl
@@ -0,0 +1,127 @@
+*.BIPOLAR
+*.RESI = 2000
+*.SCALE METER
+*.MEGA
+*.RESVAL
+*.CAPVAL
+*.DIOPERI
+*.DIOAREA
+*.EQUATION
+.PARAM
+
+
+
+.SUBCKT nmos4_18 B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n2svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_svt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_lvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_hvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_standard B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_fast B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT nmos4_low_power B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B n1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_18 B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p2svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_svt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_lvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_hvt B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_standard B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1svt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_fast B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1lvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT pmos4_low_power B D G S
+*.PININFO B:B D:B G:B S:B
+MM0 D G S B p1hvt l=l nfin=w nf=nf m=1
+.ENDS
+
+.SUBCKT res_metal_1 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm1] l=l w=w r=0.0736*l/w
+.ENDS
+
+.SUBCKT res_metal_2 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm2] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_3 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm3] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_4 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm4] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_5 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm5] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_6 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm6] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_7 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resm7] l=l w=w r=0.0604*l/w
+.ENDS
+
+.SUBCKT res_metal_8 MINUS PLUS
+*.PININFO MINUS:B PLUS:B
+RR0 PLUS MINUS $[resmt] l=l w=w r=0.0214*l/w
+.ENDS
+
+
+.SUBCKT PYTEST VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+*.PININFO VDD:I VSS:I vin<3>:I vin<2>:I vin<1>:I vin<0>:I vout:O
+.ENDS
diff --git a/tests_data/bag_test/schematic/pin_array_0_00/out_0.scs b/tests_data/bag_test/schematic/pin_array_0_00/out_0.scs
new file mode 100644
index 0000000..15cdfbc
--- /dev/null
+++ b/tests_data/bag_test/schematic/pin_array_0_00/out_0.scs
@@ -0,0 +1,119 @@
+simulator lang=spectre
+include "/mnt/tools/projects/erichang/cds_ff_mpt/ddr_cds_ff_mpt/cds_ff_mpt/netlist_setup/spectre_prim.scs"
+
+
+
+
+subckt nmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n2svt l=l nfin=w nf=nf m=1
+ends nmos4_18
+
+subckt nmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_svt
+
+subckt nmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_lvt
+
+subckt nmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_hvt
+
+subckt nmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_standard
+
+subckt nmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_fast
+
+subckt nmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_low_power
+
+subckt pmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p2svt l=l nfin=w nf=nf m=1
+ends pmos4_18
+
+subckt pmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_svt
+
+subckt pmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_lvt
+
+subckt pmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_hvt
+
+subckt pmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_standard
+
+subckt pmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_fast
+
+subckt pmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_low_power
+
+subckt res_metal_1 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm1 l=l w=w
+ends res_metal_1
+
+subckt res_metal_2 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm2 l=l w=w
+ends res_metal_2
+
+subckt res_metal_3 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm3 l=l w=w
+ends res_metal_3
+
+subckt res_metal_4 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm4 l=l w=w
+ends res_metal_4
+
+subckt res_metal_5 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm5 l=l w=w
+ends res_metal_5
+
+subckt res_metal_6 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm6 l=l w=w
+ends res_metal_6
+
+subckt res_metal_7 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm7 l=l w=w
+ends res_metal_7
+
+subckt res_metal_8 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm8 l=l w=w
+ends res_metal_8
+
+
+subckt PYTEST VDD VSS vin<3> vin<2> vin<1> vin<0> vout
+ends PYTEST
diff --git a/tests_data/bag_test/schematic/pin_array_0_00/out_0.v b/tests_data/bag_test/schematic/pin_array_0_00/out_0.v
new file mode 100644
index 0000000..6217492
--- /dev/null
+++ b/tests_data/bag_test/schematic/pin_array_0_00/out_0.v
@@ -0,0 +1,124 @@
+
+
+
+module nmos4_18(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_svt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_lvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_hvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_standard(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_fast(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module nmos4_low_power(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_18(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_svt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_lvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_hvt(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_standard(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_fast(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+module pmos4_low_power(
+ inout B,
+ inout D,
+ inout G,
+ inout S
+);
+endmodule
+
+
+module PYTEST(
+ input wire VDD,
+ input wire VSS,
+ input wire [3:0] vin,
+ output wire vout
+);
+
+endmodule
diff --git a/tests_data/bag_test/schematic/pin_array_0_00/out_1.v b/tests_data/bag_test/schematic/pin_array_0_00/out_1.v
new file mode 100644
index 0000000..6a6574e
--- /dev/null
+++ b/tests_data/bag_test/schematic/pin_array_0_00/out_1.v
@@ -0,0 +1,10 @@
+
+
+module PYTEST(
+ input wire VDD,
+ input wire VSS,
+ input wire [3:0] vin,
+ output wire vout
+);
+
+endmodule
diff --git a/tests_data/bag_test/schematic/pin_array_0_00/out_2.scs b/tests_data/bag_test/schematic/pin_array_0_00/out_2.scs
new file mode 100644
index 0000000..796e115
--- /dev/null
+++ b/tests_data/bag_test/schematic/pin_array_0_00/out_2.scs
@@ -0,0 +1,118 @@
+simulator lang=spectre
+include "/mnt/tools/projects/erichang/cds_ff_mpt/ddr_cds_ff_mpt/cds_ff_mpt/netlist_setup/spectre_prim.scs"
+
+
+
+
+subckt nmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n2svt l=l nfin=w nf=nf m=1
+ends nmos4_18
+
+subckt nmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_svt
+
+subckt nmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_lvt
+
+subckt nmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_hvt
+
+subckt nmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1svt l=l nfin=w nf=nf m=1
+ends nmos4_standard
+
+subckt nmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1lvt l=l nfin=w nf=nf m=1
+ends nmos4_fast
+
+subckt nmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_n1hvt l=l nfin=w nf=nf m=1
+ends nmos4_low_power
+
+subckt pmos4_18 B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p2svt l=l nfin=w nf=nf m=1
+ends pmos4_18
+
+subckt pmos4_svt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_svt
+
+subckt pmos4_lvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_lvt
+
+subckt pmos4_hvt B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_hvt
+
+subckt pmos4_standard B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1svt l=l nfin=w nf=nf m=1
+ends pmos4_standard
+
+subckt pmos4_fast B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1lvt l=l nfin=w nf=nf m=1
+ends pmos4_fast
+
+subckt pmos4_low_power B D G S
+parameters l w nf
+MM0 D G S B cds_ff_mpt_p1hvt l=l nfin=w nf=nf m=1
+ends pmos4_low_power
+
+subckt res_metal_1 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm1 l=l w=w
+ends res_metal_1
+
+subckt res_metal_2 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm2 l=l w=w
+ends res_metal_2
+
+subckt res_metal_3 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm3 l=l w=w
+ends res_metal_3
+
+subckt res_metal_4 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm4 l=l w=w
+ends res_metal_4
+
+subckt res_metal_5 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm5 l=l w=w
+ends res_metal_5
+
+subckt res_metal_6 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm6 l=l w=w
+ends res_metal_6
+
+subckt res_metal_7 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm7 l=l w=w
+ends res_metal_7
+
+subckt res_metal_8 MINUS PLUS
+parameters l w
+RR0 PLUS MINUS cds_ff_mpt_resm8 l=l w=w
+ends res_metal_8
+
+
+
diff --git a/tests_data/bag_test/schematic/pin_array_0_00/params.yaml b/tests_data/bag_test/schematic/pin_array_0_00/params.yaml
new file mode 100644
index 0000000..9f71df6
--- /dev/null
+++ b/tests_data/bag_test/schematic/pin_array_0_00/params.yaml
@@ -0,0 +1,2 @@
+params:
+ mult: 4
diff --git a/tests_gen/__init__.py b/tests_gen/__init__.py
new file mode 100644
index 0000000..7672dee
--- /dev/null
+++ b/tests_gen/__init__.py
@@ -0,0 +1,42 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
diff --git a/tests_gen/conftest.py b/tests_gen/conftest.py
new file mode 100644
index 0000000..21ae509
--- /dev/null
+++ b/tests_gen/conftest.py
@@ -0,0 +1,209 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, Any, List
+
+import importlib
+from pathlib import Path
+
+import pytest
+
+from bag.io.file import read_yaml
+from bag.core import BagProject
+
+
def pytest_assertrepr_compare(op, left, right):
    """Pytest hook: provide a custom failure message for ``dict == dict``.

    Returning ``None`` tells pytest to fall back to its default comparison
    report; a list of strings replaces the report for this comparison.
    """
    both_dicts = isinstance(left, dict) and isinstance(right, dict)
    if not (both_dicts and op == '=='):
        return None
    return get_dict_diff_msg(left, right)
+
+
def pytest_addoption(parser):
    """Pytest hook: register the command-line options used by these tests.

    Options are registered in a fixed order via a small spec table so the
    flag/action/default/help for each option is visible at a glance.
    """
    option_specs = [
        ('--data_root', 'store', '', 'test data root directory'),
        ('--package', 'store', '', 'generator package to test'),
        ('--gen_output', 'store_true', False, 'True to generate expected outputs'),
        ('--run_lvs', 'store_true', False, 'True to run LVS'),
    ]
    for flag, action, default, help_str in option_specs:
        parser.addoption(flag, action=action, default=default, help=help_str)
+
+
def get_dict_diff_msg(left: Dict[str, Any], right: Dict[str, Any]) -> List[str]:
    """Return a human-readable line-by-line diff of two (nested) dictionaries."""
    msgs = ['Comparing (Nested) Dictionaries:']
    get_dict_diff_msg_helper(left, right, msgs, [])
    return msgs


def get_dict_diff_msg_helper(left: Dict[str, Any], right: Dict[str, Any], msgs: List[str],
                             prefix: List[str]) -> None:
    """Append diff messages for *left* vs *right* under the key path *prefix*.

    Keys are visited in sorted order.  Nested dictionaries recurse with the
    key appended to *prefix*; scalar mismatches and missing keys are reported
    with the comma-joined key path (``L`` = left side, ``R`` = right side).
    """
    path = ','.join(prefix)
    for key in sorted(left.keys() | right.keys()):
        in_left = key in left
        in_right = key in right
        if in_left and in_right:
            lval = left[key]
            rval = right[key]
            if lval == rval:
                continue
            if isinstance(lval, dict) and isinstance(rval, dict):
                # Recurse into the nested mismatch with an extended key path.
                get_dict_diff_msg_helper(lval, rval, msgs, prefix + [key])
            else:
                msgs.append(f'L[{path},{key}]:')
                msgs.append(f'{lval}')
                msgs.append(f'R[{path},{key}]:')
                msgs.append(f'{rval}')
        elif in_left:
            # Key present on the left only: report it missing on the right.
            msgs.append(f'R[{path}] missing key: {key}')
        else:
            msgs.append(f'L[{path}] missing key: {key}')
+
+
def get_test_data_id(data: Dict[str, Any]) -> str:
    """Return the pytest test id stored under ``data['pytest']['test_id']``."""
    pytest_info = data['pytest']
    return pytest_info['test_id']
+
+
def pytest_generate_tests(metafunc):
    """Pytest hook: parametrize the ``dsn_specs`` fixture from on-disk test data.

    Scans ``--data_root`` for per-package test-case directories, reads each
    case's ``specs.yaml``, injects output paths and a pytest id, then
    parametrizes ``dsn_specs`` with the resulting dictionaries.
    """
    # Only applies to tests that request the 'dsn_specs' fixture.
    if 'dsn_specs' not in metafunc.fixturenames:
        return

    # NOTE(review): 'package' vs '--data_root' — pytest accepts both spellings
    # for getoption, but the mixed usage here is inconsistent.
    pkg_name = metafunc.config.getoption('package')
    root_dir = Path(metafunc.config.getoption('--data_root'))

    # get list of packages
    if pkg_name:
        # check package is importable
        try:
            importlib.import_module(pkg_name)
        except ImportError:
            raise ImportError(f"Cannot find python package {pkg_name}, "
                              "make sure it's on your PYTHONPATH")

        # check data directory exists
        tmp = root_dir / pkg_name
        if not tmp.is_dir():
            raise ValueError(f'package data directory {tmp} is not a directory')
        pkg_iter = [pkg_name]
    else:
        # No package given: treat every subdirectory of root_dir as a package.
        pkg_iter = (d.name for d in root_dir.iterdir() if d.is_dir())

    data = []

    for pkg in pkg_iter:
        cur_dir = root_dir / pkg
        if not cur_dir.is_dir():
            continue

        # Each subdirectory of the package data directory is one test case.
        for p in cur_dir.iterdir():
            if p.is_dir():
                test_id = p.name
                # noinspection PyTypeChecker
                content = read_yaml(p / 'specs.yaml')
                # inject fields
                # Outputs for this case go under pytest_output/<pkg>/<test_id>.
                root_path = Path('pytest_output', pkg, test_id).resolve()
                content['root_dir'] = str(root_path)
                content['impl_lib'] = 'PYTEST'
                content['impl_cell'] = test_id
                content['layout_file'] = str(root_path / 'out.gds')
                content['yaml_file'] = str(root_path / 'out.yaml')
                content['netlist_file'] = str(root_path / 'out.netlist')
                content['model_file'] = str(root_path / 'out.model')
                pytest_info = dict(test_id=f'{pkg}__{test_id}')
                # Record every expected-output file (name starting with 'out')
                # keyed as '<stem>_<extension>' for the test body to compare.
                for fpath in p.iterdir():
                    if fpath.stem.startswith('out'):
                        pytest_info[f'{fpath.stem}_{fpath.suffix[1:]}'] = fpath.absolute()
                content['pytest'] = pytest_info

                data.append(content)
    if data:
        # indirect=True routes each dict through the dsn_specs fixture.
        metafunc.parametrize('dsn_specs', data, indirect=True, ids=get_test_data_id)
+
+
@pytest.fixture(scope='session')
def root_test_dir() -> Path:
    """Session-scoped root directory for test outputs; created if absent."""
    out_dir = Path('pytest_output')
    out_dir.mkdir(parents=True, exist_ok=True)
    return out_dir
+
+
@pytest.fixture(scope='session')
def gen_output(request) -> bool:
    """True when expected-output files should be (re)generated (``--gen_output``)."""
    config = request.config
    return config.getoption("--gen_output")
+
+
@pytest.fixture(scope='session')
def run_lvs(request) -> bool:
    """True when LVS should be run as part of the tests (``--run_lvs``)."""
    config = request.config
    return config.getoption("--run_lvs")
+
+
@pytest.fixture(scope='session')
def bag_project() -> BagProject:
    """Single BagProject instance shared across the whole test session."""
    prj = BagProject()
    return prj
+
+
@pytest.fixture(scope='session')
def dsn_specs(request) -> Dict[str, Any]:
    """The parametrized design-spec dictionary for the current test case."""
    specs = request.param
    return specs
diff --git a/tests_gen/test_dsn.py b/tests_gen/test_dsn.py
new file mode 100644
index 0000000..908ad35
--- /dev/null
+++ b/tests_gen/test_dsn.py
@@ -0,0 +1,133 @@
+# SPDX-License-Identifier: BSD-3-Clause AND Apache-2.0
+# Copyright 2018 Regents of the University of California
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# * Neither the name of the copyright holder nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2019 Blue Cheetah Analog Design Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Dict, Any
+
+import re
+from pathlib import Path
+
+from pybag.enum import DesignOutput
+from pybag.core import read_gds
+
+from bag.env import get_bag_work_dir, get_gds_layer_map, get_gds_object_map
+from bag.io.file import read_file, read_yaml
+from bag.core import BagProject
+
+
def check_netlist(output_type: DesignOutput, actual: str, expect: str) -> None:
    """Compare a generated netlist against its golden reference.

    Include-file lines embed absolute paths that differ between machines, so
    they are compared by path suffix relative to the BAG working directory;
    every other line must match exactly.

    Parameters
    ----------
    output_type : DesignOutput
        netlist format; determines which include-line syntax to recognize.
    actual : str
        contents of the freshly generated netlist.
    expect : str
        contents of the expected (golden) netlist.

    Raises
    ------
    AssertionError
        if the two netlists differ.
    """
    if output_type == DesignOutput.CDL:
        inc_line = '^\\.INCLUDE (.*)$'
    elif (output_type == DesignOutput.VERILOG or
          output_type == DesignOutput.SYSVERILOG or
          output_type == DesignOutput.SPECTRE):
        inc_line = '^include "(.*)"$'
    else:
        inc_line = ''

    if not inc_line:
        # no include syntax known for this format: require an exact match.
        assert actual == expect
        return

    bag_work_dir = get_bag_work_dir()
    pattern = re.compile(inc_line)
    actual_lines = actual.splitlines()
    expect_lines = expect.splitlines()
    # BUG FIX: zip() stops at the shorter sequence, so a netlist with missing
    # or extra trailing lines used to pass the comparison silently.  Check the
    # line counts explicitly before the pairwise compare.
    assert len(actual_lines) == len(expect_lines)
    for al, el in zip(actual_lines, expect_lines):
        am = pattern.match(al)
        if am is None:
            assert al == el
        else:
            em = pattern.match(el)
            if em is None:
                assert al == el
            else:
                # both are include statements: compare by suffix so the
                # machine-specific work-directory prefix does not matter.
                apath = am.group(1)
                epath = em.group(1)
                arel = Path(apath).relative_to(bag_work_dir)
                assert epath.endswith(str(arel))
+
+
def test_dsn(bag_project: BagProject, dsn_specs: Dict[str, Any], gen_output: bool, run_lvs: bool
             ) -> None:
    """Generate one design from its specs and diff its outputs against golden files.

    The cell described by ``dsn_specs`` is generated (layout only if a layout
    generator class is given, schematic, and behavioral model).  Unless
    ``gen_output`` is set — regenerate-golden mode — every expected output file
    listed in the specs' ``pytest`` section is compared against the freshly
    generated file of the same name.

    Parameters
    ----------
    bag_project : BagProject
        session-scoped BAG project database.
    dsn_specs : Dict[str, Any]
        design specification dictionary supplied by indirect parametrization.
    gen_output : bool
        True to only generate outputs and skip golden-file comparison.
    run_lvs : bool
        True to run LVS during generation.
    """
    impl_lib: str = dsn_specs['impl_lib']
    root_dir: str = dsn_specs['root_dir']
    # empty string means no layout generator class; layout generation is skipped below.
    lay_str: str = dsn_specs.get('lay_class', '')
    # maps '<stem>_<ext>' keys (plus a 'test_id' entry) to golden file paths.
    pytest_info: Dict[str, Path] = dsn_specs['pytest']
    model_type: str = dsn_specs.get('model_type', 'SYSVERILOG')
    root_path = Path(root_dir)
    mod_type: DesignOutput = DesignOutput[model_type]

    lay_db = bag_project.make_template_db(impl_lib)
    bag_project.generate_cell(dsn_specs, raw=True, gen_lay=bool(lay_str), gen_sch=True,
                              run_drc=False, run_lvs=run_lvs, run_rcx=False, lay_db=lay_db,
                              gen_model=True)

    if not gen_output:
        for key, expect_path in pytest_info.items():
            if key == 'test_id':
                # bookkeeping entry, not a golden file.
                continue

            # reconstruct the generated file name from the key, e.g. 'out_gds' -> 'out.gds'.
            # NOTE(review): assumes golden-file stems contain no underscore — confirm
            # against the conftest code that builds these keys.
            out_path = root_path / key.replace('_', '.')
            if not out_path.is_file():
                raise ValueError(f'Cannot find output file: {out_path}')
            if key.endswith('yaml'):
                # compare parsed YAML so formatting differences are ignored.
                actual_dict = read_yaml(out_path)
                expect_dict = read_yaml(expect_path)
                assert actual_dict == expect_dict
            elif key.endswith('gds'):
                # compare parsed GDS cellviews rather than raw bytes.
                lay_map = get_gds_layer_map()
                obj_map = get_gds_object_map()
                grid = lay_db.grid
                tr_colors = lay_db.tr_colors
                expect_cv_list = read_gds(str(expect_path), lay_map, obj_map, grid, tr_colors)
                actual_cv_list = read_gds(str(out_path), lay_map, obj_map, grid, tr_colors)
                assert expect_cv_list == actual_cv_list
            else:
                # remaining text outputs: netlists compare with CDL include
                # syntax, model files with the configured model output type.
                if key.endswith('netlist'):
                    output_type = DesignOutput.CDL
                else:
                    output_type = mod_type

                actual = read_file(out_path)
                expect = read_file(expect_path)
                check_netlist(output_type, actual, expect)