Start of public OpenTitan development history

Code contributors:
Alex Bradbury <asb@lowrisc.org>
Cindy Chen <chencindy@google.com>
Eunchan Kim <eunchan@google.com>
Gaurang Chitroda <gaurangg@google.com>
Mark Hayter <mark.hayter@gmail.com>
Michael Schaffner <msf@google.com>
Miguel Osorio <miguelosorio@google.com>
Nils Graf <nilsg@google.com>
Philipp Wagner <phw@lowrisc.org>
Pirmin Vogel <vogelpi@lowrisc.org>
Ram Babu Penugonda <rampenugonda@google.com>
Scott Johnson <scottdj@google.com>
Shail Kushwah <kushwahs@google.com>
Srikrishna Iyer <sriyer@google.com>
Steve Nelson <Steve.Nelson@wdc.com>
Tao Liu <taliu@google.com>
Timothy Chen <timothytim@google.com>
Tobias Wölfel <tobias.woelfel@mailbox.org>
Weicai Yang <weicai@google.com>
diff --git a/util/build_docs.py b/util/build_docs.py
new file mode 100755
index 0000000..b1fe566
--- /dev/null
+++ b/util/build_docs.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+#
+# pip3 install --user livereload
+# Usage:
+#   run './build_docs.py --preview' to generate the documentation and keep
+#   it updated. Open 'http://localhost:5500/' to see the live-updating output
+#   (this opens the top-level index page). You can also access a specific
+#   document directly at 'http://localhost:5500/path/to/doc.html',
+#       e.g. http://localhost:5500/hw/ip/uart/doc/uart.html
+
+import argparse
+import logging
+import os
+import shutil
+from pathlib import Path
+
+import livereload
+
+import docgen.generate
+
+USAGE = """
+  build_docs [options]
+"""
+
+MARKDOWN_EXTENSIONS = [
+    '.md',
+    '.mkd',
+]
+STATIC_ASSET_EXTENSIONS = [
+    '.svg',
+    '.png',
+    '.jpg',
+    '.css',
+]
+HJSON_EXTENSIONS = ['.hjson']
+
+# Configurations
+# TODO(eunchan): Move to config.yaml
+SRCTREE_TOP = Path(__file__).parent.joinpath('..').resolve()
+config = {
+    # Toplevel source directory
+    "topdir": SRCTREE_TOP,
+
+    # A list of directories containing documentation within topdir. To ensure
+    # the top-level sitemap doesn't have broken links, this should be kept
+    # in-sync with the doctree tag in sitemap.md.
+    "incdirs": ['./doc', './hw', './sw', './util'],
+
+    # Output directory for documents
+    "outdir": SRCTREE_TOP.joinpath('opentitan-docs'),
+    "verbose": False,
+}
+
+
+def get_doc_files(extensions=MARKDOWN_EXTENSIONS + STATIC_ASSET_EXTENSIONS):
+    """Get the absolute path of files containing documentation
+    """
+    file_list = []
+    # doc files on toplevel
+    for ext in extensions:
+        file_list += config["topdir"].glob('*' + ext)
+    # doc files in include dirs
+    for incdir in config['incdirs']:
+        for ext in extensions:
+            file_list += config["topdir"].joinpath(incdir).rglob('*' + ext)
+    return file_list
+
+
+def ensure_dest_dir(dest_pathname):
+    os.makedirs(dest_pathname.parent, exist_ok=True)
+
+
+def path_src_to_dest(src_pathname, dest_filename_suffix=None):
+    """Get the destination pathname from a source pathname
+    """
+    src_relpath = Path(src_pathname).relative_to(config["topdir"])
+    dest_pathname = Path(config["outdir"]).joinpath(src_relpath)
+    if dest_filename_suffix:
+        dest_pathname = dest_pathname.with_suffix(dest_filename_suffix)
+    return dest_pathname
+
+
+def process_file_markdown(src_pathname):
+    """Process a markdown file and copy it to the destination
+    """
+    dest_pathname = path_src_to_dest(src_pathname, '.html')
+
+    logging.info("Processing Markdown file: %s -> %s" %
+                 (str(src_pathname), str(dest_pathname)))
+
+    ensure_dest_dir(dest_pathname)
+
+    with open(dest_pathname, 'w', encoding='UTF-8') as f:
+        outstr = docgen.generate.generate_doc(str(src_pathname),
+                                              verbose=config['verbose'],
+                                              inlinecss=True,
+                                              inlinewave=True,
+                                              asdiv=False)
+        f.write(outstr)
+
+    return dest_pathname
+
+
+def process_file_copytodest(src_pathname):
+    """Copy a file to the destination directory with no further processing
+    """
+    dest_pathname = path_src_to_dest(src_pathname)
+
+    logging.info("Copying %s -> %s" % (str(src_pathname), str(dest_pathname)))
+
+    ensure_dest_dir(dest_pathname)
+    shutil.copy(src_pathname, dest_pathname)
+
+
+def process_all_files():
+    """Process all files
+
+    The specific processing action depends on the file type.
+    """
+    src_files = get_doc_files()
+
+    for src_pathname in src_files:
+        if src_pathname.suffix in MARKDOWN_EXTENSIONS:
+            process_file_markdown(src_pathname)
+        elif src_pathname.suffix in STATIC_ASSET_EXTENSIONS:
+            process_file_copytodest(src_pathname)
+
+
+def main():
+    logging.basicConfig(level=logging.INFO,
+                        format="%(asctime)s - %(message)s",
+                        datefmt="%Y-%m-%d %H:%M")
+
+    parser = argparse.ArgumentParser(
+        prog="build_docs",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        usage=USAGE)
+    parser.add_argument(
+        '--preview',
+        action='store_true',
+        help="""starts a local server with live reload (updates triggered upon
+             changes in the documentation files). this feature is intended
+             to preview the documentation locally.""")
+
+    args = parser.parse_args()
+
+    # Initial processing of all files
+    process_all_files()
+
+    if args.preview:
+        # Setup livereload watcher
+        server = livereload.Server()
+        exts_to_watch = MARKDOWN_EXTENSIONS +       \
+                        STATIC_ASSET_EXTENSIONS +   \
+                        HJSON_EXTENSIONS
+        for src_pathname in get_doc_files(exts_to_watch):
+            server.watch(str(src_pathname), process_all_files)
+        server.serve(root=config['topdir'].joinpath(config['outdir']))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/util/container/Dockerfile b/util/container/Dockerfile
new file mode 100644
index 0000000..7472813
--- /dev/null
+++ b/util/container/Dockerfile
@@ -0,0 +1,111 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# Docker container containing various hardware and software development tools
+# for OpenTitan.
+
+# Global configuration options.
+ARG VERILATOR_VERSION=4.010
+
+# The RISCV toolchain version should match the release tag used in GitHub.
+ARG RISCV_TOOLCHAIN_TAR_VERSION=20190807-1
+
+# Build OpenOCD
+# OpenOCD is a tool to connect with the target chip over JTAG and similar
+# transports.
+FROM ubuntu:16.04 AS openocd
+RUN apt-get update && apt-get install -y \
+    autoconf \
+    git \
+    libftdi1-dev \
+    libtool \
+    libusb-1.0.0-dev \
+    pkg-config \
+    texinfo
+RUN git clone --depth=1 https://github.com/riscv/riscv-openocd.git /usr/local/src/openocd
+RUN cd /usr/local/src/openocd && ./bootstrap && mkdir build && cd build && \
+    ../configure --enable-ftdi --enable-verbose-jtag-io --disable-vsllink \
+    --enable-remote-bitbang --prefix=/tools/openocd && \
+    make -j$(nproc) && make install
+
+# Build Verilator.
+FROM ubuntu:16.04 as verilator
+ARG VERILATOR_VERSION
+RUN apt-get update && apt-get install -y \
+    autoconf \
+    automake \
+    autotools-dev \
+    bison \
+    build-essential \
+    flex \
+    git
+RUN git clone --depth=1 -b v${VERILATOR_VERSION} \
+    http://git.veripool.org/git/verilator /usr/local/src/verilator
+RUN cd /usr/local/src/verilator && \
+    autoconf && ./configure --prefix=/tools/verilator/${VERILATOR_VERSION} && \
+    make -j$(nproc) && make install
+
+
+# Main container image.
+FROM ubuntu:16.04 AS opentitan
+ARG VERILATOR_VERSION
+ARG RISCV_TOOLCHAIN_TAR_VERSION
+
+LABEL version="1.0"
+LABEL description="OpenTitan container for hardware development."
+LABEL maintainer="miguelosorio@google.com"
+
+# Copy tools from previous build stages
+WORKDIR /tools
+COPY --from=openocd /tools/openocd openocd
+COPY --from=verilator /tools/verilator/${VERILATOR_VERSION} verilator/${VERILATOR_VERSION}
+
+# Required packages
+RUN apt-get update && apt-get install -y \
+    build-essential \
+    curl \
+    git \
+    gnupg2 \
+    libc6-i386 \
+    libelf-dev \
+    libftdi-dev \
+    libftdi1-dev \
+    libftdi1 \
+    libssl-dev \
+    libtool \
+    libusb-1.0-0-dev \
+    libxml2-dev \
+    minicom \
+    ninja-build \
+    pkgconf \
+    screen \
+    srecord \
+    zlib1g-dev
+
+# Install Python3 and support libraries, then clean up the apt caches to
+# reduce the image size.
+RUN apt-get install -y \
+    python3 \
+    python3-pip \
+    python3-setuptools && \
+    apt-get clean; rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /usr/share/doc/*
+
+# Copy files needed for additional install steps into a temp directory.
+COPY python-requirements.txt /tmp/python-requirements.txt
+RUN pip3 install -r /tmp/python-requirements.txt
+
+COPY util/get-toolchain.py /tmp/get-toolchain.py
+RUN /tmp/get-toolchain.py -r ${RISCV_TOOLCHAIN_TAR_VERSION}
+RUN rm /tmp/python-requirements.txt /tmp/get-toolchain.py
+
+# Use bash as default shell
+RUN ln -sf /bin/bash /bin/sh
+
+# Include tools in PATH.
+ENV PATH "/tools/verilator/${VERILATOR_VERSION}/bin:${PATH}"
+
+# Configures default container user.
+ENV USER ot
+
+ENTRYPOINT /bin/bash
diff --git a/util/container/README.md b/util/container/README.md
new file mode 100644
index 0000000..9a0b958
--- /dev/null
+++ b/util/container/README.md
@@ -0,0 +1,38 @@
+# Docker Container
+
+Docker container based on Ubuntu 16.04 LTS containing various hardware and
+software development tools for OpenTitan. Current list of tools:
+
+* Python3
+* fusesoc
+* OpenOCD
+* RISCV toolchain
+* Verilator
+
+## Local Build Instructions
+
+Skip this step if planning to use the pre-built container. To build in local
+mode:
+
+```shell
+$ cd ${REPO_TOP}
+$ sudo docker build -t opentitan -f util/container/Dockerfile .
+```
+
+## Using the Container
+
+To run container in interactive mode:
+
+```shell
+$ docker run -it -v ${REPO_TOP}:/repo -w /repo --user $(id -u):$(id -g) opentitan
+```
+
+## Pre-built Container
+
+There is an experimental version of the container available. To download, run:
+
+```shell
+$ time docker pull gcr.io/opentitan/hw_dev
+```
+
+Use `gcr.io/opentitan/hw_dev` as the container name in any Docker commands.
diff --git a/util/diff_generated_util_output.py b/util/diff_generated_util_output.py
new file mode 100755
index 0000000..4a0feab
--- /dev/null
+++ b/util/diff_generated_util_output.py
@@ -0,0 +1,165 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Show a diff between the generated output from utils in the current working
+tree versus a previous revision (HEAD by default). This makes it easy to
+inspect the impact of modifications to either the utility implementation or
+its input files on the generated output (e.g. HTML).
+"""
+
+import argparse
+import os
+import shlex
+import subprocess
+import sys
+import tempfile
+
+from reggen import version
+
+# Test list format:
+# output, outisdir, commandpre, commandpost
+# if outisdir then it will be mkdired
+# command is commandpre + fullpath_output + commandpost
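+# For example the entry ["uart.html", False,
+#   "./docgen.py ../hw/ip/uart/doc/uart.md > ", ""]
+# runs: ./docgen.py ../hw/ip/uart/doc/uart.md > <outdir>/uart.html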
+
+testlist = [
+    ["eguart.html", False,
+     "./docgen.py docgen/examples/uart.md > ", ""],
+    ["eguartcfg.html", False,
+     "./docgen.py docgen/examples/uartcfg.md > ", ""],
+    ["uart.html", False,
+     "./docgen.py ../hw/ip/uart/doc/uart.md > ", ""],
+    ["uart-d.html", False,
+     "./docgen.py -d docgen/examples/uart.md > ", ""],
+    ["uart16550-j.html", False,
+     "./docgen.py -j docgen/examples/uart16550.md > ", ""],
+    ["uart16550.html", False,
+     "./docgen.py docgen/examples/uart16550.md > ", ""],
+    ["regdoc.md", False,
+     "./regtool.py --doc > ", ""],
+    ["wave.html", False,
+     "./wavetool.py wavegen/examples/* > ", ""],
+    ["uart_rtl", True,
+     "./regtool.py -r -t ", " ../hw/ip/uart/doc/uart.hjson"],
+    ["uart_dv", True,
+     "./regtool.py -s -t ", " ../hw/ip/uart/doc/uart.hjson"],
+    # gp test multireg
+    ["gp.html", False,
+     "./docgen.py docgen/examples/gp.md > ", ""],
+    ["gp_rtl", True,
+     "./regtool.py -r -t ", " docgen/examples/gp.hjson"],
+    ["gp_dv", True,
+     "./regtool.py -s -t ", " docgen/examples/gp.hjson"],
+    # errors tests error detection
+    ["errors.html", False,
+     "./docgen.py docgen/examples/errors.md > ", ""],
+    # window tests
+    ["window.html", False,
+     "./docgen.py docgen/examples/test_win.md > ", ""],
+    # include tests
+    ["includes.html", False,
+     "./docgen.py docgen/examples/test_inc.md > ", ""],
+    # bad write enable tests
+    ["badwen.html", False,
+     "./docgen.py docgen/examples/badwen.md > ", ""],
+    # generating include define headers
+    ["uart.h", False,
+     "./regtool.py -D ../hw/ip/uart/doc/uart.hjson > ", ""],
+    ["gpio.h", False,
+     "./regtool.py -D ../hw/ip/gpio/doc/gpio.hjson > ", ""],
+    ["spi_device.h", False,
+     "./regtool.py -D ../hw/ip/spi_device/doc/spi_device.hjson > ", ""]
+] # yapf: disable
+
+
+def generate_output(outdir, verbose):
+    for t in testlist:
+        out = shlex.quote(os.path.join(outdir, t[0]))
+        if t[1]:
+            # in new tmpdir so the directory should never be there already
+            os.mkdir(out)
+        errors_out = open(out + ".STDERR", 'w', encoding='UTF-8')
+        with errors_out:
+            err = subprocess.call(t[2] + out + t[3],
+                                  stderr=errors_out,
+                                  shell=True)
+            # write a file so it pops up in the diff
+            # if it is different
+            # (i.e. won't mention any that always return same error)
+            if err != 0:
+                rtn_out = open(out + ".RETURN", 'w', encoding='UTF-8')
+                with rtn_out:
+                    rtn_out.write("Non-Zero Return code " + str(err) + "\n")
+
+    # useful for debug:
+    if (verbose):
+        subprocess.call("ls -l " + outdir, shell=True)
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument("treeish",
+                        default="HEAD",
+                        nargs="?",
+                        help="git tree or commit to compare against")
+    parser.add_argument('--version',
+                        action='store_true',
+                        help='Show version and exit')
+    parser.add_argument('-v',
+                        '--verbose',
+                        action='store_true',
+                        help='Verbose output: ls the output directories')
+
+    args = parser.parse_args()
+    if args.version:
+        version.show_and_exit(__file__, [])
+    args.treeish = shlex.quote(args.treeish)
+
+    util_path = os.path.dirname(os.path.realpath(__file__))
+    repo_root = os.path.abspath(os.path.join(util_path, os.pardir))
+    os.chdir(repo_root)
+
+    if not os.path.isdir(os.path.join(repo_root, '.git')):
+        print("Script not in expected location in a git repo", file=sys.stderr)
+        sys.exit(1)
+
+    # Exit early if there are no diffs between the working tree and
+    # args.treeish.
+    output = subprocess.check_output("git diff " + args.treeish, shell=True)
+    if not output:
+        sys.exit(0)
+
+    # Create temporary directories in util_path rather than defaulting to /tmp.
+# /tmp may be small and may be mounted noexec.
+    tempfile.tempdir = util_path
+
+    with tempfile.TemporaryDirectory() as tmpdir:
+        tmpdir_basename = os.path.basename(tmpdir)
+        subprocess.check_call("git archive " + args.treeish +
+                              " | tar -x -C util/" + tmpdir_basename,
+                              shell=True)
+
+        # Execute commands for working tree, saving output
+        os.chdir(util_path)
+        newoutdir = os.path.join(tmpdir, "newout")
+        os.mkdir(newoutdir)
+        generate_output(newoutdir, args.verbose)
+
+        # Execute commands for previous revision, saving output
+        os.chdir(os.path.join(tmpdir_basename, "util"))
+        oldoutdir = os.path.join(tmpdir, "oldout")
+        os.mkdir(oldoutdir)
+        generate_output(oldoutdir, args.verbose)
+
+        # Show diff (if any)
+        os.chdir(tmpdir)
+        # Don't use a checked call because the exit code indicates whether there
+        # is a diff or not, rather than indicating error.
+        subprocess.call('git diff -p --stat --no-index oldout newout',
+                        shell=True)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/util/docgen.py b/util/docgen.py
new file mode 100755
index 0000000..7e62889
--- /dev/null
+++ b/util/docgen.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""Command-line tool to merge markdown and register spec to html
+
+"""
+import argparse
+import logging as log
+import os.path
+import shutil
+import sys
+
+from pkg_resources import resource_filename
+
+from docgen import generate, html_data, lowrisc_renderer
+from reggen import version
+
+USAGE = """
+  docgen [options]
+  docgen [options] <file>
+  docgen (-h | --help)
+  docgen --version
+"""
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        prog="docgen",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        usage=USAGE,
+        description=__doc__,
+        epilog="by default (or with the filename '-') stdin/stdout are used")
+    parser.add_argument(
+        '--version', action='store_true', help='Show version and exit')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='store_true',
+        help='Verbose output during processing')
+    parser.add_argument(
+        '-c',
+        '--inline-css',
+        action='store_true',
+        help='Put CSS inline in output file')
+    parser.add_argument(
+        '-d',
+        '--asdiv',
+        action='store_true',
+        help='Output as a <div> without html header/trailer')
+    parser.add_argument(
+        '-j',
+        '--wavesvg-usejs',
+        action='store_true',
+        help='Waveforms should use javascript wavedrom '
+        'rather than generating inline svg')
+    parser.add_argument(
+        '-o',
+        '--output',
+        type=argparse.FileType('w'),
+        default=sys.stdout,
+        metavar='file',
+        help='Output file (default stdout)')
+    parser.add_argument(
+        'srcfile',
+        nargs='?',
+        metavar='file',
+        default='-',
+        help='source markdown file (default stdin)')
+    args = parser.parse_args()
+
+    if args.version:
+        version.show_and_exit(__file__, ["Hjson", "Mistletoe"])
+
+    if (args.verbose):
+        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
+    else:
+        log.basicConfig(format="%(levelname)s: %(message)s")
+
+    outfile = args.output
+
+    with outfile:
+        outfile.write(
+            generate.generate_doc(args.srcfile, args.verbose, args.inline_css,
+                                  not args.wavesvg_usejs, args.asdiv))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/util/docgen/LICENSE.mistletoe b/util/docgen/LICENSE.mistletoe
new file mode 100644
index 0000000..9ad0f5a
--- /dev/null
+++ b/util/docgen/LICENSE.mistletoe
@@ -0,0 +1,21 @@
+The MIT License
+
+Copyright 2017 Mi Yu
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/util/docgen/README.md b/util/docgen/README.md
new file mode 100644
index 0000000..4e566e6
--- /dev/null
+++ b/util/docgen/README.md
@@ -0,0 +1,76 @@
+# Docgen -- lowRISC Document generator
+
+Docgen is a python3 tool to read markdown documentation and generate html.
+
+It works in conjunction with reggen to generate documentation with the
+register information inserted. Examples are described in the README.md
+in the examples subdirectory.
+
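+Docgen can also be driven programmatically; build_docs.py (added in
+this same commit) uses it that way. A minimal sketch, assuming it is
+run from the util/ directory so the docgen package is importable, and
+using an illustrative output path:
+
+```python
+# Render one markdown file to a standalone HTML page, mirroring the
+# call made by build_docs.py.
+import docgen.generate
+
+html = docgen.generate.generate_doc('docgen/examples/uart.md',
+                                    verbose=False,
+                                    inlinecss=True,   # embed CSS in the page
+                                    inlinewave=True,  # render waveforms as inline SVG
+                                    asdiv=False)      # full HTML page, not a bare <div>
+
+with open('/tmp/uart.html', 'w', encoding='UTF-8') as f:
+    f.write(html)
+```
+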
+The lowRISC markdown is based on CommonMark (a strongly defined, highly
+compatible specification of Markdown, defined at
+https://commonmark.org/) and is parsed by mistletoe (a fast, extensible
+and spec-compliant Markdown parser in pure Python).
+
+Mistletoe already adds tables using the GitHub markdown syntax.
+
+The following extensions have been made for the lowRISC version:
+* `{{% lowrisc-doc-hdr Title Of Doc }}` Insert a standard title header
+  and give the document a title. This is expected to be extended to
+  lowrisc-doc-hdr=type (type could be component, core, guide,...) so the
+  tool can validate that the required sections are in the document.
+
+* `{{% regfile filename.hjson }}` Pointer to the register definition
+  json/hjson. This is expected to go early in the document. After this line
+  the registers are available as markup items.
+
+* `{{% registers x }}` Insert the register tables at this point in the
+  document. Must be after the regfile extension! TODO fix the need for `x`
+
+* `{{% include file }}` Insert the file into the markdown
+  document. Any other text on the same line as the include directive
+  will be inserted, then a newline and then the included file. The
+  file is included before any other processing so the result is a
+  single file processed by the markdown processor (thus all
+  definitions like anchor links are global and not confined to the
+  file they are in). Includes may be nested. The filename is relative
+  to the directory that the markdown file currently being processed is
+  in (so relative links work from inside included files). If the
+  include file is not found then an error is reported and a line
+  indicating the error will be inserted in the markdown.
+
+* `{{% include !command -options }}` Use the shell to cd to the
+  directory that the markdown file is in and run the command with
+  given options (everything from the `!` to the closing `}}` is used
+  as the shell command). Insert the output (stdout) from the command
+  into the markdown document. Any other text on the same line as the
+  include directive will be inserted, then a newline and then the
+  command output. (As a result, if the triple back-tick to start a
+  code block immediately follows the `}}` then the output from the
+  command will be inserted inside that code block.) Error returns from
+  the command will be ignored, and any output on stderr will be
+  reported in the docgen stderr output.
+
+
+* `!!Reg` or `!!Reg.Field` Insert Component.Reg or Component.Reg.Field
+  in the output file as a link to the register table for Reg and
+  tagged for special CSS decoration (currently makes them blue,
+  monospace and a little smaller). If Reg is not in the list of
+  registers read from the regfile directive then a warning is printed
+  and the output is not transformed. (Note that a period rather than an
+  underscore is used as the separator to avoid syntax highlighter
+  issues caused by markdown's use of underscores for italics.)
+
+* ` ```lang ` Code blocks are highlighted by pygments (a generic
+  syntax highlighter in python). Background colour can be set using the
+  {.good} and {.bad} tags after the lang.
+
+* ` ```wavejson ` Code blocks describing waveforms are converted into
+  an svg picture in the output file. The `-j` or `--wavesvg-usejs` flag
+  will instead generate the <script> output needed by the WaveDrom
+  javascript parser and include invocation of wavedrom in the output
+  html.
+
+
+* Anticipate adding support for ToC generation and insertion (there is
+  partial support for this already in mistletoe)
diff --git a/util/docgen/__init__.py b/util/docgen/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/util/docgen/__init__.py
@@ -0,0 +1 @@
+
diff --git a/util/docgen/examples/README.md b/util/docgen/examples/README.md
new file mode 100644
index 0000000..5646693
--- /dev/null
+++ b/util/docgen/examples/README.md
@@ -0,0 +1,84 @@
+# Examples using docgen
+
+This directory has some examples of using docgen. The example commands
+assume $REPO_TOP is set to the toplevel directory of the repo.
+
+### Setup
+
+If packages have not previously been installed you will need to set a
+few things up. First use `pip3` to install some required packages:
+```
+$ pip3 install --user hjson
+$ pip3 install --user mistletoe
+$ pip3 install --user pygments
+```
+
+The examples all use the -c flag to ensure CSS styles are included
+inline in the html file generated. If building a site this flag will
+likely not be used. If this flag is not given then `md_html.css` from
+docgen and `reg_html.css` from reggen should be put in the same
+directory as the html file.
+
+### Build example 1
+
+Example 1 is the Simple UART. There are a couple of changes from the
+actual implementation: the registers are declared as 64-bit and the
+STATECLR register bits have been moved up by 16 bits.
+
+```
+$ cd $REPO_TOP/util/docgen/examples
+$ ../../docgen.py -c uart.md > /tmp/uartout.html
+```
+
+If you want more detail about progress, add the verbose flag. Note
+that this causes a second validation pass that checks the output of
+the first pass; there should be fewer things to do on the second pass.
+
+```
+$ cd $REPO_TOP/util/docgen/examples
+$ ../../docgen.py -c -v uart.md > /tmp/uartout.html
+```
+
+You can open the output using file:///tmp/uartout.html
+
+### Build example 2
+
+Example 2 is a 16550-style UART. This shows 8-bit registers and
+registers at the same address.
+
+Note this example has a deliberate error in the second waveform description.
+
+```
+$ cd $REPO_TOP/util/docgen/examples
+$ ../../docgen.py -c uart16550.md > /tmp/uart16550out.html
+```
+
+If you want more detail about progress, add the verbose flag. Note
+that this causes a second validation pass that checks the output of
+the first pass; there should be fewer things to do on the second pass.
+
+```
+$ cd $REPO_TOP/util/docgen/examples
+$ ../../docgen.py -c -v uart16550.md > /tmp/uart16550out.html
+```
+
+The waveforms can also be generated using the browser-based WaveDrom
+JavaScript.
+
+```
+$ cd $REPO_TOP/util/docgen/examples
+$ ../../docgen.py -cj uart16550.md > /tmp/uart16550out.html
+```
+
+
+You can open the output using file:///tmp/uart16550out.html
+
+### Build example 3
+
+Example 3 shows how the multireg key is used to generate registers.
+
+
+```
+$ cd $REPO_TOP/util/docgen/examples
+$ ../../docgen.py -c gp.md > /tmp/gpout.html
+```
diff --git a/util/docgen/examples/badwen.hjson b/util/docgen/examples/badwen.hjson
new file mode 100644
index 0000000..99e4ef9
--- /dev/null
+++ b/util/docgen/examples/badwen.hjson
@@ -0,0 +1,57 @@
+{
+  name: "BADWEN",
+  clock_primary: "clk_fixed",
+  bus_device: "tlul",
+
+  regwidth: "32",
+  registers: [
+    {name: "RDATA", desc: "UART read data",
+      swaccess: "ro", fields: [
+      {bits: "7:0", resval: "0x0"}
+    ]},
+    {name: "WDATA", desc: "UART write data", swaccess: "wo", fields: [
+      {bits: "7:0", resval: "0x0"}
+    ]},
+    {name: "NCO", desc: "Baud clock rate control",
+      swaccess: "rw", regwen: "GOODWEN", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {name: "NCO1", desc: "Baud clock rate control",
+      swaccess: "rw", regwen: "BADWEN1", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {name: "NCO2", desc: "Baud clock rate control",
+      swaccess: "rw", regwen: "BADWEN2", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {name: "NCO3", desc: "Baud clock rate control",
+      swaccess: "rw", regwen: "BADWEN3", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {name: "NCO4", desc: "Baud clock rate control",
+      swaccess: "rw", regwen: "NONE", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {name: "GOODWEN", desc: "Write enable control",
+      swaccess: "rw1c", fields: [
+      {name: "wen", desc: "wr enable", bits: "0", resval: "1"}
+    ]},
+    {name: "BADWEN1", desc: "Write enable control too many bits",
+      swaccess: "rw1c", fields: [
+      {name: "wen", desc: "wr enable", bits: "0", resval: "1"}
+      {name: "foo", desc: "wr enable", bits: "1", resval: "1"}
+    ]},
+    {name: "BADWEN2", desc: "Write enable control not rw1c",
+      swaccess: "rw", fields: [
+      {name: "wen", desc: "wr enable", bits: "0", resval: "1"}
+    ]},
+    {name: "BADWEN3", desc: "Write enable control not default to 1",
+      swaccess: "rw1c", fields: [
+      {name: "wen", desc: "wr enable", bits: "0", resval: "0"}
+    ]},
+    {name: "DVREG", desc: "DV-accessible test register", swaccess: "rw",
+     fields: [
+      {bits: "7:0", name: "", desc: "-" }
+    ]}
+  ]
+}
diff --git a/util/docgen/examples/badwen.md b/util/docgen/examples/badwen.md
new file mode 100644
index 0000000..c8cc971
--- /dev/null
+++ b/util/docgen/examples/badwen.md
@@ -0,0 +1,31 @@
+{{% lowrisc-doc-hdr Test for register write enable errors }}
+{{% regfile badwen.hjson }}
+
+Blah
+
+{{% toc 3 }}
+
+## Compatibility
+
+## Theory of operation
+
+Check for cross reference to !!RDATA
+
+## Programmer Guide
+
+
+### Initialization
+
+
+### Interrupts
+
+
+
+### Debug Features
+
+
+## Implementation Guide
+
+
+## Registers
+{{% registers x }}
diff --git a/util/docgen/examples/errors.hjson b/util/docgen/examples/errors.hjson
new file mode 100644
index 0000000..0730a8e
--- /dev/null
+++ b/util/docgen/examples/errors.hjson
@@ -0,0 +1,205 @@
+{
+  name: "ERR",
+  clock_primary: "clk_fixed",
+  bus_device: "tlul",
+
+  regwidth: "32",
+  registers: [
+    { name: "OE",
+      desc: '''GPIO Output Enable
+
+            bit[i]=1'b0: Input mode for GPIO[i]
+            bit[i]=1'b1: Output mode for GPIO[i]
+            ''',
+      swaccess: "rw",
+      fields: [
+        { bits: "31:0" }
+      ],
+    },
+    { reserved: "4" }
+    { reserved: "4" name: "error" }
+    { name: "DATA_IN",
+      desc: "GPIO Input data read bitfield",
+      swaccess: "ro",
+      fields: [
+        { bits: "31:0",
+          resval: "x"
+        }
+      ],
+    },
+    { name: "DATA_IN",
+      desc: "Duplicate name",
+      swaccess: "ro",
+      fields: [
+        { bits: "0", name: "abit", desc: "a bit"},
+        { bits: "1", name: "abit", desc: "a bit duplicate name"},
+	{ bits: "32", name: "bbit", desc: "out of bounds bit"},
+      ],
+    },
+    { name: "DATA_BB",
+      desc: "should be ok",
+      swaccess: "ro",
+      fields: [
+        { bits: "0", name: "abit", desc: "a bit"},
+	{ bits: "1", name: "bbit", desc: "ok"},
+      ],
+    },
+    { name: "data_bb",
+      desc: "Duplicate name although the case differs",
+      swaccess: "ro",
+      fields: [
+        { bits: "0", name: "abit", desc: "a bit"},
+	{ bits: "1", name: "bbit", desc: "ok"},
+      ],
+    },
+    { name: "DATA_YY",
+      desc: "Name ok should show field errors",
+      swaccess: "ro",
+      fields: [
+        { bits: "0", name: "abit", desc: "a bit"},
+        { bits: "1", name: "abit", desc: "a bit duplicate name"},
+	{ bits: "32", name: "bbit", desc: "out of bounds bit"},
+      ],
+    },
+    { name: "DATA_ZZ",
+      desc: "More errors two swaccess",
+      swaccess: "ro",
+      swaccess: "rw",
+      fields: [
+        { bits: "0", name: "abit", desc: "a bit"},
+        { bits: 1, name: "intbit", desc: "bit is an integer"},
+	{ bits: "32:20", name: "bbit", desc: "out of bounds bit range"},
+      ],
+    },
+    { name: "DATA_QQ",
+      desc: "No fields",
+      swaccess: "rw",
+      fields: [
+      ],
+    },
+    # multireg for single bit instance?
+    { multireg: {
+        name: "DATA_OUT",
+        desc: "GPIO output data",
+        count: "32",
+        cname: "GPIO",
+        swaccess: "rw",
+        fields: [
+          { bits: "0", name: "D" desc: "Output data" }
+        ],
+      }
+    },
+    {sameaddr: [
+	{name: "IIR", desc: "Interrupt identification register",
+	 resval: "0xa0", swaccess: "ro", fields: [
+	     {bits: "0", name: "INT", resval: "1",
+	      desc: "This bit is clear if the UART is interrupting."
+	     }
+	     {bits: "3:1", name: "TYPE",
+	      desc: '''
+              If the INT bit is clear, these bits indicate the highest
+              priority pending interrupt.
+              '''
+	      enum: [
+      		 { value: "0", name: "mdm", desc: "Modem status (lowest)" },
+       		 { value: "1", name: "txe", desc: "TX holding register empty" },
+       		 { value: "2", name: "rxd", desc: "RX data available" },
+       		 { value: "3", name: "rxl", desc: "RX line status (highest)" }
+	      ]
+	     }
+	     {bits: "3", name: "TO",
+	      desc: "This bit overlaps."
+	      resval: "NaN"
+	     }
+	     {bits: "5", name: "F64", resval: "1",
+	      desc: "Will always be clear because the FIFO is not 64 bytes."
+	     }
+	     {bits: "7:6", resval: "6", name: "FEN", desc: '''
+	      These bits will both be clear if the FIFO is disabled
+	      and both be set if it is enabled.
+              '''
+	      enum: [
+      		 { value: "0", name: "mdm", desc: "Modem status (lowest)" },
+       		 { value: "1", name: "txe", desc: "TX holding register empty" },
+       		 { value: "2", name: "rxd", desc: "RX data available" },
+       		 { value: "3", name: "rxl" }
+	      ]
+	     }
+	 ]}
+	{name: "FCR", desc: "FIFO Control Register",
+	 swaccess: "ab", fields: [
+	     {bits: "0", name: "FEN",
+	      desc: "This bit must be set to enable the FIFOs."
+	     }
+	     {bits: "1", name: "CRX", desc: '''
+               Writing this bit with a 1 will reset the RX fifo. The
+               bit will clear after the FIFO is reset.
+              '''
+	     }
+	     {bits: "2", name: "CTX", desc: '''
+               Writing this bit with a 1 will reset the TX fifo. The
+               bit will clear after the FIFO is reset.
+              '''
+	     }
+	     {bits: "3", name: "DMAS",
+	      desc: "DMA Mode Select. This bit is not used."
+	     }
+	     {bits: "5", name: "E64",
+	      desc: "This bit is reserved because the FIFO is not 64 bytes."
+	     }
+	     {bits: "6:7", name: "NOTPPC",
+	      desc: '''
+                These two bits set the interrupt trigger level for the
+                receive FIFO. The received data available interrupt
+                will be set when there are at least this number of
+                bytes in the receive FIFO.
+              '''
+	      enum: [
+      		 { value: "0", name: "rxlvl1", desc: "1 Byte" },
+       		 { value: "1", name: "rxlvl4", desc: "4 Bytes" },
+       		 { value: "2", name: "rxlvl8", desc: "8 bytes" },
+       		 { value: "3", name: "rxlvl14", desc: "14 bytes" }
+	      ]
+	     }
+	 ]}
+    ]}
+    // skipto bad offset
+    { skipto: "0x41" }
+    // Backwards skip is an error
+    { skipto: "16" }
+    {window: {
+    	     name: "FCR"
+	     items: "16"
+	     validbits: "48"
+	     byte-write: "True"
+	     swaccess: "rw"
+	     desc: '''
+	     	   Duplicate name, too wide.
+		   '''
+	}
+    },
+    {window: {
+    	     name: "win1"
+	     items: "16"
+	     validbits: "48"
+	     byte-write: "True"
+	     swaccess: "rw"
+	     desc: '''
+	     	   Too wide.
+		   '''
+	}
+    },
+    {window: {
+    	     name: "win2"
+	     items: "big"
+	     validbits: "x"
+	     byte-write: "True"
+	     swaccess: "read"
+	     desc: '''
+	     	   size, width not a number, bad swaccess
+		   '''
+	}
+    },
+
+  ]
+}
diff --git a/util/docgen/examples/errors.md b/util/docgen/examples/errors.md
new file mode 100644
index 0000000..db9ee1d
--- /dev/null
+++ b/util/docgen/examples/errors.md
@@ -0,0 +1,55 @@
+{{% lowrisc-doc-hdr Example with lots of errors }}
+{{% regfile errors.hjson }}
+
+Blah
+
+Error in lowRISC tag
+{{% bob 3 }}
+
+## Compatibility
+
+
+## Theory of operation
+
+Check for cross reference to !!DATA_OUT and !!INT_CTRL and !!INT_CTRL2 blah
+
+For testing, this version should report an error:
+```wavejson
+{signal: [
+  {name:'Baud Clock',  wave: 'p...........' },
+  {name:'Data 8 bit',        wave: '10========1=',
+   data: [ "lsb", "", "", "", "", "", "", "msb", "next" ] )
+  {name:'Data 7 bit',        wave: '10=======1=.',
+   data: [ "lsb", "", "", "", "", "", "msb", "next" ] },
+  {name:'Data 6 bit',        wave: '10======1=..',
+   data: [ "lsb", "", "", "", "", "msb", "next" ] },
+  {name:'Data 5 bit',        wave: '10=====1=...',
+   data: [ "lsb", "", "", "", "msb", "next" ] },
+  {name:'8 with Parity', wave: '10=========1',
+   data: [ "lsb", "", "", "", "", "", "", "msb", "par" ] },
+ ],
+ head:{
+   text:'Serial Line format (one stop bit)',
+   tock:-1,
+ }
+}
+```
+
+## Programmer Guide
+
+
+### Initialization
+
+
+### Interrupts
+
+
+
+### Debug Features
+
+
+## Implementation Guide
+
+
+## Registers
+{{% registers x }}
diff --git a/util/docgen/examples/gp.hjson b/util/docgen/examples/gp.hjson
new file mode 100644
index 0000000..0ccad3a
--- /dev/null
+++ b/util/docgen/examples/gp.hjson
@@ -0,0 +1,84 @@
+{
+  name: "GP",
+  clock_primary: "clk_fixed",
+  bus_device: "tlul",
+
+  regwidth: "32",
+  registers: [
+    { name: "OE",
+      desc: '''GPIO Output Enable
+
+            bit[i]=1'b0: Input mode for GPIO[i]
+            bit[i]=1'b1: Output mode for GPIO[i]
+            ''',
+      swaccess: "rw",
+      fields: [
+        { bits: "31:0" }
+      ],
+    },
+    { reserved: "4" }
+    { name: "DATA_IN",
+      desc: "GPIO Input data read bitfield",
+      swaccess: "ro",
+      fields: [
+        { bits: "31:0",
+          resval: "x"
+        }
+      ],
+    },
+    # multireg for single bit instance?
+    { multireg: {
+        name: "DATA_OUT",
+        desc: "GPIO output data",
+        count: "32",
+        cname: "GPIO",
+        swaccess: "rw",
+        fields: [
+          { bits: "0", name: "D" desc: "Output data" }
+        ],
+      }
+    },
+
+    { multireg: {
+          name: "INT_CTRL",
+	  desc: "GPIO Interrupt control",
+	  count: "32",
+	  cname: "GPIO",
+	  swaccess: "rw",
+	  fields: [
+	      { bits: "0", name: "POS", resval: "0",
+	        desc: "Set to interrupt on rising edge"
+	      }
+	      { bits: "1", name: "NEG", resval: "0",
+	        desc: "Set to interrupt on falling edge"
+	      }
+	      { bits: "4:2", name: "TYPE", resval: "0",
+	        desc: "Type of interrupt to raise"
+		enum: [
+		  {value: "0", name: "none", desc: "log but no interrupt" },
+		  {value: "1", name: "low", desc: "low priority interrupt" },
+		  {value: "2", name: "high", desc: "high priority interrupt" },
+		  {value: "3", name: "nmi", desc: "non maskable interrupt" }
+		]
+	      }
+	  ]
+      }
+    },
+    { multireg: {
+          name: "WDATA",
+	  desc: "Write with mask to GPIO out register",
+	  count: "32",
+	  cname: "GPIO",
+	  swaccess: "rw",
+	  fields: [
+	      { bits: "0", name: "DATA", resval: "0",
+	        desc: "Data to write if mask bit is 1"
+	      }
+	      { bits: "16", name: "MASK", resval: "0",
+	        desc: "Set to allow data write"
+	      }
+	  ]
+      }
+    }
+  ]
+}
diff --git a/util/docgen/examples/gp.md b/util/docgen/examples/gp.md
new file mode 100644
index 0000000..510a48c
--- /dev/null
+++ b/util/docgen/examples/gp.md
@@ -0,0 +1,32 @@
+{{% lowrisc-doc-hdr Example like first gpio }}
+{{% regfile gp.hjson }}
+
+Blah
+
+{{% toc 3 }}
+
+## Compatibility
+
+
+## Theory of operation
+
+Check for cross reference to !!DATA_OUT and !!INT_CTRL and !!INT_CTRL2 blah
+
+## Programmer Guide
+
+
+### Initialization
+
+
+### Interrupts
+
+
+
+### Debug Features
+
+
+## Implementation Guide
+
+
+## Registers
+{{% registers x }}
diff --git a/util/docgen/examples/include/inc2.md b/util/docgen/examples/include/inc2.md
new file mode 100644
index 0000000..0a9e37f
--- /dev/null
+++ b/util/docgen/examples/include/inc2.md
@@ -0,0 +1,8 @@
+### depth 2 include
+
+[Anchor 3]: https://google.com
+
+mumble
+* Try out [Anchor 1][] defined in outer
+
+### end depth 2 include
diff --git a/util/docgen/examples/include/included.md b/util/docgen/examples/include/included.md
new file mode 100644
index 0000000..89c8096
--- /dev/null
+++ b/util/docgen/examples/include/included.md
@@ -0,0 +1,14 @@
+
+### This is the **included** md file!
+
+Check from included for cross reference to !!DATA_OUT and !!INT_CTRL and !!INT_CTRL2 blah
+
+[Anchor 2]: https://github.com/mdhayter
+
+mumble
+* Try out [Anchor 1][] defined later
+* Try out [Anchor 2][] defined in include
+
+{{% include inc2.md }}
+
+### end first include
diff --git a/util/docgen/examples/ls b/util/docgen/examples/ls
new file mode 100755
index 0000000..66127dc
--- /dev/null
+++ b/util/docgen/examples/ls
@@ -0,0 +1,2 @@
+#!/bin/sh
+ls $*
diff --git a/util/docgen/examples/test_inc.md b/util/docgen/examples/test_inc.md
new file mode 100644
index 0000000..3873b21
--- /dev/null
+++ b/util/docgen/examples/test_inc.md
@@ -0,0 +1,58 @@
+{{% lowrisc-doc-hdr Example like first gpio }}
+{{% regfile gp.hjson }}
+
+Blah
+
+{{% toc 3 }}
+
+## Compatibility
+
+* Try out [Anchor 1][] defined in outer
+* Try out [Anchor 2][] defined in include depth 1
+* Try out [Anchor 3][] defined in include depth 2
+
+
+{{% include include/included.md }}
+
+[Anchor 1]: https://github.com/lowRisc
+
+## this include should fail
+
+{{% include no_such_file.md }}
+
+## exec include
+
+This should work since there is an exec ls in the directory
+{{% include !ls -1 errors.md errorsregs.hjson }}```
+```
+
+This should fail because command does not exist {{% include !ps au }}
+
+This should fail for trying to escape the repo {{% include !../../../../foo }}
+
+# big include
+
+{{% include !../../regtool.py --doc }}
+
+## Theory of operation
+
+Check for cross reference to !!DATA_OUT and !!INT_CTRL and !!INT_CTRL2 blah
+
+## Programmer Guide
+
+
+### Initialization
+
+
+### Interrupts
+
+
+
+### Debug Features
+
+
+## Implementation Guide
+
+
+## Registers
+{{% registers x }}
diff --git a/util/docgen/examples/test_win.md b/util/docgen/examples/test_win.md
new file mode 100644
index 0000000..3c9c92e
--- /dev/null
+++ b/util/docgen/examples/test_win.md
@@ -0,0 +1,31 @@
+{{% lowrisc-doc-hdr Test for register space windows }}
+{{% regfile win.hjson }}
+
+Blah
+
+{{% toc 3 }}
+
+## Compatibility
+
+## Theory of operation
+
+Check for cross reference to !!win1 and !!RDATA and !!win2 blah
+
+## Programmer Guide
+
+
+### Initialization
+
+
+### Interrupts
+
+
+
+### Debug Features
+
+
+## Implementation Guide
+
+
+## Registers
+{{% registers x }}
diff --git a/util/docgen/examples/uart.hjson b/util/docgen/examples/uart.hjson
new file mode 100644
index 0000000..9b39dfc
--- /dev/null
+++ b/util/docgen/examples/uart.hjson
@@ -0,0 +1,168 @@
+{
+  name: "UART",
+  clock_primary: "clk_fixed",
+  bus_device: "tlul",
+  regwidth: "64",
+  registers: [
+    {name: "RDATA", desc: "UART read data",
+      swaccess: "ro", fields: [
+      {bits: "7:0", resval: "0x0"}
+    ]},
+    {name: "WDATA", desc: "UART write data", swaccess: "wo", fields: [
+      {bits: "7:0", resval: "0x0"}
+    ]},
+    {name: "NCO", desc: "Baud clock rate control", swaccess: "rw", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {name: "CTRL", desc: "UART control register", swaccess: "rw", fields: [
+      {bits: "0", name: "TX", desc: '''
+        TX enable has a really long description that will go on over
+	several lines and really want to wrap to be seen well in the
+	source format.
+	'''
+	}
+      {bits: "1", name: "RX", desc: "RX enable"}
+      {bits: "2", name: "CTS", desc: "CTS hardware flow-control enable"}
+      {bits: "3", name: "RTS", desc: "RTS hardware flow-control enable"}
+      {bits: "4", name: "SLPBK", desc: "System loopback enable"}
+      {bits: "5", name: "LLPBK", desc: "Line loopback enable"}
+      {bits: "6", name: "RCOS", desc: "Oversample enable for RX and CTS"}
+      {bits: "7", name: "NF", desc: "RX noise filter enable"}
+      {bits: "8", name: "PARITY_EN", desc: "Parity enable"}
+      {bits: "9", name: "PARITY_ODD", desc: "1 for odd parity, 0 for even."}
+    ]}
+    {name: "ICTRL", desc: "UART Interrupt control register", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "TX", desc: "TX interrupt enable" }
+      {bits: "1", name: "RX", desc: "RX interrupt enable"}
+      {bits: "2", name: "TXO", desc: "TX overflow interrupt enable"}
+      {bits: "3", name: "RXO", desc: "RX overflow interrupt enable"}
+      {bits: "4", name: "RXF", desc: "RX frame error interrupt enable"}
+      {bits: "5", name: "RXB", desc: "RX break error interrupt enable"}
+      {bits: "7:6", name: "RXBLVL", desc: '''
+       Trigger level for rx break detection. Sets the number of character
+       times the line must be low to detect a break
+       ''',
+       enum: [
+       	       { value: "0", name: "break2", desc: "2 characters" },
+       	       { value: "1", name: "break4", desc: "4 characters" },
+       	       { value: "2", name: "break8", desc: "8 characters" },
+       	       { value: "3", name: "break16", desc: "16 characters" }
+	     ]
+      }
+      {bits: "8", name: "RXTO", desc: "RX timeout interrupt enable"}
+      {bits: "9", name: "RXPE", desc: "RX parity error interrupt enable"}
+    ]}
+    {name: "STATE", desc: "UART state register", swaccess: "ro",
+     fields: [
+      {bits: "0", name: "TX", desc: "TX buffer full" }
+      {bits: "1", name: "RX", desc: "RX buffer full"}
+      {bits: "2", name: "TXO", desc: "TX buffer overflow"}
+      {bits: "3", name: "RXO", desc: "RX buffer overflow"}
+      {bits: "4", name: "TXEMPTY", desc: "TX buffer empty"}
+      {bits: "5", name: "TXIDLE", desc: "TX idle"}
+      {bits: "6", name: "RXIDLE", desc: "RX idle"}
+      {bits: "7", name: "RXEMPTY", desc: "RX fifo empty"}
+    ]}
+    // I suspect STATECLR should be r0w1c or something
+    {name: "STATECLR", desc: "UART state register", swaccess: "rw",
+     fields: [
+      {bits: "19", name: "TXO", desc: "Clear TX buffer overflow"}
+      {bits: "20", name: "RXO", desc: "Clear RX buffer overflow"}
+    ]}
+    {name: "ISTATE", desc: "UART Interrupt state register", swaccess: "ro",
+     fields: [
+      {bits: "0", name: "TX", desc: "TX interrupt state" }
+      {bits: "1", name: "RX", desc: "RX interrupt state"}
+      {bits: "2", name: "TXO", desc: "TX overflow interrupt state"}
+      {bits: "3", name: "RXO", desc: "RX overflow interrupt state"}
+      {bits: "4", name: "RXF", desc: "RX frame error interrupt state"}
+      {bits: "5", name: "RXB", desc: "RX break error interrupt state"}
+      {bits: "6", name: "RXTO", desc: "RX timeout interrupt state"}
+      {bits: "7", name: "RXPE", desc: "RX parity error interrupt state"}
+    ]}
+    {name: "ISTATECLR", desc: "UART Interrupt clear register",
+     swaccess: "r0w1c",
+     fields: [
+      {bits: "0", name: "TX", desc: "Clear TX interrupt" }
+      {bits: "1", name: "RX", desc: "Clear RX interrupt"}
+      {bits: "2", name: "TXO", desc: "Clear TX overflow interrupt"}
+      {bits: "3", name: "RXO", desc: "Clear RX overflow interrupt"}
+      {bits: "4", name: "RXF", desc: "Clear RX frame error interrupt"}
+      {bits: "5", name: "RXB", desc: "Clear RX break error interrupt"}
+      {bits: "6", name: "RXTO", desc: "Clear RX timeout interrupt"}
+      {bits: "7", name: "RXPE", desc: "Clear RX parity error interrupt"}
+    ]}
+    {name: "FIFO", desc: "UART FIFO control register", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "RXRST", swaccess: "r0w1c", desc: "RX fifo reset" }
+      {bits: "1", name: "TXRST", swaccess: "r0w1c", desc: "TX fifo reset" }
+      {bits: "4:2", name: "RXILVL",
+       desc: "Trigger level for RX interrupts"
+       enum: [
+       	       { value: "0", name: "rxlvl1", desc: "1 character" },
+       	       { value: "1", name: "rxlvl4", desc: "4 characters" },
+       	       { value: "2", name: "rxlvl8", desc: "8 characters" },
+       	       { value: "3", name: "rxlvl16", desc: "16 characters" }
+       	       { value: "4", name: "rxlvl30", desc: "30 characters" }
+	       // TODO expect generator to make others reserved
+	     ]
+      }
+      {bits: "6:5", name: "TXILVL",
+       desc: "Trigger level for TX interrupts"
+       enum: [
+       	       { value: "0", name: "txlvl1", desc: "1 character" },
+       	       { value: "1", name: "txlvl4", desc: "4 characters" },
+       	       { value: "2", name: "txlvl8", desc: "8 characters" },
+       	       { value: "3", name: "txlvl16", desc: "16 characters" }
+	     ]
+      }
+    ]}
+    {name: "RFIFO", desc: "UART FIFO status register", swaccess: "ro",
+     fields: [
+      {bits: "5:0", name: "TXLVL", desc: "Current fill level of TX fifo" }
+      {bits: "11:6", name: "RXLVL", desc: "Current fill level of RX fifo" }
+    ]}
+    {name: "OVRD", desc: "UART override control register", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "TXEN", desc: "Override the TX signal" }
+      {bits: "1", name: "TXVAL", desc: "Value for TX Override" }
+      {bits: "2", name: "RTSEN", desc: "Override the RTS signal" }
+      {bits: "3", name: "RTSVAL", desc: "Value for RTS Override" }
+    ]}    
+    {name: "VAL", desc: "UART oversampled values", swaccess: "ro",
+     fields: [
+      {bits: "15:0", name: "RX", desc: '''
+       Last 16 oversampled values of RX. Most recent bit is bit 0, oldest 15.
+      ''' }
+      {bits: "31:16", name: "CTS", desc: '''
+       Last 16 oversampled values of CTS. Most recent bit is bit 16, oldest 31.
+      ''' }
+    ]}
+    {name: "RXTO", desc: "UART RX timeout control", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "EN", desc: "Enable RX timeout feature" }
+      {bits: "24:1", name: "VAL", desc: "RX timeout value in UART bit times" }
+    ]}    
+    { skipto: "0x0f00" }
+    {name: "ITCR", desc: "UART Integration test control", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "", desc: "-" }
+    ]}    
+    {name: "ITOP", desc: "UART Integration test overrides", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "TX", desc: "Drive txint when UART_ITCR asserted" }
+      {bits: "1", name: "RX", desc: "Drive rxint when UART_ITCR asserted" }
+      {bits: "2", name: "TXO", desc: "Drive txoint when UART_ITCR asserted" }
+      {bits: "3", name: "RXO", desc: "Drive rxoint when UART_ITCR asserted" }
+      {bits: "4", name: "RXF", desc: "Drive rxfint when UART_ITCR asserted" }
+      {bits: "5", name: "RXB", desc: "Drive rxbint when UART_ITCR asserted" }
+      {bits: "6", name: "RXTO", desc: "Drive rxtoint when UART_ITCR asserted" }
+      {bits: "7", name: "RXPE", desc: "Drive rxpeint when UART_ITCR asserted" }
+    ]}
+    {name: "DVREG", desc: "DV-accessible test register", swaccess: "rw",
+     fields: [
+      {bits: "56:0", name: "", desc: "-" }
+    ]}    
+  ]
+}
diff --git a/util/docgen/examples/uart.md b/util/docgen/examples/uart.md
new file mode 100644
index 0000000..e0be560
--- /dev/null
+++ b/util/docgen/examples/uart.md
@@ -0,0 +1,264 @@
+{{% lowrisc-doc-hdr Simple UART }}
+{{% regfile uart.hjson }}
+
+The simple UART provides an asynchronous serial interface that can
+operate at programmable BAUD rates. The main features are:
+
+- 32 byte transmit FIFO
+- 32 byte receive FIFO
+- Programmable fractional baud rate generator
+- Hardware flow control (when enabled)
+- 8 data bits
+- optional parity bit (even or odd)
+- 1 stop bit
+
+{{% toc 3 }}
+
+## Compatibility
+
+The simple UART is compatible with the H1 Secure Microcontroller UART
+used in the Chrome OS cr50 codebase
+(https://chromium.googlesource.com/chromiumos/platform/ec/+/master/chip/g/).
+The parity option has been added.
+
+## Theory of operation
+
+*TODO block diagram of UART*
+
+The UART can connect to four external pins:
+* TX: transmit data output.
+* RX: receive data input.
+* RTS: request to send flow control output. This pin is active low.
+* CTS: clear to send flow control input. This pin is active low.
+
+### Serial data format
+
+The serial line is high when idle. Characters are sent using a start
+bit (low) followed by 8 data bits sent least significant
+first. Optionally there may be a parity bit which is computed to give
+either even or odd parity. Finally there is a stop bit (high). The
+start bit for the next character may immediately follow the stop bit,
+or the line may be in the idle (high) state for some time.
+
+#### Serial waveform
+
+```wavejson
+{signal: [
+  {name:'Baud Clock',  wave: 'p...........' },
+  {name:'Data',        wave: '10========1.',
+   data: [ "lsb", "", "", "", "", "", "", "msb" ] },
+  {name:'With Parity', wave: '10=========1',
+   data: [ "lsb", "", "", "", "", "", "", "msb", "par" ] },
+ ],
+ head:{
+   text:'Serial Line format',
+   tick:0,
+ }
+}
+```
+
+### Transmission
+
+The UART will normally format characters (add start, parity and stop
+bits) and transmit them whenever the line is idle and there is a
+character available in the transmit FIFO.
+
+If !!CTRL.CTS is set then the CTS input is checked before
+starting a transmission. If CTS is active (low) then the character is
+transmitted as usual. If CTS is inactive (high) then transmission is
+delayed until CTS is asserted again. Note that once transmission of a
+character has started the state of the CTS line will have no effect
+until the stop bit has been transmitted.
+
+### Reception
+
+The internal clock runs at 16x the baud rate. This is used to sample
+the receive data. While the line is idle the data is sampled
+high. After the line goes low the data and parity bits are sampled in
+the middle of their bit time and the character received. The stop bit
+is checked in the middle of its bit time and must be high or a framing
+error will be reported.
+
+If there is space in the receive FIFO then the received character is
+written to the FIFO; otherwise it is discarded and the RX overrun
+interrupt is set. *TODO what happens if it has a parity or framing
+error, is the character added to the FIFO or not?*.
+
+For a character to be correctly received, the local clock and the
+peer's transmit clock must drift by no more than half a bit time
+between the start of the start bit and the mid-point of the stop
+bit. Thus without parity the clocks can differ by no more than 5.2%
+(0.5 bit-times / 9.5 bit-times), and with parity they can differ by no
+more than 4.7% (0.5 bit-times / 10.5 bit-times).
+
+The stop bit in the serial line format ensures that the line will
+never be low for more than 9 (10 with parity) bit-times. If the line
+is detected low for multiple character times (configured in the
+!!ICTRL.RXBLVL field) then the receiver will detect a break
+condition and signal an interrupt. *TODO will any zero characters be
+received? Depends on answer to framing error question?*
+
+If !!CTRL.CTS is set, the receiver provides flow control by
+driving the RTS output. When the number of characters in the receive
+FIFO is below the level set in !!FIFO.RXILVL the UART will drive
+this pin low (active) to indicate it is ready to accept data. Once the
+FIFO has reached the programmed level the UART will drive the pin high
+(inactive) to stop the remote device sending more data.
+
+If !!CTRL.CTS is clear, active flow control is disabled. The UART
+will drive the RTS pin low (active) whenever its receiver is enabled
+(!!CTRL.RX set) and high (inactive) whenever the receiver is
+disabled.
+
+*TODO say something about the RX noise filter enable bit*
+
+## Programmer Guide
+
+
+### Initialization
+
+The internal clock must be configured to run at 16x the required BAUD
+rate. This is done by programming the Numerically Controlled
+Oscillator (!!NCO register). The register should be set to
+(2<sup>20</sup>*f<sub>baud</sub>)/f<sub>pclk</sub>, where
+f<sub>baud</sub> is the required baud rate and f<sub>pclk</sub> is the
+peripheral clock provided to the UART.
+
+Care should be taken not to overflow the registers during the baud
+rate setting; 64-bit arithmetic may need to be forced. For example:
+
+```c
+	long long setting = (16 * (1 << UART_NCO_WIDTH) *
+			     (long long)CONFIG_UART_BAUD_RATE / PCLK_FREQ);
+	/* set frequency */
+	GR_UART_NCO(uart) = setting;
+```
+
+During initialization the !!FIFO register should be written to
+clear the FIFOs and set the trigger levels for interrupt and flow
+control. This should be done before enabling the UART and flow control
+by setting the !!CTRL register.
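+
+The sequence might look like the following minimal sketch; the
+register access macros and field positions here are illustrative
+assumptions, not part of the specification, and should be taken from
+the generated register headers:
+
+```c
+#define UART_FIFO_RXRST      (1u << 0)
+#define UART_FIFO_TXRST      (1u << 1)
+#define UART_FIFO_RXILVL(x)  ((x) << 2)   /* assumed field position */
+#define UART_FIFO_TXILVL(x)  ((x) << 5)   /* assumed field position */
+#define UART_CTRL_TX         (1u << 0)
+#define UART_CTRL_RX         (1u << 1)
+#define UART_CTRL_CTS        (1u << 2)
+
+void uart_fifo_init(volatile unsigned int *fifo_reg,
+                    volatile unsigned int *ctrl_reg)
+{
+  /* clear both FIFOs; interrupt at 1 RX character, below 16 TX characters */
+  *fifo_reg = UART_FIFO_RXRST | UART_FIFO_TXRST |
+              UART_FIFO_RXILVL(0) | UART_FIFO_TXILVL(3);
+  /* then enable the transmitter, receiver and hardware flow control */
+  *ctrl_reg = UART_CTRL_TX | UART_CTRL_RX | UART_CTRL_CTS;
+}
+```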
+
+### Character FIFOs
+
+The transmit and receive FIFOs are always used and each is 32
+characters deep.
+
+Prior to adding a character to the transmit FIFO the !!STATE.TX
+bit can be checked to ensure there is space in the FIFO. If a
+character is written to a full FIFO then the character is discarded,
+the !!STATE.TXO bit is set and a TX overrun interrupt raised.
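+
+A minimal blocking send might look like the sketch below; the register
+pointers are placeholders, and !!STATE.TX is assumed to read 1 while
+the transmit FIFO is full:
+
+```c
+void uart_putc(volatile unsigned int *state_reg,
+               volatile unsigned int *wdata_reg, unsigned char c)
+{
+  while (*state_reg & (1u << 0)) {
+    /* wait while the TX FIFO is full */
+  }
+  *wdata_reg = c;
+}
+```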
+
+If the receive FIFO is full when a new character is received then the
+!!STATE.RXO bit is set and a RX overrun interrupt raised.
+
+The overrun status is latched, so will persist to indicate characters
+have been lost, even if characters are removed from the corresponding
+FIFO. The state must be cleared by writing 1 to !!STATECLR.TXO or
+!!STATECLR.RXO.
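+
+For example, checking and clearing the latched RX overrun could be
+sketched as follows (the bit positions are assumptions; use the
+generated register headers):
+
+```c
+#define UART_STATE_RXO     (1u << 3)   /* assumed position */
+#define UART_STATECLR_RXO  (1u << 3)   /* assumed position */
+
+int uart_rx_overrun_clear(volatile unsigned int *state_reg,
+                          volatile unsigned int *stateclr_reg)
+{
+  if (*state_reg & UART_STATE_RXO) {
+    *stateclr_reg = UART_STATECLR_RXO;  /* write 1 to clear the latch */
+    return 1;                           /* characters were lost */
+  }
+  return 0;
+}
+```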
+
+The number of characters in the FIFO selects the interrupt
+behaviour. The TX interrupt will be raised when there are fewer
+characters in the FIFO than configured in the !!FIFO.TXILVL
+field. The RX interrupt will be raised when there are the same or more
+characters in the FIFO than configured in the !!FIFO.RXILVL
+field. *TODO check I understand these levels*
+
+The number of characters currently in each FIFO can always be read
+from the !!RFIFO register.
+
+### Receive timeout
+
+The receiver timeout mechanism can raise an interrupt if the receiver
+detects the line to be idle for a long period. This is enabled and the
+timeout configured in the !!RXTO register.
+
+### Interrupts
+
+The UART has eight interrupts:
+- TX: raised if the transmit FIFO is past the trigger level
+- RX: raised if the receive FIFO is past the trigger level
+- TXOV: raised if the transmit FIFO has overflowed
+- RXOV: raised if the receive FIFO has overflowed
+- RXF: raised if a framing error has been detected on receive
+- RXB: raised if a break condition is detected on receive
+- RXTO: raised if the receiver has detected the line idle for the programmed timeout period
+- RXPE: raised if the receiver has detected a parity error
+
+The current state of the interrupts can be read from the !!ISTATE
+register. Each interrupt has a corresponding bit in the
+!!ISTATECLR register that must be written with a 1 to clear the
+interrupt.
+
+Interrupts are enabled in the !!ICTRL register (note the bit
+assignment does not match the other registers because this register
+also configures the break condition). This register only controls whether
+the interrupt is signalled to the system interrupt controller; it
+does not change or mask the value in the !!ISTATE register.
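+
+A service routine built on these registers might follow this sketch;
+the bit positions and handler bodies are illustrative only:
+
+```c
+#define UART_INT_RX   (1u << 1)   /* assumed position */
+#define UART_INT_RXO  (1u << 3)   /* assumed position */
+
+void uart_isr(volatile unsigned int *istate_reg,
+              volatile unsigned int *istateclr_reg)
+{
+  unsigned int pending = *istate_reg;
+  if (pending & UART_INT_RX) {
+    /* drain the receive FIFO here */
+  }
+  if (pending & UART_INT_RXO) {
+    /* report that received characters were lost */
+  }
+  /* write 1s back to clear only the interrupts that were observed */
+  *istateclr_reg = pending;
+}
+```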
+
+### Debug Features
+
+There are two loopback modes that may be useful during debugging.
+
+System loopback is enabled by setting !!CTRL.SLPBK. Any
+characters written to the transmit buffer will be copied to the
+receive buffer. The state of the RX and CTS pins are ignored. Hardware
+flow control should be disabled when this mode is used.
+
+Line loopback is enabled by setting !!CTRL.LLPBK. Any data
+received on the RX pin is transmitted on the TX pin. Data is retimed
+by the peripheral clock, so this is only reliable if f<sub>pclk</sub>
+is more than twice the baud rate being received. Hardware flow
+control, the TX and RX FIFOs and interrupts should be disabled when
+this mode is used.
+
+Direct control of the TX pin can be done by setting the value to drive
+in the !!OVRD.TXVAL bit with the !!OVRD.TXEN bit set.  Direct control
+of the RTS pin can be done by setting the value to drive in the
+!!OVRD.RTSVAL bit with the !!OVRD.RTSEN bit set.
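+
+Driving the TX pin directly (for example to force a break) could be
+sketched as follows, with illustrative bit positions:
+
+```c
+#define UART_OVRD_TXEN   (1u << 0)
+#define UART_OVRD_TXVAL  (1u << 1)
+
+void uart_force_tx(volatile unsigned int *ovrd_reg, int level)
+{
+  *ovrd_reg = UART_OVRD_TXEN | (level ? UART_OVRD_TXVAL : 0u);
+}
+```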
+
+The most recent samples from the receive and CTS pins gathered at 16x
+the baud clock can be read from the !!VAL register if
+!!CTRL.RCOS is set.
+
+Interrupts can be tested by configuring the interrupts to be raised in
+the !!ITOP register and setting the !!ITCR bit. This will
+raise the corresponding interrupts to the system. It is recommended
+the regular sources are disabled in the !!ICTRL register when
+this feature is used. *TODO is this implementation?*
+
+
+
+## Implementation Guide
+
+The toplevel of the UART has the following signals that connect to
+external pins:
+- TX: transmit data output connects to external pin
+- RX: receive data input connects to external pin
+- RTS: request to send flow control output. This pin is active
+  low. Connects to external pin.
+- CTS: clear to send flow control input. This pin is active
+  low. Connects to external pin.
+
+The following signals connect to the interrupt controller:
+- txint
+- rxint
+- txoint
+- rxoint
+- rxfint
+- rxbint
+- rxtoint
+- rxpeint
+
+A clock with some spec must be provided:
+- pclk
+
+The main register interface is an APB slave using:
+- APB signals
+
+An additional 32-bit scratch register !!DVREG is provided.
+
+The UART code has no build-time options. (Unless it does.)
+
+## Registers
+{{% registers x }}
diff --git a/util/docgen/examples/uart16550.hjson b/util/docgen/examples/uart16550.hjson
new file mode 100644
index 0000000..4a9f160
--- /dev/null
+++ b/util/docgen/examples/uart16550.hjson
@@ -0,0 +1,331 @@
+{
+  name: "UART16550",
+  clock_primary: "clk_fixed",
+  bus_device: "tlul",
+
+  regwidth: 8,
+  registers: [
+    {name: "DATA", desc: "UART data",
+      swaccess: "rw", fields: [
+	  { bits: "7:0", resval: "x", desc: '''
+             A read returns the **bold character** in the receive buffer if
+             !!LCR.DLAB is clear, or the *italic low byte* of the ** bold
+	     divisor split over line** if
+	     !!LCR.DLAB is set.  Writes send characters to the
+	     transmitter holding
+             buffer if !!LCR.DLAB is clear, or set the low byte of the
+             divisor if DLAB in !!LCR is set.
+            '''
+	  }
+      ]
+    },
+    {name: "IER", desc: "Interrupt enable register", swaccess: "rw", fields: [
+      {bits: "0", name: "RDAE", desc: '''
+        Enable Received Data Available interrupt. If this bit is set
+        an interrupt will be raised whenever the receive FIFO contains
+        data. This reference is to a !!BOGUS register
+	'''
+      }
+      {bits: "1", name: "TXEE", desc: '''
+        Enable Transmit holding register empty interrupt. If this bit
+        is set then an interrupt will be raised whenever the
+        transmitter buffer is empty.
+	'''
+      }
+      {bits: "2", name: "RLE", desc: '''
+        Enable Receiver Line Status. If this bit is set then an
+        interrupt will be raised whenever the receive side line status
+        changes.
+	'''
+      }
+      {bits: "3", name: "MSE", desc: '''
+        Enable Modem Status interrupt. If this bit is set then an
+        interrupt will be raised whenever the modem status changes.
+	'''
+      }
+      {bits: "4", name: "SLP", desc: '''
+        If this bit is set the UART will enter sleep mode. Operation
+        will be stopped until a transition is detected on DIN, CTS_L,
+        DSR_L, DCD_L or RI_L.
+	'''
+      }
+      {bits: "5", name: "LPE", desc: '''
+        If this bit is set the UART will enter low power mode, the
+        same as sleep.
+	'''
+      }
+    ]},
+    {sameaddr: [
+	{name: "IIR", desc: "Interrupt identification register",
+	 swaccess: "ro", fields: [
+	     {bits: "0", name: "INT", resval: "1",
+	      desc: "This bit is clear if the UART is interrupting."
+	     }
+	     {bits: "2:1", name: "TYPE",
+	      desc: '''
+              If the INT bit is clear, these bits indicate the highest
+              priority pending interrupt.
+              '''
+	      enum: [
+      		 { value: "0", name: "mdm", desc: "Modem status (lowest)" },
+       		 { value: "1", name: "txe", desc: "TX holding register empty" },
+       		 { value: "2", name: "rxd", desc: "RX data available" },
+       		 { value: "3", name: "rxl", desc: "RX line status (highest)" }
+	      ]
+	     }
+	     {bits: "3", name: "TO",
+	      desc: "This bit is set if there is a timeout interrupt pending."
+	     }
+	     {bits: "5", name: "F64",
+	      desc: "Will always be clear because the FIFO is not 64 bytes."
+	     }
+	     {bits: "7:6", name: "FEN", desc: '''
+	      These bits will both be clear if the FIFO is disabled
+	      and both be set if it is enabled.
+              '''
+	     }
+	 ]}
+	{name: "FCR", desc: "FIFO Control Register",
+	 swaccess: "wo", fields: [
+	     {bits: "0", name: "FEN",
+	      desc: "This bit must be set to enable the FIFOs."
+	     }
+	     {bits: "1", name: "CRX", desc: '''
+               Writing this bit with a 1 will reset the RX fifo. The
+               bit will clear after the FIFO is reset.
+              '''
+	     }
+	     {bits: "2", name: "CTX", desc: '''
+               Writing this bit with a 1 will reset the TX fifo. The
+               bit will clear after the FIFO is reset.
+              '''
+	     }
+	     {bits: "3", name: "DMAS",
+	      desc: "DMA Mode Select. This bit is not used."
+	     }
+	     {bits: "5", name: "E64",
+	      desc: "This bit is reserved because the FIFO is not 64 bytes."
+	     }
+	     {bits: "7:6", name: "TL",
+	      desc: '''
+                These two bits set the interrupt trigger level for the
+                receive FIFO. The received data available interrupt
+                will be set when there are at least this number of
+                bytes in the receive FIFO.
+              '''
+	      enum: [
+      		 { value: "0", name: "rxlvl1", desc: "1 Byte" },
+       		 { value: "1", name: "rxlvl4", desc: "4 Bytes" },
+       		 { value: "2", name: "rxlvl8", desc: "8 bytes" },
+       		 { value: "3", name: "rxlvl14", desc: "14 bytes" }
+	      ]
+	     }
+	 ]}
+    ]}
+      {name: "LCR", desc: "Line control register", swaccess: "rw", fields: [
+       {bits: "1:0", name: "WSIZE", desc: '''
+        These two bits set the word size for both transmission and reception.
+	'''
+       enum: [
+      	   { value: "0", name: "bits5", desc: "5 bits" },
+       	   { value: "1", name: "bits6", desc: "6 bits" },
+       	   { value: "2", name: "bits7", desc: "7 bits" },
+       	   { value: "3", name: "bits8", desc: "8 bits" }
+       ]
+      }
+      {bits: "2", name: "STOP", desc: '''
+        If this bit is clear one stop bit is used. If this bit is set
+        then two stop bits are used for 6,7, and 8 bit transmission
+        and 1.5 stop bits for 5 bit transmission.
+	'''
+      }
+      {bits: "3", name: "PAR", desc: '''
+        If this bit is clear parity is not used. If this bit is set
+        then parity is added according to the PTYPE field.
+	'''
+      }
+      {bits: "5:4", name: "PTYPE", desc: '''
+        These two bits set the parity for both transmission and reception.
+	'''
+       enum: [
+      	   { value: "0", name: "parodd", desc: "Odd parity" },
+       	   { value: "1", name: "pareven", desc: "Even parity" },
+       	   { value: "2", name: "parhigh", desc: "Parity bit always 1" },
+       	   { value: "3", name: "parlow", desc: "Parity bit always 0" }
+       ]
+      }
+      {bits: "6", name: "BRKEN", desc: '''
+        If this bit is clear the line runs as normal. If set then TX
+        is forced low, sending a break condition.
+	'''
+      }
+      {bits: "7", name: "DLAB", desc: '''
+        If this bit is clear the normal registers are accessed at
+        offset 0 and 1. If set then the divisor latch can be accessed.
+	'''
+      }
+    ]},
+    {name: "MCR", desc: "Modem control register", swaccess: "rw", fields: [
+      {bits: "0", name: "FDTR", desc: '''
+        The state of this bit sets the state of the DTR pin. When
+        loopback mode is selected this drives the DSR_L input.
+	'''
+      }
+      {bits: "1", name: "FRTS", desc: '''
+        The state of this bit sets the state of the RTS_L pin. When
+        loopback mode is selected this drives the CTS_L input.
+	'''
+      }
+      {bits: "2", name: "OUT1", desc: '''
+        The state of this bit sets the state of the OUT1 signal. This
+        is only used in loopback mode when it drives the RI input.
+	'''
+      }
+      {bits: "3", name: "OUT2", desc: '''
+        The state of this bit sets the state of the OUT2 signal. This
+        is only used in loopback mode when it drives the DCD_L input.
+	'''
+      }
+      {bits: "4", name: "LOOP", desc: '''
+        This bit should be clear for normal operation. If this bit is
+        set the TX pin will go high, the handshake outputs will go
+        inactive and the receiver inputs are ignored. Internally the
+        transmitter is looped back to the receiver, allowing testing
+        of the UART.
+	'''
+      }
+      {bits: "5", name: "AFC", desc: '''
+        If this bit is set and the FRTS bit is set the UART will
+        generate RTS based on the trigger level set for the receive
+        FIFO. (If FRTS is clear then RTS will be deasserted whatever
+        the state of this bit.) RTS will be asserted while the FIFO
+        contains fewer bytes than set in the FCR TL field. If a start
+        bit is seen while the FIFO is at or above the trigger level
+        then RTS will be deasserted. If the FIFO is not being emptied
+        one byte above the trigger level will be received, and there
+        could be one byte more to cover the source being slow to
+        respond to the RTS deassertion. In addition when this bit is
+        set the UART will only start transmission of a character when
+        CTS is asserted.
+	'''
+      }
+
+    ]},
+    {name: "LSR", desc: "Line status register", swaccess: "ro", fields: [
+	{bits: "0", name: "DRDY", desc: '''
+           If this bit is set there is data ready to be read from the
+           receive buffer.
+	   '''
+	}
+	{bits: "1", name: "OVR", swaccess:"rc", desc: '''
+           If this bit is set then the receive FIFO has overrun and
+           data has been lost. The overflow status is raised when a
+           character is received and the FIFO is full, and cleared
+           whenever the LSR is read. Thus the OVR bit does not
+           indicate where in the data the lost character(s) happened.
+	   '''
+	}
+	{bits: "2", name: "PE", desc: '''
+           If this bit is set then the character at the head of the
+           receive FIFO (i.e. the next character to be read from the
+           receive buffer) has a parity error.
+	'''
+	}
+	{bits: "3", name: "FE", desc: '''
+           If this bit is set then the character at the head of the
+           receive FIFO (i.e. the next character to be read from the
+           receive buffer) has a framing error. The STOP bit was not
+           seen as set.
+	'''
+	}
+	{bits: "4", name: "BRK", desc: '''
+           If this bit is set then the character at the head of the
+           receive FIFO (i.e. the next character to be read from the
+           receive buffer) is zero and was the start of a line break condition.
+	   '''
+	}
+	{bits: "5", name: "THRE", resval: "1", desc: '''
+           If this bit is set the transmitter FIFO is empty (but there may
+           be a character being transmitted).
+	   '''
+      }
+	{bits: "6", name: "TEMT", resval: "1", desc: '''
+         If this bit is set the transmitter is empty. There are no
+         characters in the holding register, FIFO or currently
+         being transmitted.
+	 '''
+      }
+	{bits: "7", name: "RFE", swaccess:"rc", desc: '''
+        If this bit is set there is at least one character in the
+        receive FIFO with a parity error or framing error or break
+        condition. This bit is cleared by reading the LSR if there
+        are no subsequent errors in the FIFO.
+	'''
+      }
+    ]},
+    {name: "MSR", desc: "Modem status register", swaccess: "ro", fields: [
+	{bits: "0", name: "DCTS", swaccess:"rc", desc: '''
+          If this bit is set the CTS input has changed since this
+          register was last read. (Note that this bit is set by a
+          transition on the line. If multiple transitions have happened
+          then the value in the CTS bit may be the same as in the
+          last read even though this bit is set.) In loopback mode a write
+          to the register with a 1 in this bit will cause the bit to be
+          set (and potentially an interrupt raised), writes of 0 are ignored.
+	'''
+	}
+	{bits: "1", name: "DDSR", swaccess:"rc", desc: '''
+          If this bit is set the DSR input has changed since this
+          register was last read. (See note for DCTS) In loopback mode a write
+          to the register with a 1 in this bit will cause the bit to be
+          set (and potentially an interrupt raised), writes of 0 are ignored.
+	'''
+	}
+	{bits: "2", name: "TRI", swaccess:"rc", desc: '''
+          If this bit is set a low to high transition was seen on the RI input
+          since this
+          register was last read. (See note for DCTS) In loopback mode a write
+          to the register with a 1 in this bit will cause the bit to be
+          set (and potentially an interrupt raised), writes of 0 are ignored.
+	  '''
+	}
+	{bits: "3", name: "DDCD", swaccess:"rc", desc: '''
+          If this bit is set the DCD input has changed since this
+          register was last read. (See note for DCTS) In loopback mode a write
+          to the register with a 1 in this bit will cause the bit to be
+          set (and potentially an interrupt raised), writes of 0 are ignored.
+	'''
+	}
+	{bits: "4", name: "CTS", desc: '''
+           This bit reflects the state of the CTS_L input. Note that since
+           CTS_L is active low, the value of this bit is the inverse of the pin.
+	   '''
+	}
+	{bits: "5", name: "DSR", desc: '''
+        This bit reflects the state of the DSR_L input. Note that since
+        DSR_L is active low, the value of this bit is the inverse of the pin.
+	'''
+      }
+      {bits: "6", name: "RI", desc: '''
+        This bit reflects the state of the RI_L input. Note that since
+        RI_L is active low, the value of this bit is the inverse of the pin.
+	In loopback mode this bit reflects the value of OUT1 in the MCR.
+        '''
+      }
+      {bits: "7", name: "DCD", desc: '''
+        This bit reflects the state of the DCD_L input. Note that since
+        DCD_L is active low, the value of this bit is the inverse of the pin.
+	In loopback mode this bit reflects the value of OUT2 in the MCR.
+	'''
+      }
+    ]},
+    {name: "SCR", desc: "Scratch register", swaccess: "rw", fields: [
+	{bits: "7:0", name: "scratch", resval: "x", desc: '''
+        This register is not used by the hardware. Software may use it
+        as a scratch register.
+	'''
+      }
+    ]},
+
+  ]
+}
diff --git a/util/docgen/examples/uart16550.md b/util/docgen/examples/uart16550.md
new file mode 100644
index 0000000..8fb3395
--- /dev/null
+++ b/util/docgen/examples/uart16550.md
@@ -0,0 +1,280 @@
+{{% lowrisc-doc-hdr UART with 16550 style interface }}
+{{% regfile uart16550.hjson }}
+
+The UART16550 provides an asynchronous serial interface that can
+operate at programmable BAUD rates. The main features are:
+
+- 16 byte transmit FIFO
+- 16 byte receive FIFO
+- Programmable baud rate generator
+- Hardware flow control (when enabled)
+- 5, 6, 7, or 8 data bits
+- optional parity bit (even, odd, mark or space)
+- 1 or 2 stop bits when used with 6, 7 or 8 data bits
+- 1 or 1.5 stop bits when used with 5 data bits
+
+## Compatibility
+
+The UART16550 is compatible with the de-facto standard 16550 driver
+with registers at byte offsets.
+
+
+## Theory of operation
+
+*TODO block diagram of UART*
+
+The UART can connect to eight external pins:
+* TX: transmit data output.
+* RX: receive data input.
+* RTS_L: request to send flow control output. This pin is active low.
+* CTS_L: clear to send flow control input. This pin is active low.
+* DTR_L: data terminal ready output. This pin is active low.
+* DSR_L: data set ready input. This pin is active low.
+* DCD_L: data carrier detect input.  This pin is active low.
+* RI_L: ring indicate. This pin is active low.
+
+### Baud Rate
+
+The serial line timing is based on a 16x baud rate clock. The
+programmable baud rate generator is driven by a 133.33MHz clock and
+has a 16 bit divider to generate the 16x baud rate reference. This
+allows generation of the standard baud rates from 300 to 921600 baud
+with less than 1% error. The divisor is accessed by setting the DLAB
+bit in the line control register which makes the low and high parts of
+the divisor value available for read and write through the byte
+registers at offset 0 (low byte of divisor) and 1 (high byte). Writing
+either of the divisor registers causes the divider counter to be
+reloaded.
+
+Required Baud | Divisor | Actual Baud | Error
+--------------|---------|-------------|------
+B |D = INT(0.5 + 133.33MHz/(16*B)) | A = (133.33MHz/D)/16 | (A-B)/B
+300    | 27778 | 300       | 0%
+600    | 13889 | 600.04    | 0.01%
+1200   | 6944  | 1200.08   | 0.01%
+1800   | 4630  | 1799.86   | -0.01%
+2400   | 3472  | 2400.15   | 0.01%
+4800   | 1736  | 4800.31   | 0.01%
+9600   | 868   | 9600.61   | 0.01%
+19200  | 434   | 19201.23  | 0.01%
+38400  | 217   | 38402.46  | 0.01%
+57600  | 145   | 57471.26  | 0.47%
+115200 | 72    | 115740.74 | 0.47%
+230400 | 36    | 231481.48 | 0.47%
+460800 | 18    | 462962.96 | 0.47%
+921600 | 9     | 925925.92 | 0.47%
+
+If the baud rate divisor is set to zero the baud rate clock is stopped
+and the UART will be disabled, this is the default. The baud rate
+clock is automatically stopped to save power when the UART is idle.
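+
+The divisor calculation and the DLAB access sequence described above
+could be sketched as follows; the byte offsets are the standard 16550
+layout, but the function and macro names are illustrative and a real
+driver would use its own register definitions:
+
+```c
+#define UART_REF_CLK_HZ 133333333u
+
+static unsigned int uart16550_divisor(unsigned int baud)
+{
+  /* D = INT(0.5 + refclk / (16 * B)), i.e. round to nearest */
+  return (UART_REF_CLK_HZ + 8u * baud) / (16u * baud);
+}
+
+static void uart16550_set_baud(volatile unsigned char *regs, unsigned int baud)
+{
+  unsigned int div = uart16550_divisor(baud);
+  regs[3] |= 0x80u;              /* LCR.DLAB = 1: expose the divisor latch */
+  regs[0] = div & 0xffu;         /* low byte of the divisor at offset 0 */
+  regs[1] = (div >> 8) & 0xffu;  /* high byte at offset 1 */
+  regs[3] &= 0x7fu;              /* LCR.DLAB = 0: normal registers again */
+}
+```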
+
+### Serial data format
+
+The serial line is high when idle. Characters are sent using a start
+bit (low) followed by 5, 6, 7 or 8 data bits sent least significant
+first. Optionally there may be a parity bit which is computed to give
+either even or odd parity or may be always high or always low. Finally
+there is a stop sequence during which the line is high for one or two
+(1.5 for 5 bit characters) bit times. The start bit for the next
+character may immediately follow the stop sequence, or the line may be
+in the idle (high) state for some time. The data format and (for
+reference) the baud clock are illustrated below for the different
+numbers of data bits with no parity and a single stop bit, and for 8
+data bits with parity. The line could go idle (high), or the next
+character could start, at the point marked "next" after the stop
+bit. All formatting parameters are controlled in the !!LCR.
+
+```wavejson
+{signal: [
+  {name:'Baud Clock',  wave: 'p...........' },
+  {name:'Data 8 bit',        wave: '10========1=',
+   data: [ "lsb", "", "", "", "", "", "", "msb", "next" ] },
+  {name:'Data 7 bit',        wave: '10=======1=.',
+   data: [ "lsb", "", "", "", "", "", "msb", "next" ] },
+  {name:'Data 6 bit',        wave: '10======1=..',
+   data: [ "lsb", "", "", "", "", "msb", "next" ] },
+  {name:'Data 5 bit',        wave: '10=====1=...',
+   data: [ "lsb", "", "", "", "msb", "next" ] },
+  {name:'8 with Parity', wave: '10=========1',
+   data: [ "lsb", "", "", "", "", "", "", "msb", "par" ] },
+ ],
+ head:{
+   text:'Serial Line format (one stop bit)',
+   tock:-1,
+ }
+}
+```
+
+The data formatting ensures that in normal operation the line cannot
+be low for more than the number of data bits plus two (low start bit
+plus all zero character plus even or low parity bit) before the stop
+sequence forces the line high. If the line remains low for longer than
+this time the condition is known as a Break. The UART can be set to
+generate a (continuous) break on its output line by setting BRKEN in
+the !!LCR. Detection of a break is signalled by reception of a
+character containing all zeros that is accompanied by the Break Detect
+Flag.
+
+### Serial Data Reception
+
+The UART detects the RX line transitioning from high to low as the
+start of a potential reception. The line is checked after half a bit
+time (8 cycles of the 16x baud rate clock) and if still low then a
+start bit is detected. Every bit-time (16 cycles of the 16x baud rate
+clock) the data is sampled. One additional bit is sampled following
+the data and parity bits. This should be the stop bit and should
+therefore be set. If the line is detected low when the stop bit is
+expected then the data is still received but is marked with a framing
+error (note that only one bit is checked even if the stop sequence is
+set to two bits). If parity is enabled and the bit does not match the
+expected value then the received character is marked with a parity
+error.
+
+### Serial Data Transmission
+
+The UART will normally format characters (add start, parity and stop
+bits) and transmit them whenever characters are available to be sent
+and the line is idle. However, setting the AFC bit in the !!MCR
+enables automatic flow control. With this setting the transmitter will
+only start to send a character if the CTS_L line is asserted
+(i.e. low) indicating that the peer device is able to receive data.
+
+### Interface FIFOs
+
+The interface has a FIFO to hold characters waiting to be transmitted
+and a FIFO to hold characters that have been received but not yet read
+by software. These FIFOs are 16 characters deep. By default the FIFOs
+are disabled (effectively one character deep) and should be enabled by
+setting the FEN bit in the FIFO Control Register. Note that when the
+FEN bit is set any character that is currently in the holding register
+will be transmitted before the FIFO is enabled (this was not the case
+prior to revision 16 of the UART where it was advised to check the
+TEMT bit in the LSR to ensure there are no characters in-flight when
+the FIFO is enabled).
+
+Writes to the Data Register when the Transmit Holding Register Empty
+(THRE) status bit is set will add characters to the transmit
+FIFO. This status bit will be clear when the FIFO is full, and any
+writes to the Data Register while it is clear will be discarded.
+
+Reads from the Data Register will return the next received character
+and will remove it from the receive FIFO. Prior to reading the Data
+Register a read should be done of the Line Status Register which will
+indicate if there is data available and give the error flags that
+accompany the character at the head of the FIFO. (The error flags flow
+through the FIFO with their corresponding character.)
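+
+A minimal receive path following this sequence might look like the
+sketch below (standard 16550 byte offsets; the error handling is only
+a stub and the names are illustrative):
+
+```c
+#define LSR_DRDY 0x01u                  /* data ready */
+#define LSR_ERRS 0x1eu                  /* OVR | PE | FE | BRK */
+
+int uart16550_getc(volatile unsigned char *regs)
+{
+  unsigned char lsr = regs[5];          /* Line Status Register */
+  if (!(lsr & LSR_DRDY)) {
+    return -1;                          /* nothing to read */
+  }
+  if (lsr & LSR_ERRS) {
+    /* the error flags describe the character about to be read */
+  }
+  return regs[0];                       /* reading pops the RX FIFO */
+}
+```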
+
+Once the FIFOs are enabled the TL field in the FCR can be used to
+configure the number of characters that must be in the receive FIFO to
+trigger two events:
+
+1. The receive data available interrupt is raised (if the FIFO is
+   disabled this is done when a single character is received).
+
+2. If automatic flow control is enabled the RTS_L output is deasserted
+   (i.e. set high) on reception of a start bit.
+
+### Modem/Handshake Signals
+
+The UART has two output lines (RTS\_L and DTR\_L) and four input lines
+(CTS\_L, DSR\_L, DCD\_L and RI\_L) that can be used for modem
+control. However, only the RTS\_L output and CTS\_L input are given
+dedicated pins. The other lines are shared with GPIO signals and the
+GPIO configuration register must be set correctly to enable their
+use. (See section on the GPIO pins.)
+
+The state of the input signals can be read in the Modem Status
+Register which also reports if any of the lines have changed since the
+previous read of the register. Detection of a change in state can
+generate an interrupt. The state of the output lines can be set in the
+Modem Control Register.
+
+If automatic flow control is enabled then the hardware will control
+the RTS\_L output and use the state of the CTS\_L input. RTS\_L will be
+deasserted whenever the receive FIFO is full to the threshold level set
+in the TL field of the FIFO Control Register and a start bit is
+detected. RTS\_L will be asserted whenever the receive FIFO is below
+the threshold. The transmitter will check the CTS\_L signal prior to
+sending a character and will wait for CTS\_L to be asserted before
+starting the character (once a character has been started it will be
+completed before the CTS_L is checked again).
+
+
+### Interrupts and powerdown
+
+The UART can generate an interrupt to the CPU. The Interrupt Enable
+Register configures which UART events cause the interrupt to be raised
+and the Interrupt Identification Register allows detection of the
+cause of the interrupt.  In addition to the normal UART register
+controls, the interrupt may be disabled by setting the intd control
+bit in the PCI header Command register and the state of the interrupt
+may be detected in the is bit of the PCI header Status register. The
+interrupt source number that the UART will use can be read as the
+default value in the iline field of the PCI header.
+
+The UART may be forced into a low power mode by setting either or both
+of the SLP and LPE bits in the Interrupt Enable Register.
+
+### Scratch Register
+
+The UART contains an 8 bit read/write register that is not used by the
+hardware. Software may use this register as it sees fit. The value in
+the scratch register is unpredictable following a reset.
+
+Testing cross reference to !!DATA (or with punctuation !!DATA). The
+strange case will be !!LCR. Which is a different period than in
+!!LCR.DLAB or could be used twice in !!LCR.DLAB. How about
+!!LCR-!!DATA? Phew!
+
+## Programmer Guide
+
+
+### Initialization
+
+The baud rate should be set as previously outlined to enable the UART.
+
+### Interrupts
+
+The UART raises a single interrupt to the system based on the four
+sources that can be enabled in !!IER:
+
+- TXEE: raised if the transmit buffer is empty
+- RDAE: raised if received data is available (if the FIFO is enabled the
+  TL field in the FCR sets the number of characters in the FIFO before
+  this is raised)
+- RLE: The receiver line status has changed
+- MSE: The modem status has changed
+
+
+### Debug Features
+
+A loopback mode can be enabled. In this mode the output serial data is
+internally looped back to the receiver and the output control lines
+(and two additional signals) are looped back to the four handshake
+inputs. This allows software testing. In this mode the output pins
+will be in their inactive state (i.e. high).
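+
+Entering and leaving this self-test mode could be as simple as the
+following sketch (MCR at the standard byte offset 4, LOOP at bit 4 as
+in the register file above):
+
+```c
+void uart16550_loopback(volatile unsigned char *regs, int enable)
+{
+  if (enable) {
+    regs[4] |= 0x10u;    /* MCR.LOOP = 1 */
+  } else {
+    regs[4] &= 0xefu;    /* MCR.LOOP = 0 */
+  }
+}
+```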
+
+
+## Implementation Guide
+
+The toplevel of the UART has the following signals that connect to
+external pins:
+- TX: transmit data output connects to external pin
+- RX: receive data input connects to external pin
+- RTS_L: request to send flow control output. This pin is active
+  low. Connects to external pin.
+- CTS_L: clear to send flow control input. This pin is active
+  low. Connects to external pin.
+- DTR_L: data terminal ready output. This pin is active low.
+- DSR_L: data set ready input. This pin is active low.
+- DCD_L: data carrier detect input.  This pin is active low.
+- RI_L: ring indicate. This pin is active low.
+
+The int signal connects to the interrupt controller.
+
+The 133.33MHz peripheral clock is connected to pclk.
+
+The main register interface is connected on the I/O ring.
+
+## Registers
+{{% registers x }}
diff --git a/util/docgen/examples/uartcfg.hjson b/util/docgen/examples/uartcfg.hjson
new file mode 100644
index 0000000..c06be60
--- /dev/null
+++ b/util/docgen/examples/uartcfg.hjson
@@ -0,0 +1,283 @@
+{
+  name: "uart",
+  clock_primary: "clk_fixed",
+  bus_device: "tlul",
+  bus_host: "none",
+  available_input_list: [
+    { name: "rx", desc: "Serial receive bit" }
+  ],
+  available_output_list: [
+    { name: "tx", desc: "Serial transmit bit" }
+  ],
+  interrupt_list: [
+    { name: "tx_watermark",  desc: "raised if the transmit FIFO is past the programmed highwater mark."}
+    { name: "rx_watermark",  desc: "raised if the receive FIFO is past the programmed highwater mark."}
+    { name: "tx_overflow",   desc: "raised if the transmit FIFO has overflowed."}
+    { name: "rx_overflow",   desc: "raised if the receive FIFO has overflowed."}
+    { name: "rx_frame_err",  desc: "raised if a framing error has been detected on receive."}
+    { name: "rx_break_err",  desc: "raised if break condition has been detected on receive."}
+    { name: "rx_timeout",    desc: "raised if RX FIFO has characters remaining inFIFO without being retrieved for the programmed time period."}
+    { name: "rx_parity_err", desc: "raised if the receiver has detected a parity error."}
+  ],
+
+  regwidth: "32",
+  registers: [
+    { name: "CTRL",
+      desc: "UART control register",
+      swaccess: "rw",
+      hwaccess: "hro",
+      fields: [
+        { bits: "0",
+          name: "TX",
+          desc: "TX enable"
+        }
+        { bits: "1",
+          name: "RX",
+          desc: "RX enable"
+        }
+        { bits: "2",
+          name: "NF",
+          desc: "RX noise filter enable"
+        }
+        { bits: "4",
+          name: "SLPBK",
+          desc: '''System loopback enable.
+
+                If this bit is turned on, any outgoing bits to TX are received through RX.
+                See Block Diagram.
+                '''
+        }
+        { bits: "5",
+          name: "LLPBK",
+          desc: '''Line loopback enable.
+
+                If this bit is turned on, incoming bits are forwarded to TX for testing purpose.
+                See Block Diagram.
+                '''
+        }
+        { bits: "6",
+          name: "PARITY_EN",
+          desc: "If true, parity is enabled in both RX and TX directions."
+        }
+        { bits: "7",
+          name: "PARITY_ODD",
+          desc: "If PARITY_EN is true, this determines the type, 1 for odd parity, 0 for even."
+        }
+        { bits: "9:8",
+          name: "RXBLVL",
+          desc: '''
+                Trigger level for RX break detection. Sets the number of character
+                times the line must be low to detect a break.
+                ''',
+          enum: [
+            { value: "0",
+              name: "break2",
+              desc: "2 characters"
+            },
+            { value: "1",
+              name: "break4",
+              desc: "4 characters"
+            },
+            { value: "2",
+              name: "break8",
+              desc: "8 characters"
+            },
+            { value: "3",
+              name: "break16",
+              desc: "16 characters"
+            }
+          ]
+        }
+        { bits: "31:16",
+          name: "NCO",
+          desc: "BAUD clock rate control."
+        }
+      ]
+    },
+    { name:     "STATUS"
+      desc:     "UART live status register"
+      swaccess: "ro"
+      hwaccess: "hrw"
+      hwext:    "true"
+      hwre:     "true"
+      fields: [
+        { bits: "0"
+          name: "TXFULL"
+          desc: "TX buffer is full"
+        }
+        { bits: "1"
+          name: "RXFULL"
+          desc: "RX buffer is full"
+        }
+        { bits: "2"
+          name: "TXOVERFLOW"
+          desc: "TX buffer overflow"
+        }
+        { bits: "3"
+          name: "RXOVERFLOW"
+          desc: "RX buffer overflow"
+        }
+        { bits: "4"
+          name: "TXEMPTY"
+          desc: "TX FIFO is empty"
+        }
+        { bits: "5"
+          name: "TXIDLE"
+          desc: "TX is idle"
+        }
+        { bits: "6"
+          name: "RXIDLE"
+          desc: "RX is idle"
+        }
+        { bits: "7"
+          name: "RXEMPTY"
+          desc: "RX FIFO is empty"
+        }
+      ]
+    }
+    { name: "RDATA",
+      desc: "UART read data",
+      swaccess: "ro",
+      hwaccess: "hrw",
+      hwext: "true",
+      hwre: "true",
+      fields: [
+        { bits: "7:0" }
+      ]
+    }
+    { name: "WDATA",
+      desc: "UART write data",
+      swaccess: "wo",
+      hwaccess: "hro",
+      hwqe: "true",
+      fields: [
+        { bits: "7:0" }
+      ]
+    }
+    { name: "FIFO_CTRL",
+      desc: "UART FIFO control register",
+      swaccess: "rw",
+      hwaccess: "hrw",
+      hwqe:     "true",
+      fields: [
+        { bits: "0",
+          name: "RXRST",
+          desc: "RX fifo reset"
+        }
+        { bits: "1",
+          name: "TXRST",
+          desc: "TX fifo reset"
+        }
+        { bits: "4:2",
+          name: "RXILVL",
+          desc: "Trigger level for RX interrupts",
+          enum: [
+            { value: "0",
+              name: "rxlvl1",
+              desc: "1 character"
+            },
+            { value: "1",
+              name: "rxlvl4",
+              desc: "4 characters"
+            },
+            { value: "2",
+              name: "rxlvl8",
+              desc: "8 characters"
+            },
+            { value: "3",
+              name: "rxlvl16",
+              desc: "16 characters"
+            },
+            { value: "4",
+              name: "rxlvl30",
+              desc: "30 characters"
+            },
+            // TODO expect generator to make others reserved
+          ]
+        }
+        { bits: "6:5",
+          name: "TXILVL",
+          desc: "Trigger level for TX interrupts",
+          enum: [
+            { value: "0",
+              name: "txlvl1",
+              desc: "1 character"
+            },
+            { value: "1",
+              name: "txlvl4",
+              desc: "4 characters"
+            },
+            { value: "2",
+              name: "txlvl8",
+              desc: "8 characters"
+            },
+            { value: "3",
+              name: "txlvl16",
+              desc: "16 characters"
+            }
+          ]
+        }
+      ]
+    }
+    { name: "FIFO_STATUS",
+      desc: "UART FIFO status register",
+      swaccess: "ro",
+      hwaccess: "hwo",
+      hwext: "true",
+      fields: [
+        { bits: "4:0",
+          name: "TXLVL",
+          desc: "Current fill level of TX fifo"
+        }
+        { bits: "10:6",
+          name: "RXLVL",
+          desc: "Current fill level of RX fifo"
+        }
+      ]
+    }
+    { name: "OVRD",
+      desc: "UART override control register",
+      swaccess: "rw",
+      hwaccess: "hro",
+      fields: [
+        { bits: "0",
+          name: "TXEN",
+          desc: "Override the TX signal"
+        }
+        { bits: "1",
+          name: "TXVAL",
+          desc: "Value for TX Override"
+        }
+      ]
+    }
+    { name: "VAL",
+      desc: "UART oversampled values",
+      swaccess: "ro",
+      hwaccess: "hwo",
+      hwext:    "true",
+      fields: [
+        { bits: "15:0",
+          name: "RX",
+          desc: '''
+                Last 16 oversampled values of RX. Most recent bit is bit 0, oldest 15.
+                '''
+        }
+      ]
+    }
+    { name: "TIMEOUT_CTRL",
+      desc: "UART RX timeout control",
+      swaccess: "rw",
+      hwaccess: "hro",
+      fields: [
+        { bits: "23:0",
+          name: "VAL",
+          desc: "RX timeout value in UART bit times"
+        }
+        { bits: "31",
+          name: "EN",
+          desc: "Enable RX timeout feature"
+        }
+      ]
+    }
+  ]
+}
diff --git a/util/docgen/examples/uartcfg.md b/util/docgen/examples/uartcfg.md
new file mode 100644
index 0000000..2ac94e9
--- /dev/null
+++ b/util/docgen/examples/uartcfg.md
@@ -0,0 +1,309 @@
+{{% lowrisc-doc-hdr UART HWIP Technical Specification }}
+{{% regfile uartcfg.hjson}}
+
+{{% section1 Overview }}
+
+This document specifies UART hardware IP functionality. This module
+conforms to the
+[Comportable guideline for peripheral device functionality.](../../../doc/rm/comportability_specification.md)
+See that document for integration overview within the broader
+top level system.
+
+{{% toc 3 }}
+
+{{% section2 Features }}
+
+- 2-pin full duplex external interface
+- 8-bit data word, optional even or odd parity bit per byte
+- 1 stop bit
+- 32 x 8b RX buffer
+- 32 x 8b TX buffer
+- Programmable baud rate
+- Interrupt for overflow, frame error, parity error, break error, receive
+  timeout
+
+{{% section2 Description }}
+
+The UART module is a serial-to-parallel receive (RX) and parallel-to-serial
+(TX) full duplex design intended to communicate to an outside device, typically
+for basic terminal-style communication. It is programmed to run at a particular
+BAUD rate and contains only a transmit and receive signal to the outside world,
+i.e. no synchronizing clock. The programmable BAUD rate guarantees to be met up
+to 1Mbps.
+
+{{% section2 Compatibility }}
+
+The UART is compatible with the feature set of H1 Secure Microcontroller UART as
+used in the [Chrome OS cr50][chrome-os-cr50] codebase. Additional features such
+as parity have been added.
+
+[chrome-os-cr50]: https://chromium.googlesource.com/chromiumos/platform/ec/+/master/chip/g/
+
+{{% section1 Theory of Operations }}
+
+{{% section2 Block Diagram }}
+
+![UART Block Diagram](block_diagram.svg)
+
+{{% section2 Hardware Interfaces }}
+
+{{% hwcfg uart}}
+
+{{% section2 Design Details }}
+
+### Serial interface (both directions)
+
+TX/RX serial lines are high when idle. Data starts with a START bit (1-->0)
+followed by 8 data bits, least significant bit first. If the parity feature
+is turned on, an odd or even parity bit follows the data bits. A STOP bit
+completes the transfer of one byte.
+
+```wavejson
+{
+  signal: [
+    { name: 'Baud Clock',     wave: 'p............'                                                        },
+    { name: 'tx',             wave: '10333333331..', data: [ "lsb", "", "", "", "", "", "", "msb" ]        },
+    { name: 'Baud Clock',     wave: 'p............'                                                        },
+    { name: 'tx (w/ parity)', wave: '103333333341.', data: [ "lsb", "", "", "", "", "", "", "msb", "par" ] },
+  ],
+  head: {
+    text: 'Serial Transmission Frame',
+  },
+  foot: {
+    text: [
+      'tspan',
+        ['tspan', 'start bit '],
+        ['tspan', {class:'info h4'}, '0'],
+        ['tspan', ' at cycle -1, stop bit '],
+        ['tspan', {class:'info h4'}, '1'],
+        ['tspan', ' at cycle 8, or at cycle 9 after parity bit'],
+      ],
+    tock: -2,
+  }
+}
+```
+
+### Transmission
+
+A write to !!WDATA enqueues a data byte into the 32 entry transmit
+FIFO, which triggers the transmit module to start UART TX serial data
+transfer. The TX module dequeues the byte from the FIFO and shifts it
+out bit by bit on the UART TX pin when the BAUD tick is asserted.
+
+### Reception
+
+The RX module samples the RX input pin with a 16x oversampled BAUD
+clock. After it detects the START bit, the RX module gathers the
+incoming serial bits into one data byte and pushes it into the 32
+entry RX FIFO if the optional parity bit and a correct STOP bit are
+received. The pushed data can be read out through the !!RDATA register.
+
+### Interrupts
+
+The UART module has a few interrupts including general data flow interrupts
+and unexpected event interrupts.
+
+If the TX or RX FIFO hits the designated depth of entries, interrupts
+`tx_watermark` or `rx_watermark` are raised to inform FW.  FW can
+configure the watermark value via registers !!FIFO_CTRL.RXILVL or
+!!FIFO_CTRL.TXILVL .
+
+If either FIFO receives an additional write request when its FIFO is full,
+the interrupt `tx_overflow` or `rx_overflow` is asserted and the character
+is dropped.
+
+The `rx_frame_err` interrupt is triggered if the RX module receives the
+`START` bit and a series of data bits but does not detect the `STOP` bit (`1`).
+
+```wavejson
+{
+  signal: [
+    { name: 'Baud Clock',        wave: 'p............'                                                 },
+    { name: 'rx',                wave: '10333333330..', data: [ "lsb", "", "", "", "", "", "", "msb" ] },
+    {},
+    { name: 'intr_rx_frame_err', wave: '0..........1.'},
+  ],
+  head: {
+    text: 'Serial Receive with Framing Error',
+  },
+  foot: {
+    text: [
+      'tspan',
+        ['tspan', 'start bit '],
+        ['tspan', {class:'info h4'}, '0'],
+        ['tspan', ' at cycle -1, stop bit '],
+        ['tspan', {class:'error h4'}, '1'],
+        ['tspan', ' missing at cycle 8'],
+      ],
+    tock: -2,
+  }
+}
+```
+
+The `rx_break_err` interrupt is triggered if a break condition has
+been detected. A break condition is defined as a programmable number
+of characters (via !!CTRL.RXBLVL, either 2, 4, 8, or 16) all equal to
+`0` during a frame error. This typically indicates that the UART is not
+being driven at this time.
+
+The `rx_timeout` interrupt is triggered when the RX FIFO has data sitting
+in it without software reading it for a programmable number of bit times
+(with baud rate clock as reference, programmable via !!TIMEOUT_CTRL). This
+is used to alert software that it has data still waiting in the FIFO that
+has not been handled yet. The timeout counter is reset whenever software
+reads a character from the FIFO, or if a new character is received from
+the line.
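+
+As a sketch (using the same illustrative register macro style as the
+snippets in the Programmers Guide below), enabling a timeout of a
+given number of bit times might look like:
+
+```cpp
+void uart_rx_timeout_enable(uint32 bit_times) {
+  // EN is bit 31, VAL occupies bits 23:0 of TIMEOUT_CTRL
+  *UART_TIMEOUT_CTRL_REG = (1u << 31) | (bit_times & 0xffffffu);
+}
+```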
+
+The `rx_parity_err` interrupt is triggered if parity is enabled and
+the RX parity bit does not match the expected polarity as programmed
+in !!CTRL.PARITY_ODD.
+
+{{% section1 Programmers Guide }}
+
+{{% section2 Initialization }}
+
+The following code snippet shows initializing the UART to a programmable
+baud rate, clearing the RX and TX FIFO, setting up the FIFOs for interrupt
+levels, and enabling some interrupts. The NCO register controls the baud
+rate, and should be set to `(2^20*baud)/freq`, where `freq` is the fixed
+clock frequency. The UART uses `clock_primary` as a clock source.
+
+$$ NCO = {{2^{20} * f_{baud}} \over {f_{pclk}}} $$
+
+```cpp
+#define CLK_FIXED_FREQ_MHZ 48
+
+void uart_init(int baud) {
+  // set baud rate. NCO = baud * 2^20 / clock_freq =~ baud / freq_mhz
+  int setting = baud / CLK_FIXED_FREQ_MHZ;
+  *UART_CTRL_NCO_REG = setting;
+
+  // clear FIFOs and set up to interrupt on any RX, half-full TX
+  *UART_FIFO_CTRL_REG =
+      UART_FIFO_CTRL_RXRST                 | // clear both FIFOs
+      UART_FIFO_CTRL_TXRST                 |
+      (UART_FIFO_CTRL_RXILVL_RXFULL_1 <<2) | // intr on RX 1 character
+      (UART_FIFO_CTRL_TXILVL_TXFULL_16<<5) ; // intr on TX 16 character
+
+  // enable only RX, overflow, and error interrupts
+  *UART_INTR_ENABLE_REG =
+      UART_INTR_ENABLE_RX_WATERMARK_MASK  |
+      UART_INTR_ENABLE_TX_OVERFLOW_MASK   |
+      UART_INTR_ENABLE_RX_OVERFLOW_MASK   |
+      UART_INTR_ENABLE_RX_FRAME_ERR_MASK  |
+      UART_INTR_ENABLE_RX_PARITY_ERR_MASK;
+
+  // at the processor level, the UART interrupts should also be enabled
+}
+```
+
+{{% section2 Common Examples }}
+
+The following code shows the steps to transmit a string of characters.
+
+```cpp
+int uart_tx_rdy() {
+  return ((*UART_FIFO_STATUS_REG & UART_FIFO_STATUS_TXLVL_MASK) == 32) ? 0 : 1;
+}
+
+void uart_send_char(char val) {
+  while(!uart_tx_rdy()) {}
+  *UART_WDATA_REG = val;
+}
+
+void uart_send_str(char *str) {
+  while (*str != '\0') {
+    uart_send_char(*str++);
+  }
+}
+```
+
+Do the following to receive a character, with 0xff returned if the RX FIFO is empty.
+
+```cpp
+int uart_rx_empty() {
+  return ((*UART_FIFO_STATUS_REG & UART_FIFO_STATUS_RXLVL_MASK) ==
+          (0 << UART_FIFO_STATUS_RXLVL_LSB)) ? 1 : 0;
+}
+
+char uart_rcv_char() {
+  if(uart_rx_empty())
+    return 0xff;
+  return *UART_RDATA_REG;
+}
+```
+
+{{% section2 Interrupt Handling }}
+
+The code below shows one example of how to handle all UART interrupts
+in one service routine.
+
+```cpp
+void uart_interrupt_routine() {
+  volatile uint32 intr_state = *UART_INTR_STATE_REG;
+  uint32 intr_state_mask = 0;
+  char uart_ch;
+  uint32 intr_enable_reg;
+
+  // Turn off Interrupt Enable
+  intr_enable_reg = *UART_INTR_ENABLE_REG;
+  *UART_INTR_ENABLE_REG = intr_enable_reg & 0xFFFFFF00; // Clr bits 7:0
+
+  if (intr_state & UART_INTR_STATE_RX_PARITY_ERR_MASK) {
+    // Do something ...
+
+    // Store Int mask
+    intr_state_mask |= UART_INTR_STATE_RX_PARITY_ERR_MASK;
+  }
+
+  if (intr_state & UART_INTR_STATE_RX_BREAK_ERR_MASK) {
+    // Do something ...
+
+    // Store Int mask
+    intr_state_mask |= UART_INTR_STATE_RX_BREAK_ERR_MASK;
+  }
+
+  // .. Frame Error
+
+  // TX/RX Overflow Error
+
+  // RX Int
+  if (intr_state & UART_INTR_STATE_RX_WATERMARK_MASK) {
+    while(1) {
+      uart_ch = uart_rcv_char();
+      if (uart_ch == 0xff) break;
+      uart_buf.append(uart_ch);
+    }
+    // Store Int mask
+    intr_state_mask |= UART_INTR_STATE_RX_WATERMARK_MASK;
+  }
+
+  // Clear Interrupt State
+  *UART_INTR_STATE_REG = intr_state_mask;
+
+  // Restore Interrupt Enable
+  *UART_INTR_ENABLE_REG = intr_enable_reg;
+}
+```
+
+One use of the `rx_timeout` interrupt is when the !!FIFO_CTRL.RXILVL
+is set greater than one, so an interrupt is only fired when the fifo
+is full to a certain level. If the remote device sends fewer than the
+watermark number of characters before stopping sending (for example it
+is waiting an acknowledgement) then the usual `rx_watermark` interrupt
+would not be raised. In this case an `rx_timeout` would generate an
+interrupt that allows the host to read these additional characters. The
+`rx_timeout` can be selected based on the worst latency experienced by a
+character. The worst case latency experienced by a character will happen
+if characters happen to arrive just slower than the timeout: the second
+character arrives just before the timeout for the first (resetting the
+timer), the third just before the timeout from the second etc. In this
+case the host will eventually get a watermark interrupt, this will happen
+`((RXILVL - 1)*timeout)` after the first character was received.
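+
+For example, if !!FIFO_CTRL.RXILVL is set to 4 characters and the
+timeout to 100 bit times, this worst case gives a watermark interrupt
+roughly `(4 - 1) * 100 = 300` bit times after the first character
+arrived.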
+
+{{% section2 Register Table }}
+
+{{% registers x }}
diff --git a/util/docgen/examples/win.hjson b/util/docgen/examples/win.hjson
new file mode 100644
index 0000000..436af39
--- /dev/null
+++ b/util/docgen/examples/win.hjson
@@ -0,0 +1,231 @@
+{
+  name: "WIND",
+  clock_primary: "clk_fixed",
+  bus_device: "tlul",
+
+  regwidth: "32",
+  registers: [
+    {name: "RDATA", desc: "UART read data",
+      swaccess: "ro", fields: [
+      {bits: "7:0", resval: "0x0"}
+    ]},
+    {name: "WDATA", desc: "UART write data", swaccess: "wo", fields: [
+      {bits: "7:0", resval: "0x0"}
+    ]},
+    {window: {
+    	     name: "win1"
+	     items: "64"
+	     swaccess: "rw"
+	     desc: '''
+	     	   A simple 256 byte window that should get aligned.
+		   It references !!RDATA and ** *bold italcs* **
+		   For testing (it also references !!WDATA) and !!NCO1.
+		   '''
+	}
+    },
+    {name: "NCO", desc: "Baud clock rate control", swaccess: "rw", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {window: {
+    	     name: "win2"
+	     items: "15"
+	     validbits: "16"
+	     byte-write: "True"
+	     noalign: "True"
+	     swaccess: "rw1c"
+	     desc: '''
+	     	   A 60 byte window that does not get aligned.
+		   Should generate warnings
+		   '''
+	}
+    },
+    {name: "NCO1", desc: "Baud clock rate control", swaccess: "rw", fields: [
+      {bits: "15:0", resval: "0b0"}
+    ]},
+    {window: {
+    	     name: "win3"
+	     items: "15"
+	     validbits: "16"
+	     byte-write: "True"
+	     unusual: "True"
+	     swaccess: "rw1c"
+	     desc: '''
+	     	   A 60 byte window that does get aligned.
+		   Marked unusual so no warnings
+		   '''
+	}
+    },
+    {name: "CTRL", desc: "UART control register", swaccess: "rw", fields: [
+      {bits: "0", name: "TX", desc: '''
+        TX enable has a really long description that will go on over
+	several lines and really want to wrap to be seen well in the
+	source format.
+	'''
+	}
+      {bits: "1", name: "RX", desc: "RX enable"}
+      {bits: "2", name: "CTS", desc: "CTS hardware flow-control enable"}
+      {bits: "3", name: "RTS", desc: "RTS hardware flow-control enable"}
+      {bits: "4", name: "SLPBK", desc: "System loopback enable"}
+      {bits: "5", name: "LLPBK", desc: "Line loopback enable"}
+      {bits: "6", name: "RCOS", desc: "Oversample enable for RX and CTS"}
+      {bits: "7", name: "NF", desc: "RX noise filter enable"}
+      {bits: "8", name: "PARITY_EN", desc: "Parity enable"}
+      {bits: "9", name: "PARITY_ODD", desc: "1 for odd parity, 0 for even."}
+    ]}
+    {window: {
+    	     name: "win4"
+	     items: "16"
+	     validbits: "16"
+	     byte-write: "True"
+	     swaccess: "rw"
+	     desc: '''
+	     	   A simple 64 byte window that should get aligned.
+		   '''
+	}
+    },
+    {window: {
+    	     name: "win5"
+	     items: "16"
+	     validbits: "16"
+	     byte-write: "True"
+	     swaccess: "rw"
+	     desc: '''
+	     	   A simple 64 byte window that should immediately follow.
+		   '''
+	}
+    },
+    {name: "ICTRL", desc: "UART Interrupt control register", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "TX", desc: "TX interrupt enable" }
+      {bits: "1", name: "RX", desc: "RX interrupt enable"}
+      {bits: "2", name: "TXO", desc: "TX overflow interrupt enable"}
+      {bits: "3", name: "RXO", desc: "RX overflow interrupt enable"}
+      {bits: "4", name: "RXF", desc: "RX frame error interrupt enable"}
+      {bits: "5", name: "RXB", desc: "RX break error interrupt enable"}
+      {bits: "7:6", name: "RXBLVL", desc: '''
+       Trigger level for rx break detection. Sets the number of character
+       times the line must be low to detect a break
+       ''',
+       enum: [
+       	       { value: "0", name: "break2", desc: "2 characters" },
+       	       { value: "1", name: "break4", desc: "4 characters" },
+       	       { value: "2", name: "break8", desc: "8 characters" },
+       	       { value: "3", name: "break16", desc: "16 characters" }
+	     ]
+      }
+      {bits: "8", name: "RXTO", desc: "RX timeout interrupt enable"}
+      {bits: "9", name: "RXPE", desc: "RX parity error interrupt enable"}
+    ]}
+    {name: "STATE", desc: "UART state register", swaccess: "ro",
+     fields: [
+      {bits: "0", name: "TX", desc: "TX buffer full" }
+      {bits: "1", name: "RX", desc: "RX buffer full"}
+      {bits: "2", name: "TXO", desc: "TX buffer overflow"}
+      {bits: "3", name: "RXO", desc: "RX buffer overflow"}
+      {bits: "4", name: "TXEMPTY", desc: "TX buffer empty"}
+      {bits: "5", name: "TXIDLE", desc: "TX idle"}
+      {bits: "6", name: "RXIDLE", desc: "RX idle"}
+      {bits: "7", name: "RXEMPTY", desc: "RX fifo empty"}
+    ]}
+    // I suspect STATECLR should be r0w1c or something
+    {name: "STATECLR", desc: "UART state register", swaccess: "rw",
+     fields: [
+      {bits: "19", name: "TXO", desc: "Clear TX buffer overflow"}
+      {bits: "20", name: "RXO", desc: "Clear RX buffer overflow"}
+    ]}
+    {name: "ISTATE", desc: "UART Interrupt state register", swaccess: "ro",
+     fields: [
+      {bits: "0", name: "TX", desc: "TX interrupt state" }
+      {bits: "1", name: "RX", desc: "RX interrupt state"}
+      {bits: "2", name: "TXO", desc: "TX overflow interrupt state"}
+      {bits: "3", name: "RXO", desc: "RX overflow interrupt state"}
+      {bits: "4", name: "RXF", desc: "RX frame error interrupt state"}
+      {bits: "5", name: "RXB", desc: "RX break error interrupt state"}
+      {bits: "6", name: "RXTO", desc: "RX timeout interrupt state"}
+      {bits: "7", name: "RXPE", desc: "RX parity error interrupt state"}
+    ]}
+    {name: "ISTATECLR", desc: "UART Interrupt clear register",
+     swaccess: "r0w1c",
+     fields: [
+      {bits: "0", name: "TX", desc: "Clear TX interrupt" }
+      {bits: "1", name: "RX", desc: "Clear RX interrupt"}
+      {bits: "2", name: "TXO", desc: "Clear TX overflow interrupt"}
+      {bits: "3", name: "RXO", desc: "Clear RX overflow interrupt"}
+      {bits: "4", name: "RXF", desc: "Clear RX frame error interrupt"}
+      {bits: "5", name: "RXB", desc: "Clear RX break error interrupt"}
+      {bits: "6", name: "RXTO", desc: "Clear RX timeout interrupt"}
+      {bits: "7", name: "RXPE", desc: "Clear RX parity error interrupt"}
+    ]}
+    {name: "FIFO", desc: "UART FIFO control register", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "RXRST", swaccess: "r0w1c", desc: "RX fifo reset" }
+      {bits: "1", name: "TXRST", swaccess: "r0w1c", desc: "TX fifo reset" }
+      {bits: "4:2", name: "RXILVL",
+       desc: "Trigger level for RX interrupts"
+       enum: [
+       	       { value: "0", name: "rxlvl1", desc: "1 character" },
+       	       { value: "1", name: "rxlvl4", desc: "4 characters" },
+       	       { value: "2", name: "rxlvl8", desc: "8 characters" },
+       	       { value: "3", name: "rxlvl16", desc: "16 characters" }
+       	       { value: "4", name: "rxlvl30", desc: "30 characters" }
+	       // TODO expect generator to make others reserved
+	     ]
+      }
+      {bits: "6:5", name: "TXILVL",
+       desc: "Trigger level for TX interrupts"
+       enum: [
+       	       { value: "0", name: "txlvl1", desc: "1 character" },
+       	       { value: "1", name: "txlvl4", desc: "4 characters" },
+       	       { value: "2", name: "txlvl8", desc: "8 characters" },
+       	       { value: "3", name: "txlvl16", desc: "16 characters" }
+	     ]
+      }
+    ]}
+    {name: "RFIFO", desc: "UART FIFO status register", swaccess: "ro",
+     fields: [
+      {bits: "5:0", name: "TXLVL", desc: "Current fill level of TX fifo" }
+      {bits: "11:6", name: "RXLVL", desc: "Current fill level of RX fifo" }
+    ]}
+    {name: "OVRD", desc: "UART override control register", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "TXEN", desc: "Override the TX signal" }
+      {bits: "1", name: "TXVAL", desc: "Value for TX Override" }
+      {bits: "2", name: "RTSEN", desc: "Override the RTS signal" }
+      {bits: "3", name: "RTSVAL", desc: "Value for RTS Override" }
+    ]}
+    {name: "VAL", desc: "UART oversampled values", swaccess: "ro",
+     fields: [
+      {bits: "15:0", name: "RX", desc: '''
+       Last 16 oversampled values of RX. Most recent bit is bit 0, oldest 15.
+      ''' }
+      {bits: "31:16", name: "CTS", desc: '''
+       Last 16 oversampled values of CTS. Most recent bit is bit 16, oldest 31.
+      ''' }
+    ]}
+    {name: "RXTO", desc: "UART RX timeout control", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "EN", desc: "Enable RX timeout feature" }
+      {bits: "24:1", name: "VAL", desc: "RX timeout value in UART bit times" }
+    ]}
+    { skipto: "0x0f00" }
+    {name: "ITCR", desc: "UART Integration test control", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "", desc: "-" }
+    ]}
+    {name: "ITOP", desc: "UART Integration test overrides", swaccess: "rw",
+     fields: [
+      {bits: "0", name: "TX", desc: "Drive txint when UART_ITCR asserted" }
+      {bits: "1", name: "RX", desc: "Drive rxint when UART_ITCR asserted" }
+      {bits: "2", name: "TXO", desc: "Drive txoint when UART_ITCR asserted" }
+      {bits: "3", name: "RXO", desc: "Drive rxoint when UART_ITCR asserted" }
+      {bits: "4", name: "RXF", desc: "Drive rxfint when UART_ITCR asserted" }
+      {bits: "5", name: "RXB", desc: "Drive rxbint when UART_ITCR asserted" }
+      {bits: "6", name: "RXTO", desc: "Drive rxtoint when UART_ITCR asserted" }
+      {bits: "7", name: "RXPE", desc: "Drive rxpeint when UART_ITCR asserted" }
+    ]}
+    {name: "DVREG", desc: "DV-accessible test register", swaccess: "rw",
+     fields: [
+      {bits: "7:0", name: "", desc: "-" }
+    ]}
+  ]
+}
diff --git a/util/docgen/generate.py b/util/docgen/generate.py
new file mode 100644
index 0000000..05f2de7
--- /dev/null
+++ b/util/docgen/generate.py
@@ -0,0 +1,97 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import logging
+import os
+import shutil
+import sys
+
+import mistletoe
+from pkg_resources import resource_filename
+
+from . import html_data, lowrisc_renderer
+
+
+def generate_doc(src_path, verbose, inlinecss, inlinewave, asdiv):
+    """Generate the HTML document string from the given Markdown source file
+    """
+
+    if src_path == '-':
+        infile = sys.stdin
+    else:
+        if os.path.isfile(src_path):
+            infile = open(src_path, 'r', encoding='UTF-8')
+        else:
+            logging.error("Source is not a file: %s", src_path)
+            return ""
+
+    if (asdiv):
+        outstr = html_data.header_asdiv
+        # no body to add the onload to, so must inline waveforms
+        inlinewave = True
+    elif (inlinewave):
+        outstr = html_data.header_waveinline
+    else:
+        outstr = html_data.header_wavejs
+
+    if (asdiv):
+        logging.info("asdiv: no CSS included")
+    elif (inlinecss):
+        outstr += "<style type='text/css'>"
+        with open(
+                resource_filename('docgen', 'md_html.css'), 'r',
+                encoding='UTF-8') as fin:
+            outstr += fin.read()
+        with open(
+                resource_filename('reggen', 'reg_html.css'), 'r',
+                encoding='UTF-8') as fin:
+            outstr += fin.read()
+        outstr += "</style>"
+    else:
+        outstr += '<link rel="stylesheet" type="text/css" href="md_html.css">'
+        outstr += '<link rel="stylesheet" type="text/css" href="reg_html.css">'
+
+    outstr += html_data.markdown_header
+
+    # lowrisc_renderer.Document rather than mistletoe.Document to get includes
+    with infile:
+        with lowrisc_renderer.LowriscRenderer(
+                srcfile=src_path, wavejs=not inlinewave) as renderer:
+            document = lowrisc_renderer.Document(infile, src_path)
+            rendered = renderer.render(document)
+            tocpos = rendered.find(html_data.toc_mark_head)
+            toc = renderer.toc
+            if tocpos < 0 or len(toc) == 0:
+                outstr += rendered
+            else:
+                tocp = tocpos + len(html_data.toc_mark_head)
+                toci = tocp
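+                # the TOC marker is "<!--TOC n -->"; scan forward to the '-'
+                # of the closing "-->" to extract the maximum heading level n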
+                while rendered[tocp] != '-':
+                    tocp += 1
+                maxlvl = int(rendered[toci:tocp])
+                outstr += rendered[:tocpos]
+                outstr += html_data.toc_title
+                outstr += '<ul>\n'
+                lvl = 2
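+                # walk the collected (level, text, id) entries, opening and
+                # closing <ul> nesting as the heading level changes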
+                for x in toc:
+                    # don't expect H1, collapse to H2 if it is there
+                    wantlvl = x[0] if x[0] > 1 else 2
+                    if (wantlvl > maxlvl):
+                        continue
+                    while lvl < wantlvl:
+                        outstr += '<ul>\n'
+                        lvl += 1
+                    while lvl > wantlvl:
+                        outstr += '</ul>\n'
+                        lvl -= 1
+                    outstr += '<li><a href=#' + x[2] + '>' + x[1] + '</a>\n'
+                while lvl > 1:
+                    outstr += '</ul>\n'
+                    lvl -= 1
+                outstr += rendered[tocpos:]
+
+    outstr += html_data.markdown_trailer
+    outstr += html_data.trailer_asdiv if asdiv else html_data.trailer
+
+    return outstr
diff --git a/util/docgen/hjson_lexer.py b/util/docgen/hjson_lexer.py
new file mode 100644
index 0000000..23e64c1
--- /dev/null
+++ b/util/docgen/hjson_lexer.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+"""
+     Hjson Lexer for pygments
+     ~~~~~~~~~~~~~~~~~~~~~~~~
+
+     Derived from JsonLexer in pygments.lexers.data
+     which is
+     :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
+     :license: BSD, see pygments LICENSE for details.
+
+     Modifications copyright lowRISC contributors
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import (Comment, Error, Keyword, Literal, Name, Number,
+                            Punctuation, String, Text)
+
+
+class HjsonLexer(RegexLexer):
+    """
+     For HJSON data structures.
+
+     .. versionadded:: 1.5
+     """
+
+    name = 'HJSON'
+    aliases = ['hjson']
+    filenames = ['*.hjson']
+    mimetypes = ['application/hjson']
+
+    flags = re.DOTALL
+
+    # integer part of a number
+    int_part = r'-?(0|[1-9]\d*)'
+
+    # fractional part of a number
+    frac_part = r'\.\d+'
+
+    # exponential part of a number
+    exp_part = r'[eE](\+|-)?\d+'
+
+    tokens = {
+        'whitespace': [
+            (r'\s+', Text),
+        ],
+
+        # represents a simple terminal value
+        'simplevalue': [
+            (r'(true|false|null)\b', Keyword.Constant),
+            (('%(int_part)s(%(frac_part)s%(exp_part)s|'
+              '%(exp_part)s|%(frac_part)s)') % vars(), Number.Float),
+            (int_part, Number.Integer),
+            (r'"(\\\\|\\"|[^"])*"', String.Double),
+        ],
+
+        # the right hand side of an object, after the attribute name
+        'objectattribute': [
+            include('value'),
+            (r':', Punctuation),
+            # triple quote is a multiline string
+            # accept any non-quote, single quote plus non-quote
+            # two quotes plus non-quote to cover all cases
+            (r"'''([^']|'[^']|''[^'])*'''", Text),
+            # comma terminates the attribute but expects more
+            (r',', Punctuation, '#pop'),
+            # a closing bracket terminates the entire object, so pop twice
+            (r'\}', Punctuation, '#pop:2'),
+            # comma is optional in hjson so terminate on anything else
+            # but use re syntax so this match does not consume it
+            # This should really only be done if a value or string matched
+            (r'(?=.)', Text, '#pop'),
+        ],
+
+        # a json object - { attr, attr, ... }
+        'objectvalue': [
+            include('whitespace'),
+            # a comment
+            (r'#[^\n]*', Comment.Single),
+            (r'//[^\n]*', Comment.Single),
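+            # an attribute name is either a double-quoted string or bare text
+            # running up to the ':' separator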
+            (r'"(\\\\|\\"|[^"])*"|(\\\\|[^:])*', Name.Tag, 'objectattribute'),
+            (r'\}', Punctuation, '#pop'),
+        ],
+
+        # json array - [ value, value, ... ]
+        'arrayvalue': [
+            include('whitespace'),
+            (r'#[^\n]*', Comment.Single),
+            (r'//[^\n]*', Comment.Single),
+            include('value'),
+            (r',', Punctuation),
+            (r'\]', Punctuation, '#pop'),
+        ],
+
+        # a json value - either a simple value or a complex value
+        # (object or array)
+        'value': [
+            include('whitespace'),
+            (r'#[^\n]*', Comment.Single),
+            (r'//[^\n]*', Comment.Single),
+            include('simplevalue'),
+            (r'\{', Punctuation, 'objectvalue'),
+            (r'\[', Punctuation, 'arrayvalue'),
+        ],
+
+        # the root of a json document should be a value
+        'root': [
+            (r'#[^\n]*', Comment.Single),
+            (r'//[^\n]*', Comment.Single),
+            include('value'),
+            # hjson does not require the outer {}
+            # this is also helpful for styleguide examples!
+            include('objectvalue'),
+        ],
+    }
diff --git a/util/docgen/html_data.py b/util/docgen/html_data.py
new file mode 100644
index 0000000..767730e
--- /dev/null
+++ b/util/docgen/html_data.py
@@ -0,0 +1,88 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+header_wavejs = """
+<html>
+<head>
+<meta charset="UTF-8">
+<script src="https://cdnjs.cloudflare.com/ajax/libs/wavedrom/2.1.2/skins/default.js"
+        type="text/javascript"></script>
+<script src="https://cdnjs.cloudflare.com/ajax/libs/wavedrom/2.1.2/wavedrom.min.js"
+       type="text/javascript"></script>
+</head>
+<body onload="WaveDrom.ProcessAll()">
+"""
+
+header_waveinline = """
+<html>
+<head>
+<meta charset="UTF-8">
+</head>
+"""
+
+header_asdiv = """
+<div>
+"""
+
+markdown_header = """
+<div class="mdown">
+"""
+
+markdown_trailer = """
+</div>
+"""
+
+register_header = """
+<div>
+"""
+
+register_trailer = """
+</div>
+"""
+
+hwcfg_header = """
+<div class="mdown">
+"""
+
+hwcfg_trailer = """
+</div>
+"""
+
+trailer = """
+</body>
+</html>
+"""
+
+trailer_asdiv = """
+</div>
+"""
+
+lowrisc_title_head = """
+<table class="section_heading">
+<tr><td>lowRISC Comportable IP Document</td></tr>
+<tr><td>
+"""
+
+lowrisc_title_tail = """
+</td></tr>
+<tr><td>&copy; lowrisc.org Contributors</td></tr></table>
+"""
+
+section_template = """
+<table class="{cls}" id="{id}">
+<tr><td>{inner}</td></tr></table>
+"""
+
+doctree_head = "<ul>"
+doctree_template = """
+<li> <a href="{link}">{text}</a> </li>
+"""
+doctree_tail = "</ul>"
+
+toc_title = """
+<h2>Table of Contents</h2>
+"""
+
+toc_mark_head = "<!--TOC "
+toc_mark_tail = "-->\n"
diff --git a/util/docgen/lowrisc_renderer.py b/util/docgen/lowrisc_renderer.py
new file mode 100644
index 0000000..7fc410b
--- /dev/null
+++ b/util/docgen/lowrisc_renderer.py
@@ -0,0 +1,405 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Provides lowRISC extension support for rendering Markdown to html.
+{{% }} directives
+!!Reg !!Reg.Field to generate cross reference to registers
+Syntax highlighting with pygments
+Conversion of WaveJSON timing diagrams
+Adapted from examples in mistletoe.contrib
+<https://github.com/miyuchina/mistletoe/blob/master/contrib/>
+"""
+
+import io
+import logging as log
+import os.path as path
+import re
+import subprocess
+import sys
+from itertools import chain
+from os import walk
+from pathlib import Path
+from urllib.parse import urlparse, urlunparse
+
+import hjson
+import mistletoe.block_token
+import mistletoe.span_token
+from mistletoe import HTMLRenderer
+from mistletoe.block_token import BlockToken, CodeFence, add_token, tokenize
+from mistletoe.span_token import EscapeSequence, RawText, SpanToken
+from pkg_resources import resource_filename
+from pygments import highlight
+from pygments.formatters.html import HtmlFormatter
+from pygments.lexers import get_lexer_by_name as get_lexer
+from pygments.lexers import guess_lexer
+from pygments.styles import get_style_by_name as get_style
+
+import reggen.gen_cfg_html as gen_cfg_html
+import reggen.gen_html as gen_html
+import reggen.validate as validate
+from docgen import html_data, mathjax
+from docgen.hjson_lexer import HjsonLexer
+from wavegen import wavesvg
+
+
+# mirrors Document but adds includes
+# have to pull all the sub-files in to the main text so cross-links work
+# By default anchor links only resolve within a single file
+# arguably this is correct isolation but we want to be able to include anchors
+class Document(BlockToken):
+    """
+    Document token with includes.
+    """
+
+    # Called when the include directive starts with a !
+    # to indicate execute the first word as a command with rest as opts
+    # To help avoid mistakes (and minimally help avoid attacks in the case
+    # of a trusted docgen given untrusted input files) the command must
+    # live inside the repo (the example uses a local ls script to
+    # run a command from outside, but the script was reviewed and checked in)
+    def exec_include(self, include_text, basedir):
+        expand = include_text.split(maxsplit=1)
+        cmd = expand[0]
+        opts = '' if len(expand) < 2 else expand[1]
+        abscmd = path.abspath(path.join(basedir, cmd))
+        if not abscmd.startswith(self.treetop):
+            log.error("Blocked include: " + cmd + ' (' + abscmd +
+                      ") is outside the repo.")
+            raise NameError('Command file must be in the repo')
+        # do the cd in the subprocess to avoid save/restore of cwd
+        res = subprocess.run(
+            'cd ' + basedir + '; ' + abscmd + ' ' + opts,
+            shell=True,
+            universal_newlines=True,
+            stdout=subprocess.PIPE).stdout
+        return res.splitlines(keepends=True)
+
+    def add_include(self, l, pat, basedir):
+        lines = []
+        for line in l:
+            match = pat.search(line)
+            # because this is pre-processed a special case is needed to
+            # allow documentation with include command inside back-ticks
+            if (match and not (match.start() > 0 and
+                               line[match.start() - 1] == '`')):
+                lines.append(line[:match.start()] + line[match.end():])
+                if match.group(1)[0] == "!":
+                    try:
+                        res = self.exec_include(match.group(1)[1:], basedir)
+                        lines.extend(self.add_include(res, pat, basedir))
+                    except NameError:
+                        lines.append("Blocked execution of " + match.group(1))
+                else:
+                    incfname = path.join(basedir, match.group(1))
+                    try:
+                        incfile = open(incfname, 'r', encoding='UTF-8')
+                        with incfile:
+                            newdir = path.dirname(incfname)
+                            lines.extend(
+                                self.add_include(incfile, pat, newdir))
+                    except OSError as err:
+                        log.error("Could not open include file: " + str(err))
+                        lines.append("Failed to include " + incfname + "\n\n")
+            else:
+                lines.append(line)
+        return lines
+
+    def __init__(self, lines, srcfile):
+        docdir = path.dirname(resource_filename('docgen', 'md_html.css'))
+        self.treetop = path.abspath(path.join(docdir, "../.."))
+        pat = re.compile(r"\{\{\% *include +(.+?) *\}\}")
+        basedir = ""
+        if len(srcfile) > 0:
+            basedir = path.dirname(srcfile)
+        if basedir == '':
+            basedir = '.'
+        if isinstance(lines, str):
+            lines = lines.splitlines(keepends=True)
+
+        lines = self.add_include(lines, pat, basedir)
+        self.footnotes = {}
+        mistletoe.block_token._root_node = self
+        mistletoe.span_token._root_node = self
+        self.children = tokenize(lines)
+        mistletoe.span_token._root_node = None
+        mistletoe.block_token._root_node = None
+
+
+# mirrors the CodeFence in mistletoe but with additional parameter
+# note this maintains the bug with `~` matching the RE
+class CodeFenceDirective(CodeFence):
+    """
+    Code fence with language and directive
+
+    Supports code blocks starting
+    ```language {directive}
+    Up to 3 spaces indentation, minimum of 3 fence characters,
+    optional spaces, language text, optional spaces, open {,
+    optional spaces, directive text, optional spaces, close }
+    at the moment there cannot be spaces inside language or directive
+    """
+    # future may want something like \{ *([^\}]*\} for multiple directives
+    pattern = re.compile(r'( {0,3})((?:`|~){3,}) *(\S+) *\{ *(\S*) *\}')
+    _open_info = None
+
+    def __init__(self, match):
+        lines, open_info = match
+        self.language = EscapeSequence.strip(open_info[2])
+        self.directive = EscapeSequence.strip(open_info[3])
+        self.children = (RawText(''.join(lines)), )
+
+    @classmethod
+    def start(cls, line):
+        match_obj = cls.pattern.match(line)
+        if not match_obj:
+            return False
+        prepend, leader, lang, direct = match_obj.groups()
+        if (leader[0] in lang or leader[0] in direct or
+                leader[0] in line[match_obj.end():]):
+            return False
+        cls._open_info = len(prepend), leader, lang, direct
+        return True
+
+
+class LowriscEscape(SpanToken):
+    pattern = re.compile(r"\{\{\% *(.+?) +(.+?) *\}\}")
+
+    def __init__(self, match):
+        self.type = match.group(1)
+        self.text = match.group(2)
+
+
+class RegRef(SpanToken):
+    pattern = re.compile(r"!!([A-Za-z0-9_.]+)")
+
+    def __init__(self, match):
+        self.rname = match.group(1)
+
+
+class LowriscRenderer(mathjax.MathJaxRenderer):
+    formatter = HtmlFormatter()
+    formatter.noclasses = True
+
+    def __init__(self, *extras, style='default', srcfile='', wavejs=False):
+        # yapf requests different formatting for this code block depending on
+        # the Python3 version. Work around that by disabling yapf for this code
+        # block.
+        # Bug: https://github.com/google/yapf/issues/696
+        # yapf: disable
+        super().__init__(*chain((LowriscEscape, RegRef,
+                                 CodeFenceDirective), extras))
+        # yapf: enable
+        self.formatter.style = get_style(style)
+        self.regs = None
+        self.wavejs = wavejs
+        self.num_svg = 0
+        # compute base of srcfile to allow relative imports
+        basedir = ""
+        if len(srcfile) > 0:
+            basedir = path.dirname(srcfile)
+        self.basedir = basedir
+        self.toc = []
+
+    # Convert the inner text of header or section into id for html href
+    # inner is a flat string but may have html tags
+    # html id rules are:
+    #    Must contain at least one character
+    #    Must not contain any space characters
+    # Want to match github, can't find its exact rules
+    # The id is derived from the heading text by stripping html tags,
+    # changing whitespace to - and lower-casing.
+    # e.g. 'Theory of operation' becomes 'theory-of-operation'
+    # TODO worry about & eg 'Foo & Bar' becomes 'foo-&-bar'
+    def id_from_inner(self, inner):
+        return re.sub(r'\s+', '-', re.sub(r'<.+?>', '', inner)).lower()
+
+    def render_lowrisc_code(self, token, directive):
+        code = token.children[0].content
+        # parser seems to get confused (eg by `~`) and makes empty calls
+        if len(code) == 0:
+            log.warn('Unexpected empty code block. Check for `~`')
+            return ""
+        # waveforms look like embedded code in the markdown
+        # but the WaveDrom javascript wants it in a script tag
+        if token.language == "wavejson":
+            if self.wavejs:
+                return '<script type="WaveDrom">' + code + '</script>'
+            else:
+                try:
+                    wvobj = hjson.loads(code, use_decimal=True)
+                except ValueError as err:
+                    log.warn('wavejson parse failed at line ' +
+                             str(err.lineno) + ': ' + err.msg)
+                    return '<pre>Error line '  + str(err.lineno) + \
+                        ': ' + err.msg + " in:\n" + code[:err.pos] + \
+                        '</pre><pre style="color:red">' + \
+                        code[err.pos:] + '</pre>'
+                self.num_svg += 1
+                return wavesvg.convert(wvobj, self.num_svg - 1)
+        else:
+            # pygments.util.ClassNotFound subclass of ValueError
+            lexer = None
+            if (token.language):
+                if token.language == 'hjson':
+                    lexer = HjsonLexer()
+                else:
+                    try:
+                        lexer = get_lexer(token.language)
+                    except ValueError:
+                        log.info('Failed to get lexer for language=' +
+                                 token.language)
+                        lexer = None
+            if lexer is None:
+                try:
+                    lexer = guess_lexer(code)
+                    log.info('Guess lexer as ' + lexer.name)
+                except ValueError:
+                    log.info('Failed to guess lexer for code=' + code)
+                    lexer = None
+            if lexer:
+                if directive == '.good':
+                    self.formatter.cssstyles='background:#e0ffe0; ' \
+                        'border-left-color: #108040;'
+                elif directive == '.bad':
+                    self.formatter.cssstyles='background:#ffe0e0; ' \
+                        'border-left-color: #c04030'
+                else:
+                    self.formatter.cssstyles = ''
+
+                return highlight(code, lexer, self.formatter)
+            else:
+                return super().render_block_code(token)
+
+    def render_code_fence_directive(self, token):
+        return self.render_lowrisc_code(token, token.directive)
+
+    def render_block_code(self, token):
+        return self.render_lowrisc_code(token, '')
+
+    def render_lowrisc_escape(self, token):
+        # plan eventually to allow lowrisc-doc-hdr=doctype
+        if token.type[:15] == "lowrisc-doc-hdr":
+            return html_data.lowrisc_title_head + token.text + \
+                   html_data.lowrisc_title_tail
+        if token.type == "toc":
+            return html_data.toc_mark_head + token.text + \
+                   html_data.toc_mark_tail
+        if token.type == "regfile":
+            regfile = open(
+                path.join(self.basedir, token.text), 'r', encoding='UTF-8')
+            with regfile:
+                try:
+                    obj = hjson.load(
+                        regfile,
+                        use_decimal=True,
+                        object_pairs_hook=validate.checking_dict)
+                except ValueError:
+                    raise SystemExit(sys.exc_info()[1])
+            if validate.validate(obj) == 0:
+                log.info("Generated register object\n")
+                self.regs = obj
+            else:
+                log.warn("Register import failed\n")
+                self.regs = None
+            return ""
+        if token.type == "registers":
+            if self.regs is None:
+                return "<B>Errors parsing registers prevent insertion.</B>"
+            outbuf = io.StringIO()
+            # note for CSS need to escape the mdown class on the div
+            outbuf.write("</div>" + html_data.register_header)
+            gen_html.gen_html(self.regs, outbuf, toclist=self.toc, toclevel=3)
+            outbuf.write(html_data.register_trailer + '<div class="mdown">')
+            generated = outbuf.getvalue()
+            outbuf.close()
+            return generated
+        if token.type == "cfgfile":
+            log.error("Deprecated lowRISC token cfgfile ignored. Config is now"\
+                      " in a single file with the registers!")
+            return ""
+        if token.type == "hwcfg":
+            if self.regs is None:
+                return "<B>Errors parsing configuration prevent insertion.</B>"
+            outbuf = io.StringIO()
+            # note for CSS need to escape the mdown class on the div
+            outbuf.write("</div>" + html_data.hwcfg_header)
+            gen_cfg_html.gen_cfg_html(self.regs, outbuf)
+            outbuf.write(html_data.hwcfg_trailer + '<div class="mdown">')
+            generated = outbuf.getvalue()
+            outbuf.close()
+            return generated
+        if token.type == "section1":
+            # TODO should token.text get parsed to allow markdown in it?
+            id = self.id_from_inner(token.text)
+            self.toc.append((2, token.text, id))
+            return html_data.section_template.format(
+                cls="section_heading", id=id, inner=token.text)
+        if token.type == "section2":
+            # TODO should token.text get parsed to allow markdown in it?
+            id = self.id_from_inner(token.text)
+            self.toc.append((3, token.text, id))
+            return html_data.section_template.format(
+                cls="subsection_heading", id=id, inner=token.text)
+        if token.type == "doctree":
+            md_paths = []
+            return_string = ''
+            subdirs = [path.join(self.basedir, s) for s in token.text.split()]
+            for subdir in sorted(subdirs):
+                md_paths.extend(sorted(Path(subdir).rglob('*.md')))
+            for md_path in md_paths:
+                rel_md_path = md_path.relative_to(self.basedir)
+                return_string += html_data.doctree_template.format(
+                    link=rel_md_path.with_suffix('.html'),
+                    text=rel_md_path.with_suffix(''))
+            return html_data.doctree_head + return_string + html_data.doctree_tail
+
+        bad_tag = '{{% ' + token.type + ' ' + token.text + ' }}'
+        log.warn("Unknown lowRISC tag " + bad_tag)
+        return bad_tag
+
+    def render_reg_ref(self, token):
+        if self.regs is None:
+            log.warn("!!" + token.rname + ": no register import was done.")
+            return '!!' + token.rname
+        cname = self.regs['name']
+        base = token.rname.partition('.')[0].lower()
+        if not base in self.regs['genrnames']:
+            log.warn("!!" + token.rname + " not found in register list.")
+            return '!!' + token.rname
+
+        if token.rname[-1] == ".":
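+            # a trailing '.' ends the sentence rather than the register name;
+            # keep it outside the generated link text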
+            return '<a href="#Reg_' + base + '"><code class=\"reg\">' + \
+                cname + "." + token.rname[:-1] + '</code></a>.'
+        else:
+            return '<a href="#Reg_' + base + '"><code class=\"reg\">' + \
+                cname + "." + token.rname + '</code></a>'
+
+    # copied from mistletoe/html_renderer.py and id added
+    # override heading to insert reference for anchor
+    def render_heading(self, token):
+        template = '<h{level} id="{id}">{inner}</h{level}>'
+        inner = self.render_inner(token)
+        id = self.id_from_inner(inner)
+        self.toc.append((token.level, inner, id))
+        return template.format(level=token.level, inner=inner, id=id)
+
+    # decorator for link rendering functions in class HTMLRenderer
+    # converts relative .md link targets to .html link targets
+    def _convert_local_links(func):
+        def _wrapper_convert_local_links(*args, **kwargs):
+            target_url = urlparse(args[1].target)
+            target_path = Path(target_url.path)
+            # check link is not absolute
+            if not target_url.netloc and target_path.suffix in ['.md', '.mkd']:
+                target_url = target_url._replace(
+                    path=str(target_path.with_suffix('.html')))
+                args[1].target = urlunparse(target_url)
+
+            return func(*args, **kwargs)
+
+        return _wrapper_convert_local_links
+
+    # apply to the link rendering functions inherited from HTMLRenderer
+    render_link = _convert_local_links(HTMLRenderer.render_link)
+    render_auto_link = _convert_local_links(HTMLRenderer.render_auto_link)
diff --git a/util/docgen/mathjax.py b/util/docgen/mathjax.py
new file mode 100644
index 0000000..ca3eb8e
--- /dev/null
+++ b/util/docgen/mathjax.py
@@ -0,0 +1,34 @@
+# Copied from mistletoe.contrib
+# <https://github.com/miyuchina/mistletoe/blob/master/contrib/mathjax.py>
+#
+# mistletoe is licensed under the MIT license, see LICENSE.mistletoe
+#
+"""
+Provides MathJax support for rendering Markdown with LaTeX to html.
+Taken from mistletoe.contrib <https://github.com/miyuchina/mistletoe/blob/master/contrib/mathjax.py>
+"""
+
+from mistletoe.html_renderer import HTMLRenderer
+from mistletoe.latex_renderer import LaTeXRenderer
+
+
+class MathJaxRenderer(HTMLRenderer, LaTeXRenderer):
+    """
+    MRO will first look for render functions under HTMLRenderer,
+    then LaTeXRenderer.
+    """
+    mathjax_src = '<script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.0/MathJax.js?config=TeX-MML-AM_CHTML"></script>\n'
+
+    def render_math(self, token):
+        """
+        Ensure Math tokens are all enclosed in two dollar signs.
+        """
+        if token.content.startswith('$$'):
+            return self.render_raw_text(token)
+        return '${}$'.format(self.render_raw_text(token))
+
+    def render_document(self, token):
+        """
+        Append CDN link for MathJax to the end of <body>.
+        """
+        return super().render_document(token) + self.mathjax_src
diff --git a/util/docgen/md_html.css b/util/docgen/md_html.css
new file mode 100644
index 0000000..4a5e179
--- /dev/null
+++ b/util/docgen/md_html.css
@@ -0,0 +1,125 @@
+/* Stylesheet for mistletoe output in class=mdown div */
+/* Copyright lowRISC contributors. */
+/* Licensed under the Apache License, Version 2.0, see LICENSE for details.*/
+/* SPDX-License-Identifier: Apache-2.0 */
+
+.mdown {
+  -ms-text-size-adjust: 100%;
+  -webkit-text-size-adjust: 100%;
+  color: #24292e;
+  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
+  font-size: 16px;
+  word-wrap: break-word;
+  width: 80%;
+  margin-left:auto; 
+  margin-right:auto;
+}
+
+.mdown code,
+.mdown pre {
+  font-family: monospace, monospace;
+  font-size: 1em;
+  display: inline;
+  max-width: auto;
+  padding: 0;
+  margin: 0;
+  overflow: visible;
+  line-height: inherit;
+  word-wrap: normal;
+  background-color: transparent;
+  border: 0;
+}
+
+.mdown table {
+  border-spacing: 0;
+  border-collapse: collapse;
+  display: block;
+  width: 100%;
+  overflow: auto;
+}
+
+.mdown table th {
+  font-weight: 600;
+}
+
+.mdown table th,
+.mdown table td {
+  padding: 6px 13px;
+  border: 1px solid black;
+}
+
+.mdown table tr {
+  background-color: #fff;
+  border-top: 1px solid #c6cbd1;
+}
+
+.mdown table tr:nth-child(even) {
+  background-color: lightgray;
+}
+
+p.titleright {
+  font-size: 1em;
+  text-align: right;
+  font-weight: 600;
+}
+
+h1 {
+    text-align: center;
+}
+p.copy {
+    font-weight: 100;
+    font-size: 1em;
+    text-align: left;
+}
+
+code.reg {
+    font-family: monospace, monospace;
+    font-size: 0.8em;
+    color: blue;
+}
+
+.highlight {
+    border-left: 2px solid;
+    font-size: 80%;
+    padding: 12px 8px;
+}
+
+table.section_heading {
+    border: 2px solid black;
+    width: 100%;
+    font-size: 140%;
+    background-color:#ffe8e8;
+    text-align:center;
+    vertical-align:middle;
+    font-family: serif;
+    display: table;
+}
+
+table.section_heading tr,
+table.section_heading td {
+    width: 100%;
+    background-color:#ffe8e8;
+    border: 0px;
+}
+
+table.subsection_heading {
+    border: 2px solid black;
+    width: 100%;
+    font-size: 110%;
+    background-color:white;
+    text-align:center;
+    vertical-align:middle;
+    font-family: serif;
+    display: table;
+}
+
+table.subsection_heading tr,
+table.subsection_heading td {
+    width: 100%;
+    background-color:white;
+    border: 0px;
+}
diff --git a/util/example/tlgen/dv/device_sram.sv b/util/example/tlgen/dv/device_sram.sv
new file mode 100644
index 0000000..3f0037a
--- /dev/null
+++ b/util/example/tlgen/dv/device_sram.sv
@@ -0,0 +1,55 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+
+module device_sram (
+  input clk_i,
+
+  input  tlul_pkg::tl_h2d_t tl_i,
+  output tlul_pkg::tl_d2h_t tl_o
+);
+
+  import tlul_pkg::*;
+
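+  // Sparse backing store: associative array indexed by the word address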
+  logic [31:0] storage [*];
+
+  initial begin
+    tl_o.a_ready = 1'b1;
+    tl_o.d_valid = 1'b0;
+
+    forever begin
+      @(posedge clk_i iff (tl_i.a_valid == 1'b1));
+      tl_o.d_error = 1'b0;
+      tl_o.d_source = tl_i.a_source;
+      tl_o.d_size = tl_i.a_size;
+      tl_o.d_sink = 0;
+      tl_o.d_param = 2'h0;
+      tl_o.d_user = 'h0;
+      if (tl_i.a_opcode == Get) begin
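+        // Reads return previously stored data; unseen addresses are seeded
+        // with a random value that is then remembered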
+        if (storage.exists(tl_i.a_address[31:2])) begin
+          tl_o.d_data = storage[tl_i.a_address[31:2]];
+        end else begin
+          tl_o.d_data = $urandom();
+          storage[tl_i.a_address[31:2]] = tl_o.d_data;
+        end
+
+        tl_o.a_ready = 1'b0;
+        tl_o.d_opcode = AccessAckData;
+        tl_o.d_valid = 1'b1;
+        @(posedge clk_i iff (tl_i.d_ready == 1'b1));
+        tl_o.d_valid = 1'b0;
+        tl_o.a_ready = 1'b1;
+      end else if (tl_i.a_opcode == PutFullData) begin
+        storage[tl_i.a_address[31:2]] = tl_i.a_data;
+        tl_o.a_ready = 1'b0;
+        tl_o.d_opcode = AccessAck;
+        tl_o.d_valid = 1'b1;
+        @(posedge clk_i iff (tl_i.d_ready == 1'b1));
+        tl_o.d_valid = 1'b0;
+        tl_o.a_ready = 1'b1;
+      end
+    end
+  end
+
+endmodule
diff --git a/util/example/tlgen/top_jasmine.hjson b/util/example/tlgen/top_jasmine.hjson
new file mode 100644
index 0000000..200022f
--- /dev/null
+++ b/util/example/tlgen/top_jasmine.hjson
@@ -0,0 +1,76 @@
+{ name: "main",
+  clock: "main", // Main clock, used in sockets
+  nodes: [
+    { name:  "ibexif",
+      type:  "host",
+      clock: "main",
+    },
+    { name:  "ibexlsu",
+      type:  "host",
+      clock: "periph",
+    },
+    { name:  "dm_sba", // DM
+      type:  "host",
+      clock: "jtag",
+    },
+    { name:      "rom",
+      type:      "device",
+      clock:     "main",
+      base_addr: "0x00000000",
+      size_byte: "0x10000",   // 64kB
+    },
+    { name: "debug_rom", type: "device", clock: "jtag", base_addr: "0x10040000", size_byte: "0x1000" },
+    { name:      "sram",
+      type:      "device",
+      clock:     "main",
+      base_addr: "0x10000000",
+      size_byte: "0x10000",
+    },
+    { name:      "flash_ctrl",
+      type:      "device",
+      // clock is omitted --> use default clock
+      base_addr: "0x10080000",
+      size_byte: "0x10000",
+    },
+    { name:      "uart0",
+      type:      "device",
+      clock:     "periph",
+      base_addr: "0x40000000",
+      size_byte: "0x10000",
+    },
+    { name:      "gpio0",
+      type:      "device",
+      clock:     "periph",
+      base_addr: "0x40010000",
+      size_byte: "0x10000",
+    },
+    { name:      "spi_device",
+      type:      "device",
+      clock:     "periph",
+      base_addr: "0x40020000",
+      size_byte: "0x10000",
+    },
+    { name:      "spi_host",
+      type:      "device",
+      clock:     "periph",
+      base_addr: "0x40030000",
+      size_byte: "0x10000",
+    },
+    { name: "timer0",   type: "device", clock: "main", base_addr: "0x50000000", size_byte: "0x1000" },
+    { name: "watchdog", type: "device", clock: "main", base_addr: "0x50010000", size_byte: "0x1000" },
+    { name: "alert_h",  type: "device", clock: "main", base_addr: "0x50020000", size_byte: "0x1000" },
+
+    // Crypto domain : use crypt clock
+    { name: "aes",    type: "device", clock: "crypt", base_addr: "0x60000000", size_byte: "0x8000"  },
+    { name: "hmac",   type: "device", clock: "crypt", base_addr: "0x60010000", size_byte: "0x10000" },
+    { name: "keymgr", type: "device", clock: "crypt", base_addr: "0x60080000", size_byte: "0x20000" },
+  ],
+  connections: {
+    ibexif: ["rom", "debug_rom", "sram", "flash_ctrl"],
+    ibexlsu: ["rom", "sram", "flash_ctrl", "uart0", "gpio0", "spi_device", "spi_host", "timer0",
+              "watchdog", "alert_h", "aes", "hmac", "keymgr"],
+    dm_sba: ["sram", "flash_ctrl", "uart0", "gpio0", "spi_device", "spi_host", "timer0",
+             "watchdog", "alert_h", "aes", "hmac", "keymgr"],
+  },
+
+}
diff --git a/util/example/xbar/rtl/xbar.sv b/util/example/xbar/rtl/xbar.sv
new file mode 100644
index 0000000..f58f8d9
--- /dev/null
+++ b/util/example/xbar/rtl/xbar.sv
@@ -0,0 +1,57 @@
+module xbar(
+  input clk,
+  input rst_n,
+
+  xbar_if.device xhif0,
+  xbar_if.device xhif1,
+  xbar_if.host   xdif0,
+  xbar_if.host   xdif1,
+  xbar_if.host   xdif2
+);
+
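+  // Synchronous host/device FIFO bridging host port xhif0 to device port xdif0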
+  hdb_fifo_sync #(
+    .ReqPass  (1'b1),
+    .RspPass  (1'b1),
+    .ReqDepth (2),
+    .RspDepth (2)
+  ) u_dut (
+    .clk_i    (xhif0.clk),
+    .rst_ni   (rst_n),
+    .hdb_h_i  (xhif0.host.h2d),
+    .hdb_h_o  (xhif0.host.d2h),
+    .hdb_d_o  (xdif0.device.h2d),
+    .hdb_d_i  (xdif0.device.d2h)
+  );
+  /*
+  assign xdif0.req_valid = xhif0.req_valid;
+  assign xdif0.req_addr  = xhif0.req_addr ;
+  assign xdif0.req_wr    = xhif0.req_wr   ;
+  assign xdif0.req_wdata = xhif0.req_wdata;
+  assign xdif0.req_wstrb = xhif0.req_wstrb;
+  assign xdif0.req_attr  = xhif0.req_attr ;
+  assign xdif0.req_id    = xhif0.req_id   ;
+  assign xhif0.req_ready = xdif0.req_ready;
+
+  assign xhif0.rsp_valid = xdif0.rsp_valid;
+  assign xhif0.rsp_rdata = xdif0.rsp_rdata;
+  assign xhif0.rsp_attr  = xdif0.rsp_attr ;
+  assign xhif0.rsp_id    = xdif0.rsp_id   ;
+  assign xdif0.rsp_ready = xhif0.rsp_ready;
+  */
+
+  hdb_fifo_async #(
+    .ReqDepth (2),
+    .RspDepth (2)
+  ) u_dut_async (
+    .clk_h_i  (xhif1.clk),
+    .rst_h_ni (rst_n),
+    .clk_d_i  (xdif1.clk),
+    .rst_d_ni (rst_n),
+
+    .hdb_h_i  (xhif1.host.h2d),
+    .hdb_h_o  (xhif1.host.d2h),
+    .hdb_d_o  (xdif1.device.h2d),
+    .hdb_d_i  (xdif1.device.d2h)
+  );
+
+endmodule
diff --git a/util/fpga/README.md b/util/fpga/README.md
new file mode 100644
index 0000000..969e2af
--- /dev/null
+++ b/util/fpga/README.md
@@ -0,0 +1,19 @@
+# FPGA Splice flow
+This is an FPGA utility script which embeds the generated boot ROM image into the FPGA bitstream.
+The script assumes a pre-generated FPGA bit file already exists in the build directory. The boot ROM mem file is generated automatically.
+
+## How to run the script
+Utility script to load the MEM contents into the BRAM of the FPGA bitfile.
+* Usage:
+```console
+$ cd $REPO_TOP
+$ ./util/fpga/splice_nexysvideo.sh
+```
+
+The updated output bitfile is written next to the raw Vivado bitfile, at
+`build/lowrisc_systems_top_earlgrey_nexysvideo_0.1/synth-vivado/lowrisc_systems_top_earlgrey_nexysvideo_0.1.splice.bit`
+
+This directory contains the following files:
+* splice_nexysvideo.sh - master script
+* bram_load.mmi - memory map info file telling the Vivado tool which FPGA BRAM locations the SW contents should be placed in
+* addr4x.py - helper script used underneath to convert addresses to match the FPGA BRAM architecture (see the example below)
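+
+For reference, the address conversion step can also be run by hand on the
+intermediate `.brammem` file produced by srec_cat (a minimal sketch, assuming
+the default `boot_rom` program name used by the master script):
+```console
+$ cd sw/boot_rom
+$ ../../util/fpga/addr4x.py -i boot_rom.brammem -o boot_rom.mem
+```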
diff --git a/util/fpga/addr4x.py b/util/fpga/addr4x.py
new file mode 100755
index 0000000..4f1b14c
--- /dev/null
+++ b/util/fpga/addr4x.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Utility script
+Converts the linear addresses generated by srec_cat to suit the FPGA BRAM
+architecture, which needs word addressing. Example:
+0x0 0x00000010
+0x1 0x000000FF
+0x2 0x00000088
+
+get converted to
+
+0x0 0x00000010
+0x4 0x000000FF
+0x8 0x00000088 """
+
+import argparse
+import imp
+import logging
+import os
+import sys
+from pathlib import Path
+
+DESC = """addr4x.py converts the addresses generated in the mem file by
+srec_cat to suit the BRAM memory architecture, which needs word addressing"""
+
+
+def main(argv):
+    parser = argparse.ArgumentParser(prog="addr4x.py", description=DESC)
+    parser.add_argument('--infile',
+                        '-i',
+                        dest='inputfile',
+                        type=argparse.FileType('r', encoding='UTF-8'),
+                        required=True,
+                        help='Input Mem file')
+    parser.add_argument('--outfile',
+                        '-o',
+                        dest='outputfile',
+                        type=argparse.FileType('w', encoding='UTF-8'),
+                        required=True,
+                        help='Output Mem file')
+    args = parser.parse_args()
+    in_file_path = Path(args.inputfile.name).resolve()
+    with open(in_file_path) as file:
+        for line in file:
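+            # lines containing "sourceforge" (the srec_cat banner) are
+            # skipped; data lines have the form "@<hex address> <data word>"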
+            if "sourceforge" not in line:
+                a = line.split("@")
+                b = a[1].split(" ")
+                mult = int(b[0], 16)
+                final = "@" + hex(mult * 4)[2:] + " " + b[1]
+                args.outputfile.write(final)
+
+
+if __name__ == "__main__":
+    main(sys.argv)
diff --git a/util/fpga/bram_load.mmi b/util/fpga/bram_load.mmi
new file mode 100644
index 0000000..3f31a1b
--- /dev/null
+++ b/util/fpga/bram_load.mmi
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<MemInfo Version="1" Minor="0">
+  <Processor Endianness="Little" InstPath="dummy">
+  <AddressSpace Name="axi_bram_ctrl_0_bram" Begin="0" End="8191">
+      <BusBlock>
+        <BitLane MemType="RAMB32" Placement="X4Y18">
+          <DataWidth MSB="15" LSB="0"/>
+          <AddressRange Begin="0" End="2047"/>
+          <Parity ON="false" NumBits="0"/>
+        </BitLane>
+        <BitLane MemType="RAMB32" Placement="X4Y19">
+          <DataWidth MSB="31" LSB="16"/>
+          <AddressRange Begin="0" End="2047"/>
+          <Parity ON="false" NumBits="0"/>
+        </BitLane>
+      </BusBlock>
+    </AddressSpace>
+  </Processor>
+<Config>
+  <Option Name="Part" Val="xc7a200tsbg484-1"/>
+</Config>
+</MemInfo>
diff --git a/util/fpga/splice_nexysvideo.sh b/util/fpga/splice_nexysvideo.sh
new file mode 100755
index 0000000..f66440f
--- /dev/null
+++ b/util/fpga/splice_nexysvideo.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+
+# Utility script to load MEM contents into the BRAM of the FPGA bitfile
+# Usage:
+#   cd $REPO_TOP
+#   ./util/fpga/splice_nexysvideo.sh
+
+# Updated bitfile located : at the same place as raw vivado bitfile @
+# $REPO_TOP/build/lowrisc_systems_top_earlgrey_nexysvideo_0.1/synth-vivado/
+#  lowrisc_systems_top_earlgrey_nexysvideo_0.1.splice.bit
+
+
+PROGRAM=boot_rom
+
+cd sw/boot_rom
+make clean ; make
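+# Convert the boot_rom binary to a Vivado mem file, fix up the addressing,
+# then splice the result into the bitstream with updatemem.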
+srec_cat ${PROGRAM}.bin -binary -offset 0x0 -o ${PROGRAM}.brammem \
+  -vmem -Output_Block_Size 4;
+../../util/fpga/addr4x.py -i ${PROGRAM}.brammem -o ${PROGRAM}.mem
+updatemem -force --meminfo ../../util/fpga/bram_load.mmi --data ${PROGRAM}.mem \
+  --bit ../../build/lowrisc_systems_top_earlgrey_nexysvideo_0.1/synth-vivado/\
+lowrisc_systems_top_earlgrey_nexysvideo_0.1.bit  --proc dummy \
+  --out ../../build/lowrisc_systems_top_earlgrey_nexysvideo_0.1/synth-vivado/\
+lowrisc_systems_top_earlgrey_nexysvideo_0.1.splice.bit
diff --git a/util/get-lfsr-coeffs.py b/util/get-lfsr-coeffs.py
new file mode 100755
index 0000000..5c6e96b
--- /dev/null
+++ b/util/get-lfsr-coeffs.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import argparse
+import glob
+import os
+import shutil
+import sys
+
+import wget
+
+USAGE = """./get-lfsr-coeffs.py [-t <temporary folder>] [-o <outfile>] [-f]
+
+Downloads lfsr constants from https://users.ece.cmu.edu/~koopman/lfsr/
+and dumps them in SystemVerilog format (for use in prim_lfsr.sv).
+"""
+
+MIN_LFSR_LEN = 4
+MAX_LFSR_LEN = 64
+BASE_URL = 'https://users.ece.cmu.edu/~koopman/lfsr/'
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        prog="get-lfsr-coeffs",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        usage=USAGE,
+        description=__doc__,
+        epilog='defaults or the filename - can be used for stdin/stdout')
+    parser.add_argument(
+        '-t',
+        '--tempfolder',
+        help="""temporary folder to download the lfsr constant files
+to (defaults to lfsr_tmp)""",
+        default='lfsr_tmp')
+    parser.add_argument('-f',
+                        '--force',
+                        help='overwrites tempfolder',
+                        action='store_true')
+    parser.add_argument('-o',
+                        '--output',
+                        type=argparse.FileType('w'),
+                        default=sys.stdout,
+                        metavar='file',
+                        help='Output file (default stdout)')
+
+    args = parser.parse_args()
+    outfile = args.output
+
+    if args.force and os.path.exists(args.tempfolder):
+        shutil.rmtree(args.tempfolder)
+
+    if not os.path.exists(args.tempfolder):
+        # download coefficient files
+        os.makedirs(args.tempfolder, exist_ok=args.force)
+        os.chdir(args.tempfolder)
+        for k in range(MIN_LFSR_LEN, MAX_LFSR_LEN + 1):
+            url = '%s%d.txt' % (BASE_URL, k)
+            print("\nDownloading %d bit LFSR coeffs from %s..." % (k, url))
+            wget.download(url)
+        print("")
+
+        # select first coefficient in each file and print to SV LUT
+        with outfile:
+            decl_str = "localparam logic [%d:0] coeffs [%d:%d] = '{ " % (
+                MAX_LFSR_LEN - 1, MIN_LFSR_LEN, MAX_LFSR_LEN)
+            outfile.write(decl_str)
+            comma = ',\n'
+            spaces = ''
+            for k in range(MIN_LFSR_LEN, MAX_LFSR_LEN + 1):
+                filename = '%d.txt' % k
+                with open(filename) as infile:
+                    # read the first line
+                    poly_coeffs = infile.readline().strip()
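+                    # the last entry gets no trailing comma; later entries are
+                    # indented to line up under the localparam declaration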
+                    if k == MAX_LFSR_LEN:
+                        comma = ""
+                    if k == MIN_LFSR_LEN + 1:
+                        spaces = ' ' * len(decl_str)
+                    outfile.write("%s%d'h%s%s" %
+                                  (spaces, MAX_LFSR_LEN, poly_coeffs, comma))
+            outfile.write(' };\n')
+    else:
+        print("Temporary directory already exists, abort...")
+
+
+if __name__ == '__main__':
+    main()
diff --git a/util/get-toolchain.py b/util/get-toolchain.py
new file mode 100755
index 0000000..5fc5ef8
--- /dev/null
+++ b/util/get-toolchain.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import argparse
+import json
+import os
+import subprocess
+import sys
+import tempfile
+from urllib.request import urlopen, urlretrieve
+
+TOOLCHAIN_VERSION = 'latest'
+RELEASES_URL_BASE = 'https://api.github.com/repos/lowRISC/lowrisc-toolchains/releases'
+ASSET_PREFIX = "lowrisc-toolchain-gcc-rv32imc-"
+TARGET_DIR = '/tools/riscv'
+
+
+def get_download_url(toolchain_version):
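+    # GitHub releases API: 'latest' maps to /releases/latest, a specific
+    # version maps to /releases/tags/<version>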
+    if toolchain_version == 'latest':
+        releases_url = '%s/%s' % (RELEASES_URL_BASE, toolchain_version)
+    else:
+        releases_url = '%s/tags/%s' % (RELEASES_URL_BASE, toolchain_version)
+    with urlopen(releases_url) as f:
+        info = json.loads(f.read().decode('utf-8'))
+        return [
+            a["browser_download_url"] for a in info["assets"]
+            if a["name"].startswith(ASSET_PREFIX)
+        ][0]
+
+
+def download(url):
+    print("Downloading toolchain from %s" % (url, ))
+    tmpfile = tempfile.mktemp()
+    urlretrieve(url, tmpfile)
+    return tmpfile
+
+
+def install(archive_file, target_dir):
+    os.makedirs(target_dir)
+
+    cmd = [
+        'tar', '-x', '-f', archive_file, '--strip-components=1', '-C',
+        target_dir
+    ]
+    subprocess.run(cmd, check=True)
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--target-dir',
+        '-t',
+        required=False,
+        default=TARGET_DIR,
+        help="Target directory (must not exist) (default: %(default)s)")
+    parser.add_argument(
+        '--release-version',
+        '-r',
+        required=False,
+        default=TOOLCHAIN_VERSION,
+        help="Toolchain version (default: %(default)s)")
+    args = parser.parse_args()
+
+    target_dir = args.target_dir
+    toolchain_version = args.release_version
+
+    if os.path.exists(args.target_dir):
+        sys.exit('Target directory %s already exists. Delete it first if you '
+                 'want to re-download the toolchain.' % (target_dir, ))
+
+    download_url = get_download_url(toolchain_version)
+    archive_file = None
+    try:
+        archive_file = download(download_url)
+        install(archive_file, target_dir)
+    finally:
+        # only remove the archive if the download actually produced a file
+        if archive_file:
+            os.remove(archive_file)
+
+    print('Toolchain downloaded and installed to %s' % (target_dir, ))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/util/index.md b/util/index.md
new file mode 100644
index 0000000..96eb99b
--- /dev/null
+++ b/util/index.md
@@ -0,0 +1,5 @@
+# Tools
+
+These are predominantly READMEs with details about the tooling scripts. Make sure to also check the corresponding [Reference Manuals](../doc/rm/index.md).
+
+{{% doctree ./ }}
diff --git a/util/lint_commits.py b/util/lint_commits.py
new file mode 100755
index 0000000..bc9a462
--- /dev/null
+++ b/util/lint_commits.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import argparse
+import subprocess
+import sys
+
+error_msg_prefix = ''
+
+
+def error(msg, *args, **kwargs):
+    print(error_msg_prefix + msg, file=sys.stderr)
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description='Check commit meta data for common mistakes')
+    parser.add_argument('--error-msg-prefix',
+                        default='ERROR: ',
+                        required=False,
+                        help='string to prepend to all error messages')
+    parser.add_argument('commitrange',
+                        metavar='commit-range',
+                        help='git log-compatible commit range to check')
+    args = parser.parse_args()
+
+    global error_msg_prefix
+    error_msg_prefix = args.error_msg_prefix
+
+    cmd = ['git', 'log', '--pretty=%H;%ae;%an', args.commitrange]
+    commits = subprocess.run(cmd,
+                             stdout=subprocess.PIPE,
+                             check=True,
+                             universal_newlines=True).stdout
+
+    has_error = False
+    for commit in commits.splitlines():
+        (sha, author_email, author_name) = commit.split(';', 2)
+        print("Checking commit %s by %s <%s>" %
+              (sha, author_name, author_email))
+        if author_email.endswith('users.noreply.github.com'):
+            error('Author of commit %s has no valid email address set: %s. '
+                  'Use "git config user.email user@example.com" to '
+                  'set a valid email address, and update the commit '
+                  'with "git rebase -i" and/or '
+                  '"git commit --amend --reset-author". '
+                  'You also need to disable "Keep my email address '
+                  'private" in the GitHub email settings.' %
+                  (sha, author_email))
+            has_error = True
+
+    if has_error:
+        error('Commit lint failed.')
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/util/lintpy.py b/util/lintpy.py
new file mode 100755
index 0000000..fdaca5e
--- /dev/null
+++ b/util/lintpy.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Lint Python for lowRISC rules"""
+
+import argparse
+import os
+import subprocess
+import sys
+
+import pkg_resources
+
+
+# include here because in hook case don't want to import reggen
+def show_and_exit(clitool, packages):
+    util_path = os.path.dirname(os.path.realpath(clitool))
+    os.chdir(util_path)
+    ver = subprocess.run(
+        ["git", "describe", "--always", "--dirty", "--broken"],
+        stdout=subprocess.PIPE).stdout.strip().decode('ascii')
+    if (ver == ''):
+        ver = 'not found (not in Git repository?)'
+    sys.stderr.write(clitool + " Git version " + ver + '\n')
+    for p in packages:
+        sys.stderr.write(p + ' ' + pkg_resources.require(p)[0].version + '\n')
+    exit(0)
+
+
+def check_linter(cmd, cmdfix, dofix, verbose, files, **kwargs):
+    if not files:
+        return 0
+    if verbose:
+        print('Running %s' % cmd[0])
+    try:
+        subprocess.check_output(
+            cmd + files, stderr=subprocess.STDOUT, **kwargs)
+        return 0
+    except FileNotFoundError:
+        print('%s not found: do you need to install it?' % cmd[0])
+        return 1
+    except subprocess.CalledProcessError as exc:
+        print('Lint failed:', file=sys.stderr)
+        print(' '.join(exc.cmd), file=sys.stderr)
+        if exc.output:
+            output = exc.output.decode(sys.getfilesystemencoding())
+            print(
+                '\t',
+                '\n\t'.join(output.splitlines()),
+                sep='',
+                file=sys.stderr)
+        if dofix:
+            print("Fixing...", file=sys.stderr)
+            subprocess.check_output(
+                cmdfix + files, stderr=subprocess.STDOUT, **kwargs)
+        return 1
+
+
+def filter_ext(extension, files, exclude=None):
+    files = [f for f in files if f.endswith(extension)]
+    if exclude is not None:
+        files = [i for i in files if exclude not in i]
+    return files
+
+
+def lint_files(changed_files, dofix, verbose):
+    err = 0
+    if not isinstance(changed_files, list):
+        changed_files = [
+            i.strip() for i in changed_files.splitlines()
+            if '/external/' not in i
+        ]
+
+    changed_extensions = {
+        ext
+        for root, ext in map(os.path.splitext, changed_files)
+    }
+    if verbose:
+        print('Changed files: ' + str(changed_files))
+        print('Changed extensions: ' + str(changed_extensions))
+
+    if '.py' in changed_extensions:
+        py_files = filter_ext('.py', changed_files)
+        err += check_linter(['yapf', '-d'], ['yapf', '-i'], dofix, verbose,
+                            py_files)
+        err += check_linter(['isort', '-c', '-w79'], ['isort', '-w79'], dofix,
+                            verbose, py_files)
+
+    # could do similar checks for other file types
+    return err
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    parser.add_argument(
+        '--version', action='store_true', help='Show version and exit')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='store_true',
+        help='Verbose output: show changed files and the lint commands run')
+    parser.add_argument(
+        '-c',
+        '--commit',
+        action='store_true',
+        help='Only check files staged for commit rather than '
+             'all modified files (forced when run as git hook)')
+    parser.add_argument(
+        '--fix', action='store_true', help='Fix files detected with problems')
+    parser.add_argument(
+        '--hook',
+        action='store_true',
+        help='Install as ../.git/hooks/pre-commit and exit')
+    parser.add_argument(
+        '-f',
+        '--file',
+        metavar='file',
+        nargs='+',
+        default=[],
+        help='File(s) to check instead of deriving from git')
+
+    args = parser.parse_args()
+    if args.version:
+        show_and_exit(__file__, ['yapf', 'isort'])
+
+    util_path = os.path.dirname(os.path.realpath(__file__))
+    repo_root = os.path.abspath(os.path.join(util_path, os.pardir))
+    # check for running as a hook out of $(TOP)/.git/hooks
+    # (symlink will already have this correct)
+    if repo_root.endswith('.git'):
+        repo_root = os.path.abspath(os.path.join(repo_root, os.pardir))
+    running_hook = sys.argv[0].endswith('hooks/pre-commit')
+
+    if args.verbose:
+        print('argv[0] is ' + sys.argv[0] + ' so running_hook is ' +
+              str(running_hook))
+        print('util_path is ' + util_path)
+        print('repo_root is ' + repo_root)
+
+    if len(args.file) > 0:
+        changed_files = args.file
+    else:
+
+        os.chdir(repo_root)
+        if not os.path.isdir(os.path.join(repo_root, '.git')):
+            print(
+                "Script not in expected location in a git repo",
+                file=sys.stderr)
+            sys.exit(1)
+
+        if args.hook:
+            subprocess.run(
+                'ln -s ../../util/lintpy.py .git/hooks/pre-commit'.split())
+            sys.exit(0)
+
+        if running_hook or args.commit:
+            diff_cmd = 'git diff --cached --name-only --diff-filter=ACM'
+        else:
+            diff_cmd = 'git diff --name-only --diff-filter=ACM'
+
+        changed_files = subprocess.check_output(diff_cmd.split())
+        changed_files = changed_files.decode(sys.getfilesystemencoding())
+
+    sys.exit(lint_files(changed_files, args.fix, args.verbose))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/util/openocd/board/lowrisc-earlgrey-nexysvideo.cfg b/util/openocd/board/lowrisc-earlgrey-nexysvideo.cfg
new file mode 100644
index 0000000..1ab2cf8
--- /dev/null
+++ b/util/openocd/board/lowrisc-earlgrey-nexysvideo.cfg
@@ -0,0 +1,11 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# Board configuration file: the Earl Grey chip on a Nexys Video FPGA board
+
+source [find interface/nexysvideo-ft2232.cfg]
+source [find target/lowrisc-earlgrey.cfg]
+
+# FIXME: use srst and trst
+reset_config none
diff --git a/util/openocd/board/lowrisc-earlgrey-verilator.cfg b/util/openocd/board/lowrisc-earlgrey-verilator.cfg
new file mode 100644
index 0000000..91ac083
--- /dev/null
+++ b/util/openocd/board/lowrisc-earlgrey-verilator.cfg
@@ -0,0 +1,11 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# Board configuration file: the Earl Grey chip in a Verilator simulation
+
+source [find interface/sim-jtagdpi.cfg]
+source [find target/lowrisc-earlgrey.cfg]
+
+# Increase timeouts in simulation
+riscv set_command_timeout_sec 120
diff --git a/util/openocd/interface/nexysvideo-ft2232.cfg b/util/openocd/interface/nexysvideo-ft2232.cfg
new file mode 100644
index 0000000..17fbdff
--- /dev/null
+++ b/util/openocd/interface/nexysvideo-ft2232.cfg
@@ -0,0 +1,58 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# Interface configuration for JTAG on the Nexys Video board through the
+# FT2232H chip. This connection is labeled DPTI/DSPI in the board schematics.
+
+# adapter speed
+# Up to 30 MHz is supported by the FT2232H
+adapter_khz 30000
+
+interface ftdi
+transport select jtag
+
+ftdi_vid_pid 0x0403 0x6010
+
+# Not really required, but makes communication a bit more robust
+ftdi_tdo_sample_edge falling
+
+# Use channel A (0); channel B (1) is taken by the Xilinx JTAG
+ftdi_channel 0
+
+# FT2232H pin configuration
+#
+# Documentation:
+# http://openocd.org/doc-release/html/Debug-Adapter-Configuration.html#Interface-Drivers
+#
+# Our configuration:
+#
+# Bit  MPSSE     FT2232    JTAG    Type   RstVal  Description
+# ----------------------------------------------------------------------
+# Bit0 TCK       ADBUS0    TCK     Out    0       Clock Signal Output
+# Bit1 TDI       ADBUS1    TDI     Out    0       Serial Data Out
+# Bit2 TDO       ADBUS2    TDO     In     0       Serial Data In
+# Bit3 TMS       ADBUS3    TMS     Out    1       Select Signal Out
+# Bit4 GPIOL0    ADBUS4    nTRST   Out    1       General Purpose I/O
+# Bit5 GPIOL1    ADBUS5    nSRST   Out    1       General Purpose I/O
+# Bit6 GPIOL2    ADBUS6    0       Out    0       General Purpose I/O
+# Bit7 GPIOL3    ADBUS7    0       Out    0       General Purpose I/O
+#
+# Bit6 GPIOL2 is used to select between JTAG=1 and SPI=0 and controls
+# the I/O MUX inside earlgrey_nexysvideo. It is static, so it is only
+# set in the init command below (which makes it output the value 1)
+#
+# Bit7 GPIOL3 is used to enable the boot ROM bootstrap
+#
+# This corresponds to the following reset values (first number), and
+# in/out layout (second number):
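+#   0x0078 = 0b0111_1000: TMS, nTRST, nSRST and the JTAG select (Bit6) start high
+#   0x00fb = 0b1111_1011: all pins are outputs except Bit2 (TDO), which is an input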
+ftdi_layout_init 0x0078 0x00fb
+
+# TAP reset (nTRST)
+ftdi_layout_signal nTRST -ndata 0x0010
+
+# System Reset (nSRST)
+ftdi_layout_signal nSRST -ndata 0x0020
+
+# FT2232 doesn't support open drain GPIO for reset pins
+reset_config srst_push_pull trst_push_pull
diff --git a/util/openocd/interface/sim-jtagdpi.cfg b/util/openocd/interface/sim-jtagdpi.cfg
new file mode 100644
index 0000000..f642b94
--- /dev/null
+++ b/util/openocd/interface/sim-jtagdpi.cfg
@@ -0,0 +1,11 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# "JTAG adapter" for simulation, exposed to OpenOCD through a TCP socket 
+# speaking the remote_bitbang protocol. The adapter is implemented as 
+# SystemVerilog DPI module.
+
+interface remote_bitbang
+remote_bitbang_port 44853
+remote_bitbang_host localhost
diff --git a/util/openocd/target/lowrisc-earlgrey.cfg b/util/openocd/target/lowrisc-earlgrey.cfg
new file mode 100644
index 0000000..64dd383
--- /dev/null
+++ b/util/openocd/target/lowrisc-earlgrey.cfg
@@ -0,0 +1,35 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# Target configuration for the lowRISC "Earl Grey" chip
+
+if { [info exists CHIPNAME] } {
+   set  _CHIPNAME $CHIPNAME
+} else {
+   set  _CHIPNAME riscv
+}
+
+if { [info exists CPUTAPID ] } {
+   set _CPUTAPID $CPUTAPID
+} else {
+   # TODO: This is a dummy JTAG IDCODE value currently used in the HW design.
+   # This needs to be replaced with an assigned JTAG IDCODE value as we get it.
+   set _CPUTAPID 0x00000001
+}
+
+jtag newtap $_CHIPNAME tap -irlen 5 -expected-id $_CPUTAPID
+set _TARGETNAME $_CHIPNAME.tap
+target create $_TARGETNAME.0 riscv -chain-position $_TARGETNAME -rtos riscv
+
+# Configure work area in on-chip SRAM
+$_TARGETNAME.0 configure -work-area-phys 0x80000000 -work-area-size 1000 -work-area-backup 0
+
+# This chip implements system bus access, use it.
+# Accessing the memory through the system bus is faster than through
+# instruction feeding.
+riscv set_prefer_sba on
+
+# Be verbose about GDB errors
+gdb_report_data_abort enable
+gdb_report_register_access_error enable
diff --git a/util/reggen/README.md b/util/reggen/README.md
new file mode 100644
index 0000000..06fd375
--- /dev/null
+++ b/util/reggen/README.md
@@ -0,0 +1,82 @@
+# Reggen -- Register generator
+
+Reggen is a python3 tool to read register descriptions in hjson and
+generate various output formats. Currently it can output html
+documentation, standard json, compact standard json (whitespace
+removed) and hjson.  The example commands
+assume $REPO_TOP is set to the toplevel directory of the repo.
+
+### Setup
+
+If the required packages have not previously been installed you will
+need to set a few things up. Use `pip3` to install them:
+```
+$ pip3 install --user hjson
+$ pip3 install --user mistletoe
+$ pip3 install --user mako
+```
+
+
+### Register JSON Format
+
+To ensure things stay up to date, the register json format information
+is documented by the tool itself. Documentation can be generated by
+running the tool to produce markdown and processing that into html.
+
+```
+$ cd $REPO_TOP/util
+$ python3 reggen/regtool.py --doc | python3 docgen/docgen.py -c > /tmp/outdoc.html
+```
+
+### Examples using standalone regtool
+
+Normally for documentation the docgen tool will automatically use
+reggen. See the examples in the docgen module.
+
+The regtool provides a standalone way to run reggen. It can also be
+used to generate other formats such as Verilog RTL and header files
+(see the RTL and DV examples below).
+
+Note that the same example inputs are used here as for docgen.
+
+Generate html from the register description with verbose output. Since
+embeddable html is generated, the css is not included, so the output
+will look messy (TODO: should the css be added for the standalone
+case?):
+
+```
+$ cd $REPO_TOP/util
+$ ./regtool.py -v -d docgen/examples/uartregs.hjson > /tmp/outuart.html
+```
+
+Generate standard json from the register description:
+
+```
+$ cd $REPO_TOP/util
+$ ./regtool.py -v -j docgen/examples/uartregs.hjson > /tmp/outuart.json
+```
+
+Generate compact json and use it to generate html:
+
+```
+$ cd $REPO_TOP/util
+$ ./regtool.py -v -c docgen/examples/uartregs.hjson | python3 reggen/regtool.py -v -d > /tmp/outuart.html
+```
+
+Generate RTL from register definition:
+```
+$ cd $REPO_TOP/util
+$ mkdir /tmp/rtl
+$ ./regtool.py -r -t /tmp/rtl ../hw/ip/uart/doc/uart_reg.hjson
+```
+
+Generate DV UVM class from register definition:
+
+```console
+$ cd $REPO_TOP/util
+$ mkdir /tmp/dv
+$ ./regtool.py -s -t /tmp/dv ../hw/ip/uart/doc/uart_reg.hjson
+```
+
+If the target directory is not specified, the tool creates the
+`{module}_regs.sv` file under the `hw/ip/{module}/dv/` directory.
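+
+For example, a sketch of a run that relies on this default output
+location (the exact file name follows the `{module}_regs.sv` pattern
+described above):
+
+```
+$ cd $REPO_TOP/util
+$ ./regtool.py -s ../hw/ip/uart/doc/uart_reg.hjson
+```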
diff --git a/util/reggen/__init__.py b/util/reggen/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/util/reggen/__init__.py
diff --git a/util/reggen/field_enums.py b/util/reggen/field_enums.py
new file mode 100644
index 0000000..7aaf7b1
--- /dev/null
+++ b/util/reggen/field_enums.py
@@ -0,0 +1,40 @@
+"""Enumerated types for fields
+Generated by validation, used by backends
+"""
+
+from enum import Enum
+
+
+class JsonEnum(Enum):
+    def for_json(x):
+        return str(x)
+
+
+class SwWrAccess(JsonEnum):
+    WR = 1
+    NONE = 2
+
+
+class SwRdAccess(JsonEnum):
+    RD = 1
+    RC = 2  # Special handling for port
+    NONE = 3
+
+
+class SwAccess(JsonEnum):
+    RO = 1
+    RW = 2
+    WO = 3
+    W1C = 4
+    W1S = 5
+    W0C = 6
+    RC = 7
+    R0W1C = 8
+    NONE = 9
+
+
+class HwAccess(JsonEnum):
+    HRO = 1
+    HRW = 2
+    HWO = 3
+    NONE = 4  # No access allowed
diff --git a/util/reggen/gen_cfg_html.py b/util/reggen/gen_cfg_html.py
new file mode 100644
index 0000000..3cf0308
--- /dev/null
+++ b/util/reggen/gen_cfg_html.py
@@ -0,0 +1,104 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Generate html documentation from validated configuration json tree
+"""
+
+import sys
+
+
+def genout(outfile, msg):
+    outfile.write(msg)
+
+
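+# Return the name with a bit range suffix when a width is given, e.g. an
+# entry {"name": "gpio", "width": "32"} is rendered as "gpio[31:0]"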
+def name_width(x):
+    if not 'width' in x or x['width'] == '1':
+        return x['name']
+    return x['name'] + '[' + str(int(x['width'], 0) - 1) + ':0]'
+
+
+# Must have called cfg_validate, so should have no errors
+
+
+def gen_cfg_html(cfgs, outfile):
+    genout(outfile, "<p>Referring to the \n")
+    genout(
+        outfile,
+        "<a href=\"https://github.com/lowRISC/opentitan/blob/master/doc/rm/comportability_specification.md\">\n"
+    )
+    genout(outfile, "Comportable guideline for peripheral device functionality</a>,\n")
+    genout(outfile,
+           "the module <b><code>" + cfgs['name'] + "</code></b> has \n")
+    genout(outfile, "the following hardware interfaces defined.</p>\n")
+    # clocks
+    genout(
+        outfile, "<p><i>Primary Clock:</i> <b><code>" + cfgs['clock_primary'] +
+        "</code></b></p>\n")
+    if 'other_clock_list' in cfgs:
+        genout(outfile, "<p><i>Other Clocks:</i></p>\n")
+    else:
+        genout(outfile, "<p><i>Other Clocks: none</i></p>\n")
+    # bus interfaces
+    genout(
+        outfile, "<p><i>Bus Device Interface:</i> <b><code>" +
+        cfgs['bus_device'] + "</code></b></p>\n")
+    if 'bus_host' in cfgs:
+        genout(
+            outfile, "<p><i>Bus Host Interface:</i> <b><code>" +
+            cfgs['bus_host'] + "</code></b></p>\n")
+    else:
+        genout(outfile, "<p><i>Bus Host Interface: none</i></p>\n")
+    # IO
+    if ('available_input_list' in cfgs or 'available_output_list' in cfgs or
+            'available_inout_list' in cfgs):
+        genout(outfile, "<p><i>Peripheral Pins for Chip IO:</i></p>\n")
+        genout(
+            outfile, "<table class=\"cfgtable\"><tr>" +
+            "<th>Pin name</th><th>direction</th>" +
+            "<th>Description</th></tr>\n")
+        if 'available_input_list' in cfgs:
+            for x in cfgs['available_input_list']:
+                genout(
+                    outfile, "<tr><td>" + name_width(x) +
+                    "</td><td>input</td><td>" + x['desc'] + "</td></tr>\n")
+        if 'available_output_list' in cfgs:
+            for x in cfgs['available_output_list']:
+                genout(
+                    outfile, "<tr><td>" + name_width(x) +
+                    "</td><td>output</td><td>" + x['desc'] + "</td></tr>\n")
+        if 'available_inout_list' in cfgs:
+            for x in cfgs['available_inout_list']:
+                genout(
+                    outfile, "<tr><td>" + name_width(x) +
+                    "</td><td>inout</td><td>" + x['desc'] + "</td></tr>\n")
+        genout(outfile, "</table>\n")
+    else:
+        genout(outfile, "<p><i>Peripheral Pins for Chip IO: none</i></p>\n")
+    # interrupts
+    if 'interrupt_list' in cfgs:
+        genout(outfile, "<p><i>Interrupts:</i></p>\n")
+        genout(
+            outfile, "<table class=\"cfgtable\"><tr><th>Interrupt Name</th>" +
+            "<th>Description</th></tr>\n")
+        for x in cfgs['interrupt_list']:
+            genout(
+                outfile, "<tr><td>" + name_width(x) + "</td><td>" + x['desc'] +
+                "</td></tr>\n")
+        genout(outfile, "</table>\n")
+    else:
+        genout(outfile, "<p><i>Interrupts: none</i></p>\n")
+    if 'alert_list' in cfgs:
+        genout(outfile, "<p><i>Security Alerts:</i></p>\n")
+        genout(
+            outfile, "<table class=\"cfgtable\"><tr><th>Alert Name</th>" +
+            "<th>Description</th></tr>\n")
+        for x in cfgs['alert_list']:
+            genout(
+                outfile, "<tr><td>" + x['name'] + "</td><td>" + x['desc'] +
+                "</td></tr>\n")
+        genout(outfile, "</table>\n")
+    else:
+        genout(outfile, "<p><i>Security Alerts: none</i></p>\n")
+    # interrupts
+    return
diff --git a/util/reggen/gen_cheader.py b/util/reggen/gen_cheader.py
new file mode 100644
index 0000000..3fd3417
--- /dev/null
+++ b/util/reggen/gen_cheader.py
@@ -0,0 +1,148 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Generate C header from validated register json tree
+"""
+
+import io
+import logging as log
+import re
+import sys
+
+
+def genout(outfile, msg):
+    outfile.write(msg)
+
+
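+# Convert a name to an upper-case C identifier, replacing anything that is
+# not alphanumeric with '_', e.g. "uart.ctrl" becomes "UART_CTRL"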
+def as_define(s):
+    s = s.upper()
+    r = ''
+    for i in range(0, len(s)):
+        r += s[i] if s[i].isalnum() else '_'
+    return r
+
+
+def clean_desc(s):
+    return s.splitlines()[0]
+
+
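+# Emit the defines for a single register. The generated output has the shape
+# (names are illustrative):
+#   #define COMP_REG(id) (COMP##id##_BASE_ADDR + 0x4)
+#   #define COMP_REG_FIELD_MASK 0xff
+#   #define COMP_REG_FIELD_OFFSET 8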
+def gen_cdefine_register(outstr, reg, comp, width, rnames):
+    rname = reg['name']
+    offset = reg['genoffset']
+
+    genout(outstr, '// ' + clean_desc(reg['desc']) + '\n')
+    defname = as_define(comp + '_' + rname)
+    genout(
+        outstr, '#define ' + defname + '(id) (' + as_define(comp) +
+        '##id##_BASE_ADDR + ' + hex(offset) + ')\n')
+
+    for field in reg['fields']:
+        fieldlsb = field['bitinfo'][2]
+        fname = field['name']
+        if fname == rname:
+            dname = defname
+        else:
+            dname = defname + '_' + as_define(fname)
+
+        if field['bitinfo'][1] == 1:
+            # single bit
+            genout(outstr, '#define ' + dname + ' ' + str(fieldlsb) + '\n')
+        else:
+            # multiple bits (unless it is the whole register)
+            if field['bitinfo'][1] != width:
+                mask = field['bitinfo'][0] >> fieldlsb
+                genout(outstr,
+                       '#define ' + dname + '_MASK ' + hex(mask) + '\n')
+                genout(
+                    outstr,
+                    '#define ' + dname + '_OFFSET ' + str(fieldlsb) + '\n')
+            if 'enum' in field:
+                for enum in field['enum']:
+                    ename = as_define(enum['name'])
+                    genout(
+                        outstr,
+                        '#define ' + defname + '_' + as_define(field['name'])
+                        + '_' + ename + ' ' + enum['value'] + '\n')
+    genout(outstr, '\n')
+    return
+
+
+def gen_cdefine_window(outstr, win, comp, regwidth, rnames):
+    wname = win['name']
+    offset = win['genoffset']
+
+    genout(outstr, '// Memory area: ' + clean_desc(win['desc']) + '\n')
+    defname = as_define(comp + '_' + wname)
+    genout(
+        outstr,
+        '#define ' + defname + '(base) ((base) + ' + hex(offset) + ')\n')
+    items = int(win['items'])
+    genout(outstr, '#define ' + defname + '_SIZE_WORDS ' + str(items) + '\n')
+    items = items * (regwidth // 8)
+    genout(outstr, '#define ' + defname + '_SIZE_BYTES ' + str(items) + '\n')
+
+    wid = win['genvalidbits']
+    if (wid != regwidth):
+        mask = (1 << wid) - 1
+        genout(outstr, '#define ' + defname + '_MASK ' + hex(mask) + '\n')
+
+
+# Must have called validate, so should have no errors
+
+
+def gen_cdefines(regs, outfile, src_lic, src_copy):
+    component = regs['name']
+    registers = regs['registers']
+    rnames = regs['genrnames']
+    outstr = io.StringIO()
+
+    if 'regwidth' in regs:
+        regwidth = int(regs['regwidth'], 0)
+    else:
+        regwidth = 32
+
+    for x in registers:
+        if 'reserved' in x:
+            continue
+
+        if 'skipto' in x:
+            continue
+
+        if 'sameaddr' in x:
+            for sareg in x['sameaddr']:
+                gen_cdefine_register(outstr, sareg, component, regwidth,
+                                     rnames)
+            continue
+
+        if 'window' in x:
+            gen_cdefine_window(outstr, x['window'], component, regwidth,
+                               rnames)
+            continue
+
+        if 'multireg' in x:
+            for reg in x['multireg']['genregs']:
+                gen_cdefine_register(outstr, reg, component, regwidth, rnames)
+            continue
+
+        gen_cdefine_register(outstr, x, component, regwidth, rnames)
+
+    generated = outstr.getvalue()
+    outstr.close()
+
+    genout(outfile, '// Generated register defines for ' + component + '\n\n')
+    if src_copy != '':
+        genout(outfile, '// Copyright information found in source file:\n')
+        genout(outfile, '// ' + src_copy + '\n\n')
+    if src_lic != None:
+        genout(outfile, '// Licensing information found in source file:\n')
+        for line in src_lic.splitlines():
+            genout(outfile, '// ' + line + '\n')
+        genout(outfile, '\n')
+    genout(outfile, '#ifndef _' + as_define(component) + '_REG_DEFS_\n')
+    genout(outfile, '#define _' + as_define(component) + '_REG_DEFS_\n\n')
+    genout(outfile, generated)
+    genout(outfile, '#endif  // _' + as_define(component) + '_REG_DEFS_\n')
+    genout(outfile, '// End generated register defines for ' + component)
+
+    return
diff --git a/util/reggen/gen_ctheader.py b/util/reggen/gen_ctheader.py
new file mode 100644
index 0000000..3fa1594
--- /dev/null
+++ b/util/reggen/gen_ctheader.py
@@ -0,0 +1,156 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Generate C header (Titan style) from validated register json tree
+"""
+
+import io
+import logging as log
+import re
+import sys
+
+
+def genout(outfile, msg):
+    outfile.write(msg)
+
+
+def as_define(s):
+    s = s.upper()
+    r = ''
+    for i in range(0, len(s)):
+        r += s[i] if s[i].isalnum() else '_'
+    return r
+
+
+def clean_desc(s):
+    return s.splitlines()[0]
+
+
+def gen_cdefine_register(outstr, reg, comp, width, rnames):
+    rname = reg['name']
+    offset = reg['genoffset']
+
+    genout(outstr, '// ' + clean_desc(reg['desc']) + '\n')
+    defname = as_define(comp + '_' + rname)
+    genout(
+        outstr, '#define ' + defname + '(id)\t' + '(' + as_define(comp) +
+        ' ## id ## _BASE_ADDR  + ' + hex(offset) + ')\n')
+    genout(outstr, '#define ' + defname + '_OFFSET\t' + hex(offset) + '\n')
+
+    for field in reg['fields']:
+        fname = field['name']
+        fieldlsb = field['bitinfo'][2]
+        if fname == rname:
+            dname = defname
+        else:
+            dname = defname + '_' + as_define(fname)
+
+        # yapf: disable
+        genout(outstr, '# define ' + dname + '_LSB\t' +
+               hex(fieldlsb) + '\n')
+        genout(outstr, '# define ' + dname + '_MASK\t' +
+               hex(field['bitinfo'][0] >> fieldlsb) + '\n')
+        genout(outstr, '# define ' + dname + '_SIZE\t' +
+               hex(field['bitinfo'][1]) + '\n')
+        genout(outstr, '# define ' + dname + '_DEFAULT\t' +
+               hex(field['genresval']) + '\n')
+        # yapf: enable
+
+        if 'enum' in field:
+            for enum in field['enum']:
+                ename = as_define(enum['name'])
+                genout(
+                    outstr,
+                    '# define ' + defname + '_' + as_define(field['name']) +
+                    '_' + ename + '\t' + hex(int(enum['value'], 0)) + '\n')
+    genout(outstr, '\n')
+    return
+
+
+def gen_cdefine_window(outstr, win, comp, regwidth, rnames):
+    wname = win['name']
+    offset = win['genoffset']
+
+    genout(outstr, '// Memory area: ' + clean_desc(win['desc']) + '\n')
+    defname = as_define(comp + '_' + wname)
+    genout(
+        outstr,
+        '#define ' + defname + '(base)\t' + '((base) + ' + hex(offset) + ')\n')
+    items = int(win['items'])
+    genout(outstr, '#define ' + defname + '_SIZE_WORDS\t' + str(items) + '\n')
+    items = items * (regwidth // 8)
+    genout(outstr, '#define ' + defname + '_SIZE_BYTES\t' + str(items) + '\n')
+
+    wid = win['genvalidbits']
+    if (wid != regwidth):
+        mask = (1 << wid) - 1
+        genout(outstr, '#define ' + defname + '_MASK\t' + hex(mask) + '\n')
+
+
+# Must have called validate, so should have no errors
+
+
+def gen_cdefines(regs, outfile, src_lic, src_copy):
+    component = regs['name']
+    registers = regs['registers']
+    rnames = regs['genrnames']
+    outstr = io.StringIO()
+
+    if 'regwidth' in regs:
+        regwidth = int(regs['regwidth'], 0)
+    else:
+        regwidth = 32
+
+    for x in registers:
+        if 'reserved' in x:
+            continue
+
+        if 'skipto' in x:
+            continue
+
+        if 'sameaddr' in x:
+            for sareg in x['sameaddr']:
+                gen_cdefine_register(outstr, sareg, component, regwidth,
+                                     rnames)
+            continue
+
+        if 'window' in x:
+            gen_cdefine_window(outstr, x['window'], component, regwidth,
+                               rnames)
+            continue
+
+        if 'multireg' in x:
+            for reg in x['multireg']['genregs']:
+                gen_cdefine_register(outstr, reg, component, regwidth, rnames)
+            continue
+
+        gen_cdefine_register(outstr, x, component, regwidth, rnames)
+
+    generated = outstr.getvalue()
+    outstr.close()
+
+    genout(outfile, '// Generated register defines for ' + component + '\n\n')
+    if src_copy != '':
+        genout(outfile, '// Copyright information found in source file:\n')
+        genout(outfile, '// ' + src_copy + '\n\n')
+    if src_lic != None:
+        genout(outfile, '// Licensing information found in source file:\n')
+        for line in src_lic.splitlines():
+            genout(outfile, '// ' + line + '\n')
+        genout(outfile, '\n')
+    genout(outfile, '#ifndef _' + as_define(component) + '_REG_DEFS_\n')
+    genout(outfile, '#define _' + as_define(component) + '_REG_DEFS_\n\n')
+
+    tmax = 0
+    for l in generated.splitlines():
+        tpos = l.find('\t')
+        if tpos > tmax:
+            tmax = tpos
+    for l in generated.splitlines(keepends=True):
+        genout(outfile, l.expandtabs(tmax + 2))
+
+    genout(outfile, '#endif // _' + as_define(component) + '_REG_DEFS_\n')
+    genout(outfile, '// End generated register defines for ' + component)
+
+    return
diff --git a/util/reggen/gen_dv.py b/util/reggen/gen_dv.py
new file mode 100644
index 0000000..9058e4f
--- /dev/null
+++ b/util/reggen/gen_dv.py
@@ -0,0 +1,64 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Generate SystemVerilog designs from validated register json tree
+"""
+
+import logging as log
+import operator
+import sys
+
+from mako.template import Template
+from pkg_resources import resource_filename
+
+from .field_enums import HwAccess, SwAccess, SwRdAccess, SwWrAccess
+from .gen_rtl import Block, Field, Register, Window, json_to_reg
+
+
+# Return the register block class name
+def bcname(b):
+    return b.name + "_reg_block"
+
+
+# Return the register class name
+def rcname(b, r):
+    return b.name + "_reg_" + r.name
+
+
+# Return the memory class name
+def mcname(b, m):
+    return b.name + "_mem_" + m.name.lower()
+
+
+# Return the memory instance name
+def miname(m):
+    return m.name.lower()
+
+
+# Return the base address in SystemVerilog literal syntax,
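+# e.g. "0x1000" with a 32-bit block width becomes "32'h1000"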
+def sv_base_addr(b):
+    sv_base_addr = b.base_addr.replace("0x", str(b.width) + "'h")
+    return sv_base_addr
+
+
+# Generate the DV RAL model from the raw dict object parsed from hjson
+def gen_dv(obj, outdir):
+    # obj: OrderedDict
+    block = json_to_reg(obj)
+    gen_ral(block, outdir)
+
+
+# Generate the DV RAL model from a gen_rtl Block specification
+def gen_ral(block, outdir):
+    # Read Register templates
+    uvm_reg_tpl = Template(
+        filename=resource_filename('reggen', 'uvm_reg.tpl.sv'))
+
+    # Generate <block_name>_reg_block.sv containing the RAL model
+    with open(outdir + "/" + block.name + "_reg_block.sv", 'w') as fout:
+        fout.write(
+            uvm_reg_tpl.render(
+                block=block,
+                HwAccess=HwAccess,
+                SwRdAccess=SwRdAccess,
+                SwWrAccess=SwWrAccess))
diff --git a/util/reggen/gen_html.py b/util/reggen/gen_html.py
new file mode 100644
index 0000000..d888532
--- /dev/null
+++ b/util/reggen/gen_html.py
@@ -0,0 +1,346 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Generate html documentation from validated register json tree
+"""
+
+import logging as log
+import re
+import sys
+
+
+def genout(outfile, msg):
+    outfile.write(msg)
+
+
+# Expand !!register references into html links, and convert **bold** and *italic*.
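+# e.g. "!!CTRL.TX" becomes a link to the "#Reg_ctrl" anchor (assuming a
+# register called CTRL is in rnames)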
+def desc_expand(s, rnames):
+    def fieldsub(match):
+        base = match.group(1).partition('.')[0].lower()
+        if base in rnames:
+            if match.group(1)[-1] == ".":
+                return ('<a href="#Reg_' + base + '"><code class=\"reg\">' +
+                        match.group(1)[:-1] + '</code></a>.')
+            else:
+                return ('<a href="#Reg_' + base + '"><code class=\"reg\">' +
+                        match.group(1) + '</code></a>')
+        log.warning('!!' + match.group(1).partition('.')[0] +
+                 ' not found in register list.')
+        return match.group(0)
+
+    s = re.sub(r"!!([A-Za-z0-9_.]+)", fieldsub, s)
+    s = re.sub(r"(?s)\*\*(.+?)\*\*", r'<B>\1</B>', s)
+    s = re.sub(r"\*([^*]+?)\*", r'<I>\1</I>', s)
+    return s
+
+
+# Generation of HTML table with register bit-field summary picture
+# Max 16-bit wide on one line
+
+
+def gen_tbl_row(outfile, msb, width, close):
+    if (close):
+        genout(outfile, "</tr>\n")
+    genout(outfile, "<tr>")
+    for x in range(msb, msb - width, -1):
+        genout(outfile, "<td class=\"bitnum\">" + str(x) + "</td>")
+
+    genout(outfile, "</tr><tr>")
+
+
+def gen_html_reg_pic(outfile, reg, width):
+
+    if (width > 32):
+        bsize = 3
+        nextbit = 63
+        hdrbits = 16
+        nextline = 48
+    elif (width > 16):
+        bsize = 3
+        nextbit = 31
+        hdrbits = 16
+        nextline = 16
+    elif (width > 8):
+        bsize = 3
+        nextbit = 15
+        nextline = 0
+        hdrbits = 16
+    else:
+        bsize = 12
+        nextbit = 7
+        nextline = 0
+        hdrbits = 8
+
+    genout(outfile, "<table class=\"regpic\">")
+    gen_tbl_row(outfile, nextbit, hdrbits, False)
+
+    for field in reversed(reg['fields']):
+        fieldlsb = field['bitinfo'][2]
+        fieldwidth = field['bitinfo'][1]
+        fieldmsb = fieldlsb + fieldwidth - 1
+        fname = field['name']
+
+        while nextbit > fieldmsb:
+            if (nextbit >= nextline) and (fieldmsb < nextline):
+                spans = nextbit - (nextline - 1)
+            else:
+                spans = nextbit - fieldmsb
+            genout(
+                outfile, "<td class=\"unused\" colspan=" + str(spans) +
+                ">&nbsp;</td>\n")
+            if (nextbit >= nextline) and (fieldmsb < nextline):
+                nextbit = nextline - 1
+                gen_tbl_row(outfile, nextbit, hdrbits, True)
+                nextline = nextline - 16
+            else:
+                nextbit = fieldmsb
+
+        while (fieldmsb >= nextline) and (fieldlsb < nextline):
+            spans = fieldmsb - (nextline - 1)
+            genout(
+                outfile, "<td class=\"fname\" colspan=" + str(spans) + ">" +
+                fname + "...</td>\n")
+            fname = "..." + field['name']
+            fieldwidth = fieldwidth - spans
+            fieldmsb = nextline - 1
+            nextline = nextline - 16
+            gen_tbl_row(outfile, fieldmsb, hdrbits, True)
+
+        namelen = len(fname)
+        if namelen == 0 or fname == ' ': fname = "&nbsp;"
+        if (namelen > bsize * fieldwidth):
+            usestyle = (" style=\"font-size:" + str(
+                (bsize * 100 * fieldwidth) / namelen) + "%\"")
+        else:
+            usestyle = ""
+
+        genout(
+            outfile, "<td class=\"fname\" colspan=" + str(fieldwidth) +
+            usestyle + ">" + fname + "</td>\n")
+
+        if (fieldlsb == nextline) and nextline > 0:
+            gen_tbl_row(outfile, nextline - 1, hdrbits, True)
+            nextline = nextline - 16
+
+        nextbit = fieldlsb - 1
+    while (nextbit > 0):
+        spans = nextbit - (nextline - 1)
+        genout(outfile,
+               "<td class=\"unused\" colspan=" + str(spans) + ">&nbsp;</td>\n")
+        nextbit = nextline - 1
+        if (nextline > 0):
+            gen_tbl_row(outfile, nextline - 1, hdrbits, True)
+            nextline = nextline - 16
+
+    genout(outfile, "</tr></table>")
+
+
+# Generation of HTML table with header, register picture and details
+
+
+def gen_html_register(outfile, reg, comp, width, rnames, toc, toclvl):
+    def gen_merge(outfile, fieldlsb, mergebase, mergeprev, mergedesc):
+        genout(
+            outfile, "<tr><td class=\"regbits\">" + str(fieldlsb - 1) + ':' +
+            str(mergebase) + "</td>")
+        genout(outfile, "<td class=\"regperm\"></td>")
+        genout(outfile, "<td class=\"regrv\"></td>")
+        genout(outfile, "<td class=\"regfn\"></td>")
+        if mergeprev != mergedesc:
+            genout(outfile,
+                   "<td class=\"regde\">" + mergedesc + ".." + mergeprev[4:])
+        else:
+            genout(outfile, "<td class=\"regde\">" + mergedesc)
+        genout(outfile, "</td></tr>\n")
+
+    rname = reg['name']
+    offset = reg['genoffset']
+    #in a multireg with multiple regs give anchor with base register name
+    if 'genbasebits' in reg and rname[-1] == '0':
+        genout(outfile, "<div id=\"Reg_" + rname[:-1].lower() + "\"></div>\n")
+    regwen_string = ''
+    if 'regwen' in reg and (reg['regwen'] != ''):
+        regwen_string = '<br>Register enable = ' + reg['regwen']
+    genout(
+        outfile, "<table class=\"regdef\" id=\"Reg_" + rname.lower() + "\">\n"
+        "<tr><th class=\"regdef\" colspan=5>" + comp + "." + rname + " @ + " +
+        hex(offset) + "<br>" + desc_expand(reg['desc'], rnames) + "<br>" +
+        "Reset default = " + hex(reg['genresval']) + ", mask " + hex(
+            reg['genresmask']) + regwen_string + "</th></tr>\n")
+    if toc != None:
+        toc.append((toclvl, comp + "." + rname, "Reg_" + rname.lower()))
+    genout(outfile, "<tr><td colspan=5>")
+    gen_html_reg_pic(outfile, reg, width)
+    genout(outfile, "</td></tr>\n")
+
+    genout(outfile, "<tr><th width=5%>Bits</th>")
+    genout(outfile, "<th width=5%>Type</th>")
+    genout(outfile, "<th width=5%>Reset</th>")
+    genout(outfile, "<th>Name</th>")
+    genout(outfile, "<th>Description</th></tr>")
+    nextbit = 0
+    fcount = 0
+    mergebase = -1
+    for field in reg['fields']:
+        fcount += 1
+        if not 'name' in field:
+            fname = "field " + str(fcount)
+        else:
+            fname = field['name']
+
+        fieldlsb = field['bitinfo'][2]
+        if (fieldlsb > nextbit) and mergebase < 0:
+            genout(outfile, "<tr><td class=\"regbits\">")
+            if (nextbit == (fieldlsb - 1)):
+                genout(outfile, str(nextbit))
+            else:
+                genout(outfile, str(fieldlsb - 1) + ":" + str(nextbit))
+            genout(outfile,
+                   "</td><td></td><td></td><td></td><td>Reserved</td></tr>")
+        if 'genbasebits' in reg:
+            if (((1 << fieldlsb) & reg['genbasebits']) == 0):
+                mergeprev = field['desc']
+                if (mergebase < 0):
+                    mergebase = fieldlsb
+                    mergedesc = field['desc']
+                nextbit = fieldlsb + field['bitinfo'][1]
+                continue
+            else:
+                if (mergebase >= 0):
+                    gen_merge(outfile, fieldlsb, mergebase, mergeprev,
+                              mergedesc)
+                    mergebase = -1
+        genout(outfile, "<tr><td class=\"regbits\">" + field['bits'] + "</td>")
+        genout(outfile, "<td class=\"regperm\">" + field['swaccess'] + "</td>")
+        genout(
+            outfile,
+            "<td class=\"regrv\">" + ('x' if field['genresvalx'] else hex(
+                field['genresval'])) + "</td>")
+        genout(outfile, "<td class=\"regfn\">" + fname + "</td>")
+        if 'desc' in field:
+            genout(
+                outfile, "<td class=\"regde\">" + desc_expand(
+                    field['desc'], rnames) + "\n")
+        else:
+            genout(outfile, "<td>\n")
+
+        if 'enum' in field:
+            genout(outfile, "    <table>")
+            for enum in field['enum']:
+                if (not 'name' in enum):
+                    ename = "enum for " + fname + " in " + rname
+                else:
+                    ename = enum['name']
+                genout(outfile, "    <tr><td>" + enum['value'] + "</td>")
+                genout(outfile, "<td>" + ename + "</td>")
+                genout(
+                    outfile, "<td>" + desc_expand(enum['desc'], rnames) +
+                    "</td></tr>\n")
+
+            genout(outfile, "    </table>")
+            if 'genrsvdenum' in field:
+                genout(outfile, "Other values are reserved.")
+        genout(outfile, "</td></tr>\n")
+        nextbit = fieldlsb + field['bitinfo'][1]
+
+    # could be in the middle of a merge
+    if (mergebase >= 0):
+        gen_merge(outfile, nextbit, mergebase, mergeprev, mergedesc)
+
+    genout(outfile, "</table>\n<br><br>\n")
+
+    return
+
+
+def gen_html_window(outfile, win, comp, regwidth, rnames, toc, toclvl):
+    wname = win['name']
+    offset = win['genoffset']
+    genout(
+        outfile, '<table class="regdef" id="Reg_' + wname.lower() + '">\n'
+        '<tr><th class="regdef">' + comp + '.' + wname + ' @ + ' + hex(offset)
+        + '<br>' + win['items'] + ' item ' + win['swaccess'] +
+        ' window<br>Byte writes are ' +
+        ('' if win['genbyte-write'] else '<i>not</i> ') +
+        'supported</th></tr>\n')
+    genout(outfile, '<tr><td><table class="regpic">')
+    genout(outfile, '<tr><td width="10%"></td>')
+    wid = win['genvalidbits']
+
+    for x in range(regwidth - 1, -1, -1):
+        if x == regwidth - 1 or x == wid - 1 or x == 0:
+            genout(outfile, '<td class="bitnum">' + str(x) + '</td>')
+        else:
+            genout(outfile, '<td class="bitnum"></td>')
+    genout(outfile, '</tr>')
+    tblmax = int(win['items']) - 1
+    for x in [0, 1, 2, tblmax - 1, tblmax]:
+        if x == 2:
+            genout(
+                outfile, '<tr><td>&nbsp;</td><td align=center colspan=' +
+                str(regwidth) + '>...</td></tr>')
+        else:
+            genout(
+                outfile, '<tr><td class="regbits">+' +
+                hex(offset + x * (regwidth // 8)) + '</td>')
+            if wid < regwidth:
+                genout(
+                    outfile, '<td class="unused" colspan=' +
+                    str(regwidth - wid) + '>&nbsp;</td>\n')
+                genout(
+                    outfile,
+                    '<td class="fname" colspan=' + str(wid) + '>&nbsp;</td>\n')
+            else:
+                genout(
+                    outfile, '<td class="fname" colspan=' + str(regwidth) +
+                    '>&nbsp;</td>\n')
+            genout(outfile, '</tr>')
+    genout(outfile, '</td></tr></table>')
+    genout(
+        outfile, '<tr><td class="regde">' + desc_expand(win['desc'], rnames) +
+        '</td></tr>')
+    genout(outfile, "</table>\n<br><br>\n")
+    if toc != None:
+        toc.append((toclvl, comp + "." + wname, "Reg_" + wname.lower()))
+
+
+# Must have called validate, so should have no errors
+
+
+def gen_html(regs, outfile, toclist=None, toclevel=3):
+    component = regs['name']
+    registers = regs['registers']
+    rnames = regs['genrnames']
+
+    if 'regwidth' in regs:
+        regwidth = int(regs['regwidth'], 0)
+    else:
+        regwidth = 32
+
+    for x in registers:
+        if 'reserved' in x:
+            continue
+
+        if 'skipto' in x:
+            continue
+
+        if 'sameaddr' in x:
+            for sareg in x['sameaddr']:
+                gen_html_register(outfile, sareg, component, regwidth, rnames,
+                                  toclist, toclevel)
+            continue
+
+        if 'window' in x:
+            gen_html_window(outfile, x['window'], component, regwidth, rnames,
+                            toclist, toclevel)
+            continue
+
+        if 'multireg' in x:
+            for reg in x['multireg']['genregs']:
+                gen_html_register(outfile, reg, component, regwidth, rnames,
+                                  toclist, toclevel)
+            continue
+
+        gen_html_register(outfile, x, component, regwidth, rnames, toclist,
+                          toclevel)
+    return
diff --git a/util/reggen/gen_json.py b/util/reggen/gen_json.py
new file mode 100644
index 0000000..7088d17
--- /dev/null
+++ b/util/reggen/gen_json.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Generate json/compact json/hjson from register json tree
+"""
+
+import hjson
+
+
+def gen_json(obj, outfile, format):
+    if format == 'json':
+        hjson.dumpJSON(
+            obj,
+            outfile,
+            ensure_ascii=False,
+            use_decimal=True,
+            indent='  ',
+            for_json=True)
+    elif format == 'compact':
+        hjson.dumpJSON(
+            obj,
+            outfile,
+            ensure_ascii=False,
+            for_json=True,
+            use_decimal=True,
+            separators=(',', ':'))
+    elif format == 'hjson':
+        hjson.dump(
+            obj, outfile, ensure_ascii=False, for_json=True, use_decimal=True)
+    else:
+        raise ValueError('Invalid json format ' + format)
diff --git a/util/reggen/gen_rtl.py b/util/reggen/gen_rtl.py
new file mode 100644
index 0000000..c7d4bfa
--- /dev/null
+++ b/util/reggen/gen_rtl.py
@@ -0,0 +1,268 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Generate SystemVerilog designs from validated register json tree
+"""
+
+import logging as log
+import operator
+import sys
+
+from mako.template import Template
+from pkg_resources import resource_filename
+
+from .field_enums import HwAccess, SwAccess, SwRdAccess, SwWrAccess
+
+
+class Field():
+    """Field in a register.
+
+    Field class contains necessary info to generate RTL code.
+    It has two additional (tool generated) fields, swrdaccess and swwraccess,
+    which represent the read and write access types. This makes the RTL
+    generation code simpler.
+    """
+    name = ""  # required
+    msb = 31  # required
+    lsb = 0  # required
+    resval = 0  # optional
+    swaccess = SwAccess.NONE  # optional
+    swrdaccess = SwRdAccess.NONE
+    swwraccess = SwWrAccess.NONE
+    hwaccess = HwAccess.HRO
+    hwqe = False
+    hwre = False
+
+    def __init__(self):
+        self.name = ""  # required
+        self.msb = 31  # required
+        self.lsb = 0  # required
+        self.resval = 0  # optional
+        self.swaccess = SwAccess.NONE  # optional
+        self.swrdaccess = SwRdAccess.NONE
+        self.swwraccess = SwWrAccess.NONE
+        self.hwaccess = HwAccess.HRO
+        self.hwqe = False
+        self.hwre = False
+
+
+class Register():
+    name = ""
+    offset = 0
+    hwqe = False
+    hwre = False
+    hwext = False  # External register
+    resval = 0
+    dvrights = "RO"  # Used by UVM REG only
+    regwen = ""
+    fields = []
+
+    def __init__(self):
+        self.name = ""
+        self.offset = 0
+        self.hwqe = False
+        self.hwre = False
+        self.hwext = False  # External register
+        self.resval = 0
+        self.dvrights = "RO"  # Used by UVM REG only
+        self.regwen = ""
+        self.fields = []
+
+
+class Window():
+    base_addr = 0
+    limit_addr = 0
+    n_bits = 0
+
+    def __init__(self):
+        self.base_addr = 0
+        self.limit_addr = 0
+        self.n_bits = 0
+
+class Block():
+    width = 32
+    addr_width = 12
+    base_addr = 0
+    name = ""
+    regs = []
+    wins = []
+    blocks = []
+
+    def __init__(self):
+        self.width = 32
+        self.addr_width = 12
+        self.base_addr = 0
+        self.name = ""
+        self.regs = []
+        self.wins = []
+        self.blocks = []
+
+def escape_name(name):
+    return name.lower().replace(' ', '_')
+
+
+def check_field_bool(obj, field, default):
+    if field in obj:
+        return True if obj[field] == "true" else False
+    else:
+        return default
+
+
+def parse_field(obj, reg, nfields):
+    """Convert OrderedDict field into Field class
+    """
+    f = Field()
+    f.name = escape_name(obj["name"])
+    # if name doesn't exist and only one field in a reg
+    if f.name == "" and nfields == 1:
+        f.name = reg.name
+
+    # MSB, LSB
+    f.lsb = obj["bitinfo"][2]
+    f.msb = f.lsb + obj["bitinfo"][1] - 1
+
+    #assert not 'swaccess' in obj, "R[%s] F[%s]: SwAccess in Field not supported" % (reg.name, f.name)
+    f.swaccess = obj["genswaccess"]
+    f.swrdaccess = obj["genswrdaccess"]
+    f.swwraccess = obj["genswwraccess"]
+    f.hwaccess = obj["genhwaccess"]
+    f.hwqe = obj["genhwqe"]
+    f.hwre = obj["genhwre"]
+
+    # resval handling. `genresval` has zero value if `resval` field is defined
+    # as unknown 'x'
+    f.resval = obj["genresval"]
+
+    return f
+
+
+def parse_reg(obj):
+    """Convert OrderedDict register into Register class
+    """
+
+    reg = Register()
+    reg.name = escape_name(obj['name'])
+    reg.offset = obj["genoffset"]
+    reg.fields = []
+
+    reg.hwext = (obj['hwext'] == "true")
+    reg.hwqe = (obj["hwqe"] == "true")
+    reg.hwre = (obj["hwre"] == "true")
+    reg.resval = obj["genresval"]
+    reg.dvrights = obj["gendvrights"]
+    reg.regwen = obj["regwen"].lower()
+
+    # Parsing Fields
+    for f in obj["fields"]:
+        field = parse_field(f, reg, len(obj["fields"]))
+        if field != None:
+            reg.fields.append(field)
+
+    # TODO(eunchan): Field bitfield overlapping check
+    log.info("R[0x%04x]: %s ", reg.offset, reg.name)
+    for f in reg.fields:
+        log.info("  F[%2d:%2d]: %s", f.msb, f.lsb, f.name)
+
+    return reg
+
+
+def parse_win(obj, width):
+    # Convert register window fields into Window class
+    # base_addr : genoffset
+    # limit_addr : genoffset + items*width
+    win = Window()
+    win.name = obj["name"]
+    win.base_addr = obj["genoffset"]
+    win.limit_addr = obj["genoffset"] + int(obj["items"]) * (width // 8)
+    win.dvrights = obj["swaccess"]
+    win.n_bits = obj["genvalidbits"]
+
+    # TODO: Generate warnings of `noalign` or `unusual`
+    return win
+
+
+def json_to_reg(obj):
+    """Converts json OrderedDict into structure having useful information for
+    Template to use.
+
+    Main purpose of this function is:
+        - Add Offset value based on auto calculation
+        - Prepare Systemverilog data structure to generate _pkg file
+    """
+    block = Block()
+
+    # Name
+    block.name = escape_name(obj["name"])
+    log.info("Processing module: %s", block.name)
+
+    block.width = int(obj["regwidth"], 0)
+
+    if block.width != 32 and block.width != 64:
+        log.error(
+            "Current reggen tool doesn't support field width that is not 32 nor 64"
+        )
+
+    log.info("Data Width is set to %d bits", block.width)
+
+    for r in obj["registers"]:
+        # Check if any exception condition hit
+        if 'reserved' in r:
+            continue
+        elif 'skipto' in r:
+            continue
+        elif 'sameaddr' in r:
+            log.error("Current tool doesn't support 'sameaddr' type")
+            continue
+        elif 'window' in r:
+            win = parse_win(r['window'], block.width)
+            if win != None:
+                block.wins.append(win)
+            continue
+        elif 'multireg' in r:
+            for genr in r['multireg']['genregs']:
+                reg = parse_reg(genr)
+                if reg != None:
+                    block.regs.append(reg)
+            continue
+        reg = parse_reg(r)
+        if reg != None:
+            block.regs.append(reg)
+        # mdhayter -- moved logging into parse_reg
+
+    # Last offset and calculate space
+    #  Later on, it could use block.regs[-1].genoffset
+    if "space" in obj:
+        block.addr_width = int(obj["space"], 0).bit_length()
+    else:
+        block.addr_width = (obj["gensize"] - 1).bit_length()
+
+    return block
+
+
+def gen_rtl(obj, outdir):
+    # obj: OrderedDict
+
+    block = json_to_reg(obj)
+
+    # Read Register templates
+    reg_top_tpl = Template(
+        filename=resource_filename('reggen', 'reg_top.tpl.sv'))
+    reg_pkg_tpl = Template(
+        filename=resource_filename('reggen', 'reg_pkg.tpl.sv'))
+
+    # Generate pkg.sv with block name
+    with open(outdir + "/" + block.name + "_reg_pkg.sv", 'w',
+              encoding='UTF-8') as fout:
+        fout.write(
+            reg_pkg_tpl.render(block=block,
+                               HwAccess=HwAccess,
+                               SwRdAccess=SwRdAccess,
+                               SwWrAccess=SwWrAccess))
+
+    # Generate top.sv
+    with open(outdir + "/" + block.name + "_reg_top.sv", 'w',
+              encoding='UTF-8') as fout:
+        fout.write(
+            reg_top_tpl.render(block=block,
+                               HwAccess=HwAccess,
+                               SwRdAccess=SwRdAccess,
+                               SwWrAccess=SwWrAccess))
diff --git a/util/reggen/gen_selfdoc.py b/util/reggen/gen_selfdoc.py
new file mode 100644
index 0000000..49dd7df
--- /dev/null
+++ b/util/reggen/gen_selfdoc.py
@@ -0,0 +1,308 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Generates the documentation for the register tool
+
+"""
+import sys
+
+from reggen import validate
+
+
+def genout(outfile, msg):
+    outfile.write(msg)
+
+
+doc_intro = """
+
+(start of output generated by `regtool.py --doc`)
+
+The tables describe each key and the type of the value. The following
+types are used:
+
+Type | Description
+---- | -----------
+"""
+
+swaccess_intro = """
+
+Register fields are tagged using the swaccess key to describe the
+permitted access and side-effects. This key must have one of these
+values:
+
+"""
+
+hwaccess_intro = """
+
+Register fields are tagged using the hwaccess key to describe the
+permitted access from hardware logic and side-effects. This key must
+have one of these values:
+
+"""
+
+top_example = """
+The basic structure of a register definition file is thus:
+
+```hjson
+{
+  name: "GP",
+  regwidth: "32",
+  registers: [
+    // register definitions...
+  ]
+}
+
+```
+
+"""
+
+register_example = """
+
+The basic register definition group will follow this pattern:
+
+```hjson
+    { name: "REGA",
+      desc: "Description of register",
+      swaccess: "rw",
+      resval: "42",
+      fields: [
+        // bit field definitions...
+      ]
+    }
+```
+
+The name and brief description are required. If the swaccess key is
+provided it describes the access pattern that will be used by all
+bitfields in the register that do not override with their own swaccess
+key. This is a useful shortcut because in most cases a register will
+have the same access restrictions for all fields. The reset value of
+the register may also be provided here or in the individual fields. If
+it is provided in both places then they must match; if it is provided
+in neither place then the reset value defaults to zero for all fields
+except write-only fields, where it defaults to x.
+
+"""
+
+field_example = """
+
+Field names should be relatively short because they will be used
+frequently (and need to fit in the register layout picture!) The field
+description is expected to be longer and will most likely make use of
+the hjson ability to include multi-line strings. An example with three
+fields:
+
+```hjson
+    fields: [
+      { bits: "15:0",
+        name: "RXS",
+        desc: '''
+        Last 16 oversampled values of RX. These are captured at 16x the baud
+        rate clock. This is a shift register with the most recent bit in
+        bit 0 and the oldest in bit 15. Only valid when ENRXS is set.
+        '''
+      }
+      { bits: "16",
+        name: "ENRXS",
+        desc: '''
+          If this bit is set the receive oversampled data is collected
+          in the RXS field.
+        '''
+      }
+      {bits: "20:19", name: "TXILVL",
+       desc: "Trigger level for TX interrupts",
+       resval: "2",
+       enum: [
+               { value: "0", name: "txlvl1", desc: "1 character" },
+               { value: "1", name: "txlvl4", desc: "4 characters" },
+               { value: "2", name: "txlvl8", desc: "8 characters" },
+               { value: "3", name: "txlvl16", desc: "16 characters" }
+             ]
+      }
+    ]
+```
+
+In all of these the swaccess parameter is inherited from the register
+level, and will be added so this key is always available to the
+backend. RXS and ENRXS will default to a zero reset value (unless
+something different is provided for the register) and will have the
+key added, but TXILVL explicitly sets its reset value to 2.
+
+The missing bits 17 and 18 will be treated as reserved by the tool, as
+will any bits between 21 and the maximum in the register.
+
+The TXILVL is an example using an enumeration to specify all valid
+values for the field. In this case all possible values are described,
+if the list is incomplete then the field is marked with the rsvdenum
+key so the backend can take appropriate action. (If the enum field is
+more than 7 bits then the checking is not done.)
+
+"""
+
+offset_intro = """
+
+"""
+
+multi_intro = """
+
+The multireg expands on the register required fields and will generate
+a list of the generated registers (that contain all required and
+generated keys for an actual register).
+
+"""
+
+window_intro = """
+
+A window defines an open region of the register space that can be used
+for things that are not registers (for example access to a buffer ram).
+
+"""
+
+regwen_intro = """
+
+Registers can protect themselves from software writes by using the
+register attribute regwen. When not an empty string (the default
+value), regwen names another register whose value must be 1 in order
+to allow writes to this register.  This is useful for the prevention
+of software modification.  The register-enable register (call it
+REGWEN) must be one bit in width, and should default to 1 and be rw1c
+for preferred security control.  This allows all writes to proceed
+until at some point software disables future modifications by clearing
+REGWEN. An error is reported if REGWEN does not exist, contains more
+than one bit, is not `rw1c` or does not default to 1. One REGWEN can
+protect multiple registers. An example:
+
+```hjson
+    { name: "REGWEN",
+      desc: "Register write enable for a bank of registers",
+      swaccess: "rw1c",
+      fields: [ { bits: "0", resval: "1" } ]
+    }
+    { name: "REGA",
+      swaccess: "rw",
+      regwen: "REGWEN",
+      ...
+    }
+    { name: "REGB",
+      swaccess: "rw",
+      regwen: "REGWEN",
+      ...
+    }
+```
+"""
+
+doc_tail = """
+
+(end of output generated by `regtool.py --doc`)
+
+"""
+
+
+def doc_tbl_head(outfile, use):
+    if (use != None):
+        genout(outfile, "\nKey | Kind | Type | Description of Value\n")
+        genout(outfile, "--- | ---- | ---- | --------------------\n")
+    else:
+        genout(outfile, "\nKey | Description\n")
+        genout(outfile, "--- | -----------\n")
+
+
+def doc_tbl_line(outfile, key, use, desc):
+    if use != None:
+        genout(
+            outfile, key + " | " + validate.key_use[use] + " | " +
+            validate.val_types[desc[0]][0] + " | " + desc[1] + "\n")
+    else:
+        genout(outfile, key + " | " + desc + "\n")
+
+
+def document(outfile):
+    genout(outfile, doc_intro)
+    for x in validate.val_types:
+        genout(
+            outfile,
+            validate.val_types[x][0] + " | " + validate.val_types[x][1] + "\n")
+
+    genout(outfile, swaccess_intro)
+    doc_tbl_head(outfile, None)
+    for x in validate.swaccess_permitted:
+        doc_tbl_line(outfile, x, None, validate.swaccess_permitted[x][0])
+
+    genout(outfile, hwaccess_intro)
+    doc_tbl_head(outfile, None)
+    for x in validate.hwaccess_permitted:
+        doc_tbl_line(outfile, x, None, validate.hwaccess_permitted[x][0])
+
+    genout(outfile,
+           "\n\nThe top level of the json is a group containing "\
+           "the following keys:\n")
+    doc_tbl_head(outfile, 1)
+    for x in validate.top_required:
+        doc_tbl_line(outfile, x, 'r', validate.top_required[x])
+    for x in validate.top_optional:
+        doc_tbl_line(outfile, x, 'o', validate.top_optional[x])
+    for x in validate.top_added:
+        doc_tbl_line(outfile, x, 'a', validate.top_added[x])
+    genout(outfile, top_example)
+
+    genout(outfile,
+           "\n\nThe list of registers includes register definition groups:\n")
+    doc_tbl_head(outfile, 1)
+    for x in validate.reg_required:
+        doc_tbl_line(outfile, x, 'r', validate.reg_required[x])
+    for x in validate.reg_optional:
+        doc_tbl_line(outfile, x, 'o', validate.reg_optional[x])
+    for x in validate.reg_added:
+        doc_tbl_line(outfile, x, 'a', validate.reg_added[x])
+    genout(outfile, register_example)
+
+    genout(outfile,
+           "\n\nIn the fields list each field definition is a group "\
+           "containing:\n")
+    doc_tbl_head(outfile, 1)
+    for x in validate.field_required:
+        doc_tbl_line(outfile, x, 'r', validate.field_required[x])
+    for x in validate.field_optional:
+        doc_tbl_line(outfile, x, 'o', validate.field_optional[x])
+    for x in validate.field_added:
+        doc_tbl_line(outfile, x, 'a', validate.field_added[x])
+    genout(outfile, field_example)
+
+    genout(outfile, "\n\nDefinitions in an enumeration group contain:\n")
+    doc_tbl_head(outfile, 1)
+    for x in validate.enum_required:
+        doc_tbl_line(outfile, x, 'r', validate.enum_required[x])
+    for x in validate.enum_optional:
+        doc_tbl_line(outfile, x, 'o', validate.enum_optional[x])
+    for x in validate.enum_added:
+        doc_tbl_line(outfile, x, 'a', validate.enum_added[x])
+
+    genout(
+        outfile, "\n\nThe list of registers may include single entry groups "\
+          "to control the offset, open a window or generate registers:\n")
+    doc_tbl_head(outfile, 1)
+    for x in validate.list_optone:
+        doc_tbl_line(outfile, x, 'o', validate.list_optone[x])
+
+    genout(outfile, offset_intro)
+    genout(outfile, regwen_intro)
+
+    genout(outfile, window_intro)
+    doc_tbl_head(outfile, 1)
+    for x in validate.window_required:
+        doc_tbl_line(outfile, x, 'r', validate.window_required[x])
+    for x in validate.window_optional:
+        doc_tbl_line(outfile, x, 'o', validate.window_optional[x])
+    for x in validate.window_added:
+        doc_tbl_line(outfile, x, 'a', validate.window_added[x])
+
+    genout(outfile, multi_intro)
+    doc_tbl_head(outfile, 1)
+    for x in validate.multireg_required:
+        doc_tbl_line(outfile, x, 'r', validate.multireg_required[x])
+    for x in validate.multireg_optional:
+        doc_tbl_line(outfile, x, 'o', validate.multireg_optional[x])
+    for x in validate.multireg_added:
+        doc_tbl_line(outfile, x, 'a', validate.multireg_added[x])
+
+    genout(outfile, doc_tail)
diff --git a/util/reggen/reg_html.css b/util/reggen/reg_html.css
new file mode 100644
index 0000000..9beaf15
--- /dev/null
+++ b/util/reggen/reg_html.css
@@ -0,0 +1,74 @@
+/* Stylesheet for reggen html register output */
+/* Copyright lowRISC contributors. */
+/* Licensed under the Apache License, Version 2.0, see LICENSE for details.*/
+/* SPDX-License-Identifier: Apache-2.0 */
+
+table.regpic {
+    width: 95%;
+    border-collapse: collapse;
+    margin-left:auto;
+    margin-right:auto;
+    table-layout:fixed;
+}
+
+table.regdef {
+    border: 1px solid black;
+    width: 80%;
+    border-collapse: collapse;
+    margin-left:auto;
+    margin-right:auto;
+    table-layout:auto;
+}
+
+table.regdef th {
+    border: 1px solid black;
+    font-family: sans-serif;
+}
+
+td.bitnum {
+    font-size: 60%;
+    text-align: center;
+}
+
+td.unused {
+    border: 1px solid black;
+    background-color: gray;
+}
+
+td.fname {
+    border: 1px solid black;
+    text-align: center;
+    font-family: sans-serif;
+}
+
+
+td.regbits, td.regperm, td.regrv {
+    border: 1px solid black;
+    text-align: center;
+    font-family: sans-serif;
+}
+
+td.regde, td.regfn {
+    border: 1px solid black;
+}
+
+table.cfgtable {
+    border: 1px solid black;
+    width: 80%;
+    border-collapse: collapse;
+    margin-left:auto;
+    margin-right:auto;
+    table-layout:auto;
+}
+
+table.cfgtable th {
+    border: 1px solid black;
+    font-family: sans-serif;
+    font-weight: bold;
+}
+
+table.cfgtable td {
+    border: 1px solid black;
+    font-family: sans-serif;
+}
diff --git a/util/reggen/reg_pkg.tpl.sv b/util/reggen/reg_pkg.tpl.sv
new file mode 100644
index 0000000..0c43c4e
--- /dev/null
+++ b/util/reggen/reg_pkg.tpl.sv
@@ -0,0 +1,147 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+// Register Package auto-generated by `reggen` containing data structure
+
+package ${block.name}_reg_pkg;
+
+// Register to internal design logic
+typedef struct packed {
+<%
+# directly mirrors below (avoided optimizations to ensure a match)
+# have to do as a python block to avoid inserting blank lines
+# compute number of bits because packed structs are declared msb first
+packbit = 0
+for r in block.regs:
+  if len(r.fields) == 1 and r.fields[0].hwaccess in [HwAccess.HRW, HwAccess.HRO]:
+    packbit += 1 + r.fields[0].msb - r.fields[0].lsb
+    if r.fields[0].hwqe:
+      packbit += 1
+    if r.fields[0].hwre:
+      packbit += 1
+  elif len(r.fields) >= 2 and len([f for f in r.fields if f.hwaccess in [HwAccess.HRW, HwAccess.HRO]]):
+    for f in r.fields:
+      if f.hwaccess in [HwAccess.HRW, HwAccess.HRO]:
+        if f.msb != f.lsb:
+          packbit += 1 + f.msb - f.lsb
+        else:
+          packbit += 1
+        if r.hwqe:
+          packbit += 1
+        if r.fields[0].hwre:
+          packbit += 1
+nbits = packbit - 1
+packbit = 0
+%>
+% for r in block.regs:
+  % if len(r.fields) == 1 and r.fields[0].hwaccess in [HwAccess.HRW, HwAccess.HRO]:
+    ## Only one field, should use register name as it is
+  struct packed {
+    logic [${r.fields[0].msb - r.fields[0].lsb}:0] q; // [${nbits - packbit}:${nbits - (packbit + r.fields[0].msb - r.fields[0].lsb)}]<% packbit += 1 + r.fields[0].msb - r.fields[0].lsb %>
+    % if r.fields[0].hwqe:
+    logic qe; // [${nbits - packbit}]<% packbit += 1 %>
+    % endif
+    % if r.fields[0].hwre:
+    logic re; // [${nbits - packbit}]<% packbit += 1 %>
+    % endif
+  } ${r.name};
+  % elif len(r.fields) >= 2 and len([f for f in r.fields if f.hwaccess in [HwAccess.HRW, HwAccess.HRO]]):
+  struct packed {
+    % for f in r.fields:
+      % if f.hwaccess in [HwAccess.HRW, HwAccess.HRO]:
+    struct packed {
+      ## reg2hw signal based on HW type and virtual?
+      % if f.msb != f.lsb:
+      logic [${f.msb - f.lsb}:0] q; // [${nbits - packbit}:${nbits - (packbit + f.msb - f.lsb)}]<% packbit += 1 + f.msb - f.lsb %>
+      % else:
+      logic q; // [${nbits - packbit}]<% packbit += 1 %>
+      % endif
+      % if f.hwqe:
+      logic qe; // [${nbits - packbit}]<% packbit += 1 %>
+      % endif
+      % if r.fields[0].hwre:
+      logic re; // [${nbits - packbit}]<% packbit += 1 %>
+      % endif
+    } ${f.name};
+      % endif
+    % endfor
+  } ${r.name};
+  % endif
+% endfor
+} ${block.name}_reg2hw_t;
+
+// Internal design logic to register
+typedef struct packed {
+<%
+packbit = 0
+for r in block.regs:
+  if len(r.fields) == 1 and r.fields[0].hwaccess in [HwAccess.HRW, HwAccess.HWO]:
+    packbit += 1 + r.fields[0].msb - r.fields[0].lsb
+    if r.hwext == 0:
+      packbit += 1
+  elif len(r.fields) >= 2 and len([f for f in r.fields if f.hwaccess in [HwAccess.HRW, HwAccess.HWO]]):
+    for f in r.fields:
+      if f.hwaccess in [HwAccess.HRW, HwAccess.HWO]:
+        if f.msb != f.lsb:
+          packbit += 1 + f.msb - f.lsb
+        else:
+          packbit += 1
+        if r.hwext == 0:
+          packbit += 1
+nbits = packbit - 1
+packbit = 0
+%>
+% for r in block.regs:
+  % if len(r.fields) == 1 and r.fields[0].hwaccess in [HwAccess.HRW, HwAccess.HWO]:
+    ## Only one field, should use register name as it is
+  struct packed {
+    logic [${r.fields[0].msb - r.fields[0].lsb}:0] d; // [${nbits - packbit}:${nbits - (packbit + r.fields[0].msb - r.fields[0].lsb)}]<% packbit += 1 + r.fields[0].msb - r.fields[0].lsb %>
+    % if r.hwext == 0:
+    logic de; // [${nbits - packbit}]<% packbit += 1 %>
+    % endif
+  } ${r.name};
+  % elif len(r.fields) >= 2 and len([f for f in r.fields if f.hwaccess in [HwAccess.HRW, HwAccess.HWO]]):
+  struct packed {
+    % for f in r.fields:
+      % if f.hwaccess in [HwAccess.HRW, HwAccess.HWO]:
+    struct packed {
+      % if f.msb != f.lsb:
+      logic [${f.msb - f.lsb}:0] d; // [${nbits - packbit}:${nbits - (packbit + f.msb - f.lsb)}]<% packbit += 1 + f.msb - f.lsb %>
+      % else:
+      logic d;  // [${nbits - packbit}]<% packbit += 1 %>
+      % endif
+      % if r.hwext == 0:
+      logic de; // [${nbits - packbit}]<% packbit += 1 %>
+      % endif
+    } ${f.name};
+      % endif
+    % endfor
+  } ${r.name};
+  % endif
+% endfor
+} ${block.name}_hw2reg_t;
+
+  // Register Address
+% for r in block.regs:
+  parameter ${block.name.upper()}_${r.name.upper()}_OFFSET = ${block.addr_width}'h ${"%x" % r.offset};
+% endfor
+
+% if len(block.wins) > 0:
+  // Window parameter
+% endif
+% for i,w in enumerate(block.wins):
+  parameter ${block.name.upper()}_${w.name.upper()}_OFFSET = ${block.addr_width}'h ${"%x" % w.base_addr};
+  parameter ${block.name.upper()}_${w.name.upper()}_SIZE   = ${block.addr_width}'h ${"%x" % (w.limit_addr - w.base_addr)};
+% endfor
+
+##   typedef enum int {
+## % for r in block.regs:
+##   % if loop.last:
+##   ${block.name.upper()}_${r.name.upper()}
+##   % else:
+##   ${block.name.upper()}_${r.name.upper()},
+##   % endif
+## % endfor
+##   } ${block.name}_reg_e;
+endpackage
diff --git a/util/reggen/reg_top.tpl.sv b/util/reggen/reg_top.tpl.sv
new file mode 100644
index 0000000..d47c927
--- /dev/null
+++ b/util/reggen/reg_top.tpl.sv
@@ -0,0 +1,552 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+// Register Top module auto-generated by `reggen`
+<%
+  num_wins = len(block.wins)
+  num_wins_width = ((num_wins+1).bit_length()) - 1
+  num_dsp  = num_wins + 1
+%>
+
+module ${block.name}_reg_top (
+  input clk_i,
+  input rst_ni,
+
+  // Below register interface can be changed
+  input  tlul_pkg::tl_h2d_t tl_i,
+  output tlul_pkg::tl_d2h_t tl_o,
+% if num_wins != 0:
+
+  // Output port for window
+  output tlul_pkg::tl_h2d_t tl_win_o  [${num_wins}],
+  input  tlul_pkg::tl_d2h_t tl_win_i  [${num_wins}],
+
+% endif
+  // To HW
+  output ${block.name}_reg_pkg::${block.name}_reg2hw_t reg2hw, // Write
+  input  ${block.name}_reg_pkg::${block.name}_hw2reg_t hw2reg  // Read
+);
+
+  import ${block.name}_reg_pkg::* ;
+
+  localparam AW = ${block.addr_width};
+  localparam IW = $bits(tl_i.a_source);
+  localparam DW = ${block.width};
+  localparam DBW = DW/8;                    // Byte Width
+  localparam logic [$clog2($clog2(DBW)+1)-1:0] FSZ = $clog2(DBW); // Full Size 2^(FSZ) = DBW;
+
+  // register signals
+  logic          reg_we;
+  logic          reg_re;
+  logic [AW-1:0] reg_addr;
+  logic [DW-1:0] reg_wdata;
+  logic          reg_valid;
+  logic [DW-1:0] reg_rdata;
+  logic          tl_malformed, tl_addrmiss;
+
+  // Bus signals
+  tlul_pkg::tl_d_op_e rsp_opcode; // AccessAck or AccessAckData
+  logic          reqready;
+  logic [IW-1:0] reqid;
+  logic [IW-1:0] rspid;
+
+  logic          outstanding;
+
+  tlul_pkg::tl_h2d_t tl_reg_h2d;
+  tlul_pkg::tl_d2h_t tl_reg_d2h;
+
+% if num_wins == 0:
+  assign tl_reg_h2d = tl_i;
+  assign tl_o       = tl_reg_d2h;
+% else:
+  tlul_pkg::tl_h2d_t tl_socket_h2d [${num_dsp}];
+  tlul_pkg::tl_d2h_t tl_socket_d2h [${num_dsp}];
+
+  logic [${num_wins_width}:0] reg_steer;
+
+  // socket_1n connection
+  assign tl_reg_h2d = tl_socket_h2d[${num_wins}];
+  assign tl_socket_d2h[${num_wins}] = tl_reg_d2h;
+
+  % for i,t in enumerate(block.wins):
+  assign tl_win_o[${i}] = tl_socket_h2d[${i}];
+  assign tl_socket_d2h[${i}] = tl_win_i[${i}];
+  % endfor
+
+  // Create Socket_1n
+  tlul_socket_1n #(
+    .N          (${num_dsp}),
+    .HReqPass   (1'b1),
+    .HRspPass   (1'b1),
+    .DReqPass   ({${num_dsp}{1'b1}}),
+    .DRspPass   ({${num_dsp}{1'b1}}),
+    .HReqDepth  (4'h1),
+    .HRspDepth  (4'h1),
+    .DReqDepth  ({${num_dsp}{4'h1}}),
+    .DRspDepth  ({${num_dsp}{4'h1}})
+  ) u_socket (
+    .clk_i,
+    .rst_ni,
+    .tl_h_i (tl_i),
+    .tl_h_o (tl_o),
+    .tl_d_o (tl_socket_h2d),
+    .tl_d_i (tl_socket_d2h),
+    .dev_select (reg_steer)
+  );
+
+  // Create steering logic
+  always_comb begin
+    reg_steer = ${num_dsp-1};       // Default set to register
+
+    // TODO: Can the code below be written as a unique case () statement?
+  % for i,w in enumerate(block.wins):
+      % if w.limit_addr == 2**block.addr_width:
+    if (tl_i.a_address[AW-1:0] >= ${w.base_addr}) begin
+      // Address meets or exceeds the base. Comparison with limit addr ${"'h %x" % w.limit_addr} omitted as it is the top of the address space
+      % else:
+    if (tl_i.a_address[AW-1:0] >= ${w.base_addr} && tl_i.a_address[AW-1:0] < ${w.limit_addr}) begin
+      % endif
+      reg_steer = ${i};
+    end
+  % endfor
+  end
+% endif
+
+  // TODO(eunchan): Fix it after bus interface is finalized
+  assign reg_we = tl_reg_h2d.a_valid && tl_reg_d2h.a_ready &&
+                  ((tl_reg_h2d.a_opcode == tlul_pkg::PutFullData) ||
+                   (tl_reg_h2d.a_opcode == tlul_pkg::PutPartialData));
+  assign reg_re = tl_reg_h2d.a_valid && tl_reg_d2h.a_ready &&
+                  (tl_reg_h2d.a_opcode == tlul_pkg::Get);
+  assign reg_addr = tl_reg_h2d.a_address[AW-1:0];
+  assign reg_wdata = tl_reg_h2d.a_data;
+
+  assign tl_reg_d2h.d_valid  = reg_valid;
+  assign tl_reg_d2h.d_opcode = rsp_opcode;
+  assign tl_reg_d2h.d_param  = '0;
+  assign tl_reg_d2h.d_size   = FSZ;         // always Full Size
+  assign tl_reg_d2h.d_source = rspid;
+  assign tl_reg_d2h.d_sink   = '0;          // Used in TL-C
+  assign tl_reg_d2h.d_data   = reg_rdata;
+  assign tl_reg_d2h.d_user   = '0;          // Doesn't allow additional features yet
+  assign tl_reg_d2h.d_error  = tl_malformed | tl_addrmiss;
+
+  assign tl_reg_d2h.a_ready  = reqready;
+
+  assign reqid     = tl_reg_h2d.a_source;
+
+  always_ff @(posedge clk_i or negedge rst_ni) begin
+    if (!rst_ni) begin
+      tl_malformed <= 1'b1;
+    end else if (tl_reg_h2d.a_valid && tl_reg_d2h.a_ready) begin
+      if ((tl_reg_h2d.a_opcode != tlul_pkg::Get) &&
+          (tl_reg_h2d.a_opcode != tlul_pkg::PutFullData) &&
+          (tl_reg_h2d.a_opcode != tlul_pkg::PutPartialData)) begin
+        tl_malformed <= 1'b1;
+      // Only allow Full Write with full mask
+      end else if (tl_reg_h2d.a_size != FSZ || tl_reg_h2d.a_mask != {DBW{1'b1}}) begin
+        tl_malformed <= 1'b1;
+      end else if (tl_reg_h2d.a_user.parity_en == 1'b1) begin
+        tl_malformed <= 1'b1;
+      end else begin
+        tl_malformed <= 1'b0;
+      end
+    end
+  end
+  // TODO(eunchan): Revise Register Interface logic after REG INTF finalized
+  // TODO(eunchan): Make concrete scenario
+  //    1. Write: No response, so that it can guarantee a request completes a clock after `we`.
+  //              This means bus_reg_ready doesn't have to be lowered.
+  //    2. Read: has a response, so bus_reg_ready should assert after reg_bus_valid & reg_bus_ready
+  //               _____         _____
+  // a_valid _____/     \_______/     \______
+  //         ___________         _____
+  // a_ready            \_______/     \______ <- ERR though no logic malfunction
+  //                     _____________
+  // d_valid ___________/             \______
+  //                             _____
+  // d_ready ___________________/     \______
+  //
+  // The above example is fine, but if r.b.r doesn't assert within two cycles, then it can be wrong.
+  always_ff @(posedge clk_i or negedge rst_ni) begin
+    // Do not accept a new request while a request is being handled
+    //   #Outstanding := 1
+    if (!rst_ni) begin
+      reqready <= 1'b0;
+    end else if (reg_we || reg_re) begin
+      reqready <= 1'b0;
+    end else if (outstanding == 1'b0) begin
+      reqready <= 1'b1;
+    end
+  end
+
+  // Request/ Response ID
+  always_ff @(posedge clk_i or negedge rst_ni) begin
+    if (!rst_ni) begin
+      rspid <= '0;
+    end else if (reg_we || reg_re) begin
+      rspid <= reqid;
+    end
+  end
+
+  // Define SW related signals
+  // Format: <reg>_<field>_{wd|we|qs}
+  //        or <reg>_{wd|we|qs} if field == 1 or 0
+  % for r in block.regs:
+    % if len(r.fields) == 1:
+<%
+      msb = r.fields[0].msb
+      lsb = r.fields[0].lsb
+      sig_name = r.name
+      f = r.fields[0]
+      swwraccess = f.swwraccess
+      swrdaccess = f.swrdaccess
+      hwext = r.hwext
+      regwen = r.regwen
+%>\
+${sig_gen(msb, lsb, sig_name, swwraccess, swrdaccess, hwext, regwen)}\
+    % else:
+      % for f in r.fields:
+<%
+      msb = f.msb
+      lsb = f.lsb
+      sig_name = r.name + "_" + f.name
+      swwraccess = f.swwraccess
+      swrdaccess = f.swrdaccess
+      hwext = r.hwext
+      regwen = r.regwen
+%>\
+${sig_gen(msb, lsb, sig_name, swwraccess, swrdaccess, hwext, regwen)}\
+      % endfor
+    % endif
+  % endfor
+
+  // Register instances
+  % for r in block.regs:
+  // R[${r.name}]: V(${str(r.hwext)})
+    % if len(r.fields) == 1:
+<%
+      f = r.fields[0]
+      finst_name = r.name
+      fsig_name = r.name
+      msb = f.msb
+      lsb = f.lsb
+      swaccess = f.swaccess
+      swrdaccess = f.swrdaccess
+      swwraccess = f.swwraccess
+      hwaccess = f.hwaccess
+      hwqe = f.hwqe
+      hwre = f.hwre
+      hwext = r.hwext
+      resval = f.resval
+      regwen = r.regwen
+%>
+${finst_gen(finst_name, fsig_name, msb, lsb, swaccess, swrdaccess, swwraccess, hwaccess, hwqe, hwre, hwext, resval, regwen)}
+    % else:
+      % for f in r.fields:
+<%
+      finst_name = r.name + "_" + f.name
+      fsig_name = r.name + "." + f.name
+      msb = f.msb
+      lsb = f.lsb
+      swaccess = f.swaccess
+      swrdaccess = f.swrdaccess
+      swwraccess = f.swwraccess
+      hwaccess = f.hwaccess
+      hwqe = f.hwqe
+      hwre = f.hwre
+      hwext = r.hwext
+      resval = f.resval
+      regwen = r.regwen
+%>
+  //   F[${f.name}]: ${f.msb}:${f.lsb}
+${finst_gen(finst_name, fsig_name, msb, lsb, swaccess, swrdaccess, swwraccess, hwaccess, hwqe, hwre, hwext, resval, regwen)}
+      % endfor
+    % endif
+
+  ## for: block.regs
+  % endfor
+
+  logic [${len(block.regs)-1}:0] addr_hit;
+  always_comb begin
+    addr_hit = '0;
+    % for i,r in enumerate(block.regs):
+    addr_hit[${i}] = (reg_addr == ${block.name.upper()}_${r.name.upper()}_OFFSET);
+    % endfor
+  end
+
+  always_ff @(posedge clk_i or negedge rst_ni) begin
+    if (!rst_ni) begin
+      tl_addrmiss <= 1'b0;
+    end else if (reg_re || reg_we) begin
+      tl_addrmiss <= ~|addr_hit;
+    end
+  end
+
+  // Write Enable signal
+  % for i, r in enumerate(block.regs):
+    % if len(r.fields) == 1:
+<%
+      f = r.fields[0]
+      sig_name = r.name
+      inst_name = r.name
+      msb = f.msb
+      lsb = f.lsb
+      swrdaccess = f.swrdaccess
+      swwraccess = f.swwraccess
+      hwext = r.hwext
+%>
+${we_gen(sig_name, msb, lsb, swrdaccess, swwraccess, hwext, i)}\
+    % else:
+      % for f in r.fields:
+<%
+      sig_name = r.name + "_" + f.name
+      inst_name = r.name + "." + f.name
+      msb = f.msb
+      lsb = f.lsb
+      swrdaccess = f.swrdaccess
+      swwraccess = f.swwraccess
+      hwext = r.hwext
+%>
+${we_gen(sig_name, msb, lsb, swrdaccess, swwraccess, hwext, i)}\
+      % endfor
+    % endif
+  % endfor
+
+  // Read data return
+  logic [DW-1:0] reg_rdata_next;
+  always_comb begin
+    reg_rdata_next = '0;
+    unique case (1'b1)
+      % for i, r in enumerate(block.regs):
+        % if len(r.fields) == 1:
+<%
+          f = r.fields[0]
+          sig_name = r.name
+          inst_name = r.name
+          msb = f.msb
+          lsb = f.lsb
+          swrdaccess = f.swrdaccess
+%>\
+      addr_hit[${i}]: begin
+${rdata_gen(sig_name, msb, lsb, swrdaccess)}\
+      end
+
+        % else:
+      addr_hit[${i}]: begin
+          % for f in r.fields:
+<%
+          sig_name = r.name + "_" + f.name
+          inst_name = r.name + "." + f.name
+          msb = f.msb
+          lsb = f.lsb
+          swrdaccess = f.swrdaccess
+%>\
+${rdata_gen(sig_name, msb, lsb, swrdaccess)}\
+          % endfor
+      end
+
+        % endif
+      % endfor
+      default: begin
+        reg_rdata_next = '1;
+      end
+    endcase
+  end
+
+  always_ff @(posedge clk_i or negedge rst_ni) begin
+    if (!rst_ni) begin
+      reg_valid <= 1'b0;
+      reg_rdata <= '0;
+      rsp_opcode <= tlul_pkg::AccessAck;
+    end else if (reg_re || reg_we) begin
+      // Guarantee to return data in a cycle
+      reg_valid <= 1'b1;
+      if (reg_re) begin
+        reg_rdata <= reg_rdata_next;
+        rsp_opcode <= tlul_pkg::AccessAckData;
+      end else begin
+        rsp_opcode <= tlul_pkg::AccessAck;
+      end
+    end else if (tl_reg_h2d.d_ready) begin
+      reg_valid <= 1'b0;
+    end
+  end
+
+  // Outstanding: 1 outstanding at a time. Identical to `reg_valid`
+  always_ff @(posedge clk_i or negedge rst_ni) begin
+    if (!rst_ni) begin
+      outstanding <= 1'b0;
+    end else if (tl_reg_h2d.a_valid && tl_reg_d2h.a_ready) begin
+      outstanding <= 1'b1;
+    end else if (tl_reg_d2h.d_valid && tl_reg_h2d.d_ready) begin
+      outstanding <= 1'b0;
+    end
+  end
+
+  // Assertions for Register Interface
+  `ASSERT_PULSE(wePulse, reg_we, clk_i, !rst_ni)
+  `ASSERT_PULSE(rePulse, reg_re, clk_i, !rst_ni)
+
+  `ASSERT(reAfterRv, $rose(reg_re || reg_we) |=> reg_valid, clk_i, !rst_ni)
+
+  `ASSERT(en2addrHit, (reg_we || reg_re) |-> $onehot0(addr_hit), clk_i, !rst_ni)
+
+  `ASSERT(reqParity, tl_reg_h2d.a_valid |-> tl_reg_h2d.a_user.parity_en == 1'b0, clk_i, !rst_ni)
+
+endmodule
+<%def name="str_bits_sv(msb, lsb)">\
+% if msb != lsb:
+${msb}:${lsb}\
+% else:
+${msb}\
+% endif
+</%def>\
+<%def name="str_arr_sv(msb, lsb)">\
+% if msb != lsb:
+[${msb-lsb}:0] \
+% endif
+</%def>\
+<%def name="sig_gen(msb, lsb, sig_name, swwraccess, swrdaccess, hwext, regwen)">\
+  % if swrdaccess != SwRdAccess.NONE:
+  logic ${str_arr_sv(msb, lsb)}${sig_name}_qs;
+  % endif
+  % if swwraccess != SwWrAccess.NONE:
+  logic ${str_arr_sv(msb, lsb)}${sig_name}_wd;
+  logic ${sig_name}_we;
+  % endif
+  % if swrdaccess != SwRdAccess.NONE and hwext:
+  logic ${sig_name}_re;
+  % endif
+</%def>\
+<%def name="finst_gen(finst_name, fsig_name, msb, lsb, swaccess, swrdaccess, swwraccess, hwaccess, hwqe, hwre, hwext, resval, regwen)">\
+  % if hwext:       ## if hwext, instantiate prim_subreg_ext
+  prim_subreg_ext #(
+    .DW    (${msb - lsb + 1})
+  ) u_${finst_name} (
+    % if swrdaccess != SwRdAccess.NONE:
+    .re     (${finst_name}_re),
+    % else:
+    .re     (1'b0),
+    % endif
+    % if swwraccess != SwWrAccess.NONE:
+      % if regwen:
+    // qualified with register enable
+    .we     (${finst_name}_we & ${regwen}_qs),
+      % else:
+    .we     (${finst_name}_we),
+      % endif
+    .wd     (${finst_name}_wd),
+    % else:
+    .we     (1'b0),
+    .wd     ('0),
+    % endif
+    % if hwaccess == HwAccess.HRO:
+    .d      ('0),
+    % else:
+    .d      (hw2reg.${fsig_name}.d),
+    % endif
+    % if hwre:
+    .qre    (reg2hw.${fsig_name}.re),
+    % else:
+    .qre    (),
+    % endif
+    % if hwaccess == HwAccess.HWO:
+    .qe     (),
+    .q      (),
+    % else:
+      % if hwqe:
+    .qe     (reg2hw.${fsig_name}.qe),
+      % else:
+    .qe     (),
+      % endif
+    .q      (reg2hw.${fsig_name}.q ),
+    % endif
+    % if swrdaccess != SwRdAccess.NONE:
+    .qs     (${finst_name}_qs)
+    % else:
+    .qs     ()
+    % endif
+  );
+  % else:       ## if not hwext, instantiate prim_subreg or constant assign
+    % if hwaccess == HwAccess.NONE and swrdaccess == SwRdAccess.RD and swwraccess == SwWrAccess.NONE:
+  // constant-only read
+  assign ${finst_name}_qs = ${msb-lsb+1}'h${"%x" % resval};
+    % else:     ## not hwext not constant
+  prim_subreg #(
+    .DW      (${msb - lsb + 1}),
+    .SWACCESS("${swaccess.name}"),
+    .RESVAL  (${msb-lsb+1}'h${"%x" % resval})
+  ) u_${finst_name} (
+    .clk_i   (clk_i    ),
+    .rst_ni  (rst_ni  ),
+
+      % if swwraccess != SwWrAccess.NONE: ## non-RO types
+        % if regwen:
+    // from register interface (qualified with register enable)
+    .we     (${finst_name}_we & ${regwen}_qs),
+        % else:
+    // from register interface
+    .we     (${finst_name}_we),
+        % endif
+    .wd     (${finst_name}_wd),
+      % else:                             ## RO types
+    .we     (1'b0),
+    .wd     ('0  ),
+      % endif
+
+    // from internal hardware
+      % if hwaccess == HwAccess.HRO or hwaccess == HwAccess.NONE:
+    .de     (1'b0),
+    .d      ('0  ),
+      % else:
+    .de     (hw2reg.${fsig_name}.de),
+    .d      (hw2reg.${fsig_name}.d ),
+      % endif
+
+    // to internal hardware
+      % if hwaccess == HwAccess.HWO or hwaccess == HwAccess.NONE:
+    .qe     (),
+    .q      (),
+      % else:
+        % if hwqe:
+    .qe     (reg2hw.${fsig_name}.qe),
+        % else:
+    .qe     (),
+        % endif
+    .q      (reg2hw.${fsig_name}.q ),
+      % endif
+
+      % if swrdaccess != SwRdAccess.NONE:
+    // to register interface (read)
+    .qs     (${finst_name}_qs)
+      % else:
+    .qs     ()
+      % endif
+  );
+    % endif  ## end non-constant prim_subreg
+  % endif
+</%def>\
+<%def name="we_gen(sig_name, msb, lsb, swrdaccess, swwraccess, hwext, idx)">\
+% if swwraccess != SwWrAccess.NONE:
+  % if swrdaccess != SwRdAccess.RC:
+  assign ${sig_name}_we = addr_hit[${idx}] && reg_we;
+  assign ${sig_name}_wd = reg_wdata[${str_bits_sv(msb,lsb)}];
+  % else:
+  ## Generate WE based on read request, read should clear
+  assign ${sig_name}_we = addr_hit[${idx}] && reg_re;
+  assign ${sig_name}_wd = '1;
+  % endif
+% endif
+% if swrdaccess != SwRdAccess.NONE and hwext:
+  assign ${sig_name}_re = addr_hit[${idx}] && reg_re;
+% endif
+</%def>\
+<%def name="rdata_gen(sig_name, msb, lsb, swrdaccess)">\
+% if swrdaccess != SwRdAccess.NONE:
+        reg_rdata_next[${str_bits_sv(msb,lsb)}] = ${sig_name}_qs;
+% else:
+        reg_rdata_next[${str_bits_sv(msb,lsb)}] = '0;
+% endif
+</%def>\
diff --git a/util/reggen/uvm_reg.tpl.sv b/util/reggen/uvm_reg.tpl.sv
new file mode 100644
index 0000000..249bdb0
--- /dev/null
+++ b/util/reggen/uvm_reg.tpl.sv
@@ -0,0 +1,191 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+// UVM Registers auto-generated by `reggen` containing data structure
+// Do Not Edit directly
+<% from reggen import (gen_dv)
+%>\
+##// [PY-COMMENT]: function to generate typedefs recursively for blocks
+<%def name="typedefs(block)">\
+% for b in block.blocks:
+${typedefs(b)}
+% endfor
+% for r in block.regs:
+typedef class ${gen_dv.rcname(block, r)};
+% endfor
+% for w in block.wins:
+typedef class ${gen_dv.mcname(block, w)};
+% endfor
+typedef class ${gen_dv.bcname(block)};
+</%def>\
+##// [PY-COMMENT]: function to recursively define all classes
+<%def name="construct_classes(block)">\
+% for b in block.blocks:
+${construct_classes(b)}
+
+% endfor
+// Block: ${block.name}
+% for r in block.regs:
+<%
+  reg_width = block.width
+%>\
+// Class: ${gen_dv.rcname(block, r)}
+class ${gen_dv.rcname(block, r)} extends dv_base_reg;
+  // fields
+% for f in r.fields:
+  rand dv_base_reg_field ${f.name};
+% endfor
+
+  `uvm_object_utils(${gen_dv.rcname(block, r)})
+
+  function new(string       name = "${gen_dv.rcname(block, r)}",
+               int unsigned n_bits = ${reg_width},
+               int          has_coverage = UVM_NO_COVERAGE);
+    super.new(name, n_bits, has_coverage);
+  endfunction : new
+
+  virtual function void build();
+    // create fields
+% for f in r.fields:
+<%
+  field_size = f.msb - f.lsb + 1
+  if f.swaccess.name == "R0W1C":
+    field_access = "W1C"
+  else:
+    field_access = f.swaccess.name
+
+  if f.hwaccess == HwAccess.HRO:
+    field_volatile = 0
+  else:
+    field_volatile = 1
+%>\
+    ${f.name} = dv_base_reg_field::type_id::create("${f.name}");
+    ${f.name}.configure(
+      .parent(this),
+      .size(${field_size}),
+      .lsb_pos(${f.lsb}),
+      .access("${field_access}"),
+      .volatile(${field_volatile}),
+      .reset(${f.resval}),
+      .has_reset(1),
+      .is_rand(1),
+      .individually_accessible(1));
+% endfor
+  endfunction : build
+
+endclass : ${gen_dv.rcname(block, r)}
+
+% endfor
+% for w in block.wins:
+<%
+  mem_name = w.name.lower()
+  mem_right = w.dvrights.upper()
+  mem_n_bits = w.n_bits
+  mem_size = int((w.limit_addr - w.base_addr) / (mem_n_bits / 8))
+%>\
+// Class: ${gen_dv.mcname(block, w)}
+class ${gen_dv.mcname(block, w)} extends dv_base_mem;
+
+  `uvm_object_utils(${gen_dv.mcname(block, w)})
+
+  function new(string           name = "${gen_dv.mcname(block, w)}",
+               longint unsigned size = ${mem_size},
+               int unsigned     n_bits = ${mem_n_bits},
+               string           access = "RW"/* TODO:"${mem_right}"*/,
+               int              has_coverage = UVM_NO_COVERAGE);
+    super.new(name, size, n_bits, access, has_coverage);
+  endfunction : new
+
+endclass : ${gen_dv.mcname(block, w)}
+
+% endfor
+// Class: ${gen_dv.bcname(block)}
+class ${gen_dv.bcname(block)} extends dv_base_reg_block;
+% if block.blocks:
+  // sub blocks
+% endif
+% for b in block.blocks:
+  rand ${gen_dv.bcname(b)} ${b.name};
+% endfor
+% if block.regs:
+  // registers
+% endif
+% for r in block.regs:
+  rand ${gen_dv.rcname(block, r)} ${r.name};
+% endfor
+% if block.wins:
+  // memories
+% endif
+% for w in block.wins:
+  rand ${gen_dv.mcname(block, w)} ${gen_dv.miname(w)};
+% endfor
+
+  `uvm_object_utils(${gen_dv.bcname(block)})
+
+  function new(string name = "${gen_dv.bcname(block)}",
+               int    has_coverage = UVM_NO_COVERAGE);
+    super.new(name, has_coverage);
+  endfunction : new
+
+  virtual function void build(uvm_reg_addr_t base_addr);
+    // create default map
+    this.default_map = create_map(.name("default_map"),
+                                  .base_addr(base_addr),
+                                  .n_bytes(${block.width//8}),
+                                  .endian(UVM_LITTLE_ENDIAN));
+% if block.blocks:
+
+    // create sub blocks and add their maps
+% endif
+% for b in block.blocks:
+    ${b.name} = ${gen_dv.bcname(b)}::type_id::create("${b.name}");
+    ${b.name}.configure(.parent(this));
+    ${b.name}.build(.base_addr(base_addr + ${gen_dv.sv_base_addr(b)}));
+    default_map.add_submap(.child_map(${b.name}.default_map),
+                           .offset(base_addr + ${gen_dv.sv_base_addr(b)}));
+% endfor
+% if block.regs:
+
+    // create registers
+% endif
+% for r in block.regs:
+<%
+  reg_name = r.name
+  reg_right = r.dvrights
+  reg_width = block.width
+  reg_offset =  str(reg_width) + "'h" + "%x" % r.offset
+%>\
+    ${reg_name} = ${gen_dv.rcname(block, r)}::type_id::create("${reg_name}");
+    ${reg_name}.configure(.blk_parent(this));
+    ${reg_name}.build();
+    default_map.add_reg(.rg(${reg_name}),
+                        .offset(${reg_offset}),
+                        .rights("${reg_right}"));
+% endfor
+% if block.wins:
+
+    // create memories
+% endif
+% for w in block.wins:
+<%
+  mem_name = w.name.lower()
+  mem_right = w.dvrights.upper()
+  mem_offset = str(block.width) + "'h" + "%x" % w.base_addr
+  mem_n_bits = w.n_bits
+  mem_size = int((w.limit_addr - w.base_addr) / (mem_n_bits / 8))
+%>\
+    ${mem_name} = ${gen_dv.mcname(block, w)}::type_id::create("${mem_name}");
+    ${mem_name}.configure(.parent(this));
+    default_map.add_mem(.mem(${mem_name}),
+                        .offset(${mem_offset}),
+                        .rights("${mem_right}"));
+% endfor
+  endfunction : build
+
+endclass : ${gen_dv.bcname(block)}\
+</%def>\
+
+// Forward declare all register/memory/block classes
+${typedefs(block)}
+${construct_classes(block)}
diff --git a/util/reggen/validate.py b/util/reggen/validate.py
new file mode 100644
index 0000000..da45c21
--- /dev/null
+++ b/util/reggen/validate.py
@@ -0,0 +1,1206 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Register json validation
+"""
+
+import logging as log
+import sys
+
+from reggen.field_enums import *
+
+
+# Routine that can be used for hjson object_pairs_hook
+# The baseline is dict(pairs) i.e. construct a dictionary from pairs
+# The usual is OrderedDict(pairs), which is redundant in recent Python
+# Both of these silently allow repeated keys, which this version detects
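+# Illustrative usage (assuming the json-style loader API that hjson mirrors):
+#   obj = hjson.loads(text, object_pairs_hook=checking_dict)
+# e.g. the pairs [("a", 1), ("a", 2)] become {"a": 1, "Repeateda": 2}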
+def checking_dict(pairs):
+    d = {}
+    for x in pairs:
+        if x[0] in d:
+            repkey = 'Repeated' + x[0]
+            log.warning("Repeated key " + x[0] + " added as " + repkey)
+            d[repkey] = x[1]
+        else:
+            d[x[0]] = x[1]
+    return d
+
+
+# validating version of int(x, 0)
+# returns int value, error flag
+# if error flag is True value will be zero
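+# Illustrative examples ("CTRL" here is just a message prefix):
+#   check_int("0x10", "CTRL") -> (16, False)
+#   check_int("0y10", "CTRL") -> (0, True) and an error is logged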
+def check_int(x, err_prefix):
+    if isinstance(x, int):
+        return x, False
+    if x[0] == '0' and len(x) > 2:
+        if x[1] in 'bB':
+            validch = '01'
+        elif x[1] in 'oO':
+            validch = '01234567'
+        elif x[1] in 'xX':
+            validch = '0123456789abcdefABCDEF'
+        else:
+            log.error(err_prefix +
+                      ": int must start with a digit, 0b, 0B, 0o, 0O, 0x or 0X")
+            return 0, True
+        for c in x[2:]:
+            if not c in validch:
+                log.error(err_prefix + ": Bad character " + c + " in " + x)
+                return 0, True
+    else:
+        if not x.isdecimal():
+            log.error(err_prefix + ": Number not valid int " + x)
+            return 0, True
+    return int(x, 0), False
+
+
+def check_bool(x, err_prefix):
+    """check_bool checks if input 'x' is one of the list:
+        "true", "false"
+
+        It returns the value as a bool and an error flag.
+    """
+    if isinstance(x, bool):
+        # if Bool returns as it is
+        return x, False
+    if not x.lower() in ["true", "false"]:
+        log.error(err_prefix + ": Bad field value " + x)
+        return False, True
+    else:
+        return (x.lower() == "true"), False
+
+
+def check_ln(obj, x, withwidth, err_prefix):
+    error = 0
+    if not isinstance(obj[x], list):
+        log.error(err_prefix + ' element ' + x + ' not a list')
+        return 1
+    for y in obj[x]:
+        error += check_keys(y, ln_required, ln_optional if withwidth else {},
+                            {}, err_prefix + ' element ' + x)
+        if withwidth:
+            if 'width' in y:
+                w, err = check_int(y['width'], err_prefix + ' width in ' + x)
+                if err:
+                    error += 1
+                    w = 1
+            else:
+                w = 1
+            y['width'] = str(w)
+
+    return error
+
+
+def check_keys(obj, required_keys, optional_keys, added_keys, err_prefix):
+    error = 0
+    for x in required_keys:
+        if not x in obj:
+            error += 1
+            log.error(err_prefix + " missing required key " + x)
+    for x in obj:
+        type = ''
+        if x in required_keys:
+            type = required_keys[x][0]
+        elif x in optional_keys:
+            type = optional_keys[x][0]
+        elif not x in added_keys:
+            log.warning(err_prefix + " contains extra key " + x)
+        if type[:2] == 'ln':
+            error += check_ln(obj, x, type == 'lnw', err_prefix)
+    return error
+
+
+# Only allow zero or one of the list of keys
+def check_zero_one_key(obj, optone, err_prefix):
+    error = 0
+    seenopt = 0
+    for x in obj:
+        if (x in optone):
+            seenopt += 1
+    if (seenopt > 1) or ((seenopt == 1) and len(obj) > 1):
+        log.error(err_prefix + " only allowed one option key: ")
+        for x in obj:
+            log.error(err_prefix + "   found: " + x)
+            error += 1
+    return error
+
+
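+# Shift a bit number or msb:lsb range string up by num,
+# e.g. bitfield_add("5:3", 8) -> "13:11", bitfield_add("0", 16) -> "16"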
+def bitfield_add(bfield, num):
+    if ':' in bfield:
+        brange = bfield.partition(':')
+        msb = brange[0]
+        lsb = brange[2]
+        return str(int(msb) + num) + ':' + str(int(lsb) + num)
+    else:
+        return str(int(bfield) + num)
+
+
+# get_bits to give a sort key
+def get_bits(x):
+    pos = x['bits'].find(':')
+    if pos < 0:
+        return int(x['bits'])
+    else:
+        return int(x['bits'][:pos])
+
+
+# returns tuple (bitfield_mask, field width, lsb)
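+# e.g. bitmask("5:3") -> (0x38, 3, 3), bitmask("2") -> (0x4, 1, 2)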
+def bitmask(bfield):
+    if ':' in bfield:
+        brange = bfield.partition(':')
+        msb = brange[0]
+        lsb = brange[2]
+        res = 0
+        if ((not msb.isdecimal()) or (not lsb.isdecimal()) or
+            (int(lsb) > int(msb))):
+            log.error("Bad bit range " + bfield + str(brange))
+            return (0, 0, 0)
+        else:
+            for i in range(int(lsb), int(msb) + 1):
+                res |= (1 << i)
+        return (res, int(msb) - int(lsb) + 1, int(lsb))
+    if (not bfield.isdecimal()):
+        log.error("Bad bit number " + bfield)
+        return (0, 0, 0)
+    else:
+        return (1 << int(bfield), 1, int(bfield))
+
+
+val_types = {
+    'd': ["int", "integer (binary 0b, octal 0o, decimal, hex 0x)"],
+    'x': ["xint", "x for undefined otherwise int"],
+    'b': [
+        "bitrange", "bit number as decimal integer, \
+                    or bit-range as decimal integers msb:lsb"
+    ],
+    'l': ["list", "comma separated list enclosed in `[]`"],
+    'ln': ["name list", 'comma separated list enclosed in `[]` of '\
+           'one or more groups that have just name and desc keys.'\
+           ' e.g. `{ name: "name", desc: "description"}`'],
+    'lnw': ["name list+", 'name list that optionally contains a width'],
+    'g': ["group", "comma separated group of key:value enclosed in `{}`"],
+    's': ["string", "string, typically short"],
+    't': ["text", "string, may be multi-line enclosed in `'''` "\
+          "may use `**bold**`, `*italic*` or `!!Reg` markup"],
+    'T': ["tuple", "tuple enclosed in ()"],
+    'pi': ["python int", "Native python type int (generated)"],
+    'pb': ["python Bool", "Native python type Bool (generated)"],
+    'pl': ["python list", "Native python type list (generated)"],
+    'pe': ["python enum", "Native python type enum (generated)"]
+}
+
+# Toplevel keys
+top_required = {
+    'name': ['s', "name of the component"],
+    'clock_primary': ['s', "name of the primary clock"],
+    'bus_device': ['s', "name of the bus interface for the device"],
+    'registers': [
+        'l', "list of register definition groups and \
+                              offset control groups"
+    ]
+}
+top_optional = {
+    'bus_host': ['s', "name of the bus interface as host"],
+    'other_clock_list': ['l', "list of other chip clocks needed"],
+    'available_input_list': ['lnw', "list of available peripheral inputs"],
+    'available_output_list': ['lnw', "list of available peripheral outputs"],
+    'available_inout_list': ['lnw', "list of available peripheral inouts"],
+    'interrupt_list': ['lnw', "list of peripheral interrupts"],
+    'no_auto_intr_regs': ['s', "Set to true to suppress automatic "\
+                          "generation of interrupt registers. " \
+                          "Defaults to false if not present."],
+    'alert_list': ['ln', "list of peripheral alerts"],
+    'regwidth': ['d', "width of registers in bits (default 32)"],
+    'SPDX-License-Identifier': ['s', "License identifier (if using pure json) "\
+                                "Only use this if unable to put this "\
+                                "information in a comment at the top of the "\
+                                "file."]
+}
+top_added = {'genrnames': ['pl', "list of register names"],
+             'genautoregs': ['pb', "Registers were generated from config info"],
+             'genwennames': ['pl', "list of registers used as write enables"],
+             'gennextoffset': ['pi', "offset next register would use"],
+             'gensize': ['pi', "address space size needed for registers. "\
+                              "Generated by tool as next power of 2."]
+             }
+
+# ln type has list of groups with only name and description
+# (was called "subunit" in cfg_validate)
+ln_required = {
+    'name': ['s', "name of the item"],
+    'desc': ['s', "description of the item"],
+}
+ln_optional = {
+    'width': ['d', "bit width of the item (if not 1)"],
+}
+
+# Registers list may have embedded keys
+list_optone = {'reserved': ['d', "number of registers to reserve space for"],
+              'skipto':    ['d', "set next register offset to value"],
+              'sameaddr':  ['l', "list of register definition groups "\
+                            "that share the same offset"],
+              'window':    ['g', "group defining an address range "\
+                            "for something other than standard registers"],
+              'multireg':  ['g', "group defining registers generated "\
+                            "from a base instance."]
+               }
+
+# Register keys
+reg_required = {
+    'name': ['s', "name of the register"],
+    'desc': ['t', "description of the register"],
+    'fields': ['l', "list of register field description groups"]
+}
+
+reg_optional = {
+    'swaccess': [
+        's', "software access permission to use for " +
+        "fields that don't specify swaccess"
+    ],
+    'hwaccess': [
+        's', "hardware access permission to use for " +
+        "fields that don't specify hwaccess"
+    ],
+    'hwext': [
+        's',
+        "'true' if the register is stored out side " + "of the register module"
+    ],
+    'hwqe': [
+        's', "'true' if hardware uses 'q' enable signal, " +
+        "which is latched signal of software write pulse."
+    ],
+    'hwre': [
+        's', "'true' if hardware uses 're' signal, " +
+        "which is latched signal of software read pulse."
+    ],
+    'regwen': [
+        's', "if register is write-protected by another register, that " +
+        "register name should be given here. empty-string for no register " +
+        "write protection"
+    ],
+    'resval': ['d', "reset value of full register (default 0)"]
+}
+reg_added = {
+    'genresval': ['pi', "reset value generated from resval and fields"],
+    'genresmask': ['pi', "mask of bits with valid reset value (not x)"],
+    'genbitsused': ['pi', "mask of bits defined in the register"],
+    'genoffset': ['pi', "offset address of the register"],
+    'genbasebits': ['pi', "multireg only: mask of base bits defined"],
+    'gendvrights': ['s', "SW Rights used in UVM reg class"]
+}
+
+# Window keys
+window_required = {
+    'name': ['s', "Name of the window"],
+    'desc': ['t', "description of the window"],
+    'items': ['d', "size in fieldaccess width words of the window"],
+    'swaccess': ['s', "software access permitted"],
+}
+
+# TODO potential for additional optional to give more type info?
+# eg sram-hw-port: "none", "sync", "async"
+window_optional = {'byte-write': ['s', "True if byte writes are supported. "\
+                                  "Defaults to false if not present."],
+                   'validbits': ['d', "Number of valid data bits within "\
+                                 "regwidth sized word. "\
+                                 "Defaults to regwidth. If "\
+                                 "smaller than the regwidth then in each "\
+                                 "word of the window bits "\
+                                 "[regwidth-1:validbits] are unused and "\
+                                 "bits [validbits-1:0] are valid."],
+                   'noalign': ['s', "Set to True to prevent tool aligning "\
+                               "the base address of the window. "\
+                               "Defaults to false if not present."],
+                   'unusual': ['s', "True if window has unusual parameters "\
+                               "(set to prevent Unusual: errors). "\
+                               "Defaults to false if not present."]
+                  }
+
+window_added = {'genbyte-write': ['pb', "generated boolean for byte-write "],
+                'genvalidbits': ['pi', "valid data width"],
+                'genoffset': ['pi', "base offset address of the window "\
+                              "(aligned for size)"],
+                'genswaccess': ['pe', "Software access (gen enum)"],
+                'genswwraccess': ['pe', "Software write access (gen enum)"],
+                'genswrdaccess': ['pe', "Software read access (gen enum)"]
+               }
+
+# Multireg keys
+multireg_required = {'name':   ['s', "base name of the registers"],
+                     'desc':   ['t', "description of the registers"],
+                     'count':  ['d', "number of instances to generate"],
+                     'cname':  ['s', "base name for each instance, mostly "\
+                                "useful for referring to an instance in messages"],
+                     'fields': ['l', "list of register field description "\
+                                "groups. Describes bit positions used for"\
+                                " the base instance."]
+                     }
+multireg_optional = reg_optional
+multireg_added = {'genregs': ['l',
+                              "generated list of registers with required "\
+                              "and added keys"]
+                  }
+
+# Field keys
+# special case in the code, no name and no desc if only field
+field_required = {
+    'name': ['s', "name of the field (optional if only field)"],
+    'desc': ['t', "description of field (optional if no name)"],
+    'bits': ['b', "bit or bit range (msb:lsb)"]
+}
+field_optional = {
+    'swaccess': [
+        's', "software access permission, copied from "
+        "register if not provided in field. "
+        "(Tool adds if not provided.)"
+    ],
+    'hwaccess': [
+        's', "hardware access permission, copied from "
+        "register if not prvided in field. "
+        "(Tool adds if not provided.)"
+    ],
+    'resval': [
+        'x', "reset value, comes from register resval "
+        "if not provided in field. Zero if neither "
+        "are provided and the field is readable, "
+        "x if neither are provided and the field "
+        "is wo. Must match if both are provided."
+    ],
+    'enum': ['l', "list of permitted enumeration groups"]
+}
+field_added = {
+    'genrsvdenum': ['pb', "enum did not cover every possible value"],
+    'genresval': [
+        'pi', "resval for field constructed by the tool. "
+        "Will be set to 0 for x."
+    ],
+    'genresvalx': ['pb', "Indicates if resval is x"],
+    'genswaccess': ['pe', "Software access (generated enum)"],
+    'genswwraccess': ['pe', "Software write access (generated enum)"],
+    'genswrdaccess': ['pe', "Software read access (generated enum)"],
+    'genhwaccess': ['pe', "Hardware access (generated Enum)"],
+    'genhwqe': ['pb', "Hardware qualifier enable signal needed"],
+    'genhwre': ['pb', "Hardware read enable signal needed"],
+    'bitinfo': ['T', "tuple (bitfield_mask, field width, lsb)"]
+}
+
+# Enum keys
+enum_required = {
+    'name': ['s', "name of the member of the enum"],
+    'desc': ['t', "description when field has this value"],
+    'value': ['d', "value of this member of the enum"]
+}
+enum_optional = {}
+enum_added = {}
+
+# swaccess permitted values
+# text description, access enum, wr access enum, rd access enum, ok in window
+swaccess_permitted = {
+    'ro':    ("Read Only",
+                        SwAccess.RO,  SwWrAccess.NONE, SwRdAccess.RD,   True),
+    'rc':    ("Read Only, reading clears",
+                        SwAccess.RC,  SwWrAccess.WR,   SwRdAccess.RC,   False),
+    'rw':    ("Read/Write",
+                        SwAccess.RW,  SwWrAccess.WR,   SwRdAccess.RD,   True),
+    'r0w1c': ("Read zero, Write with 1 clears",
+                        SwAccess.W1C, SwWrAccess.WR,   SwRdAccess.NONE, False),
+    'rw1s':  ("Read, Write with 1 sets",
+                        SwAccess.W1S, SwWrAccess.WR,   SwRdAccess.RD,   False),
+    'rw1c':  ("Read, Write with 1 clears",
+                        SwAccess.W1C, SwWrAccess.WR,   SwRdAccess.RD,   False),
+    'rw0c':  ("Read, Write with 0 clears",
+                        SwAccess.W0C, SwWrAccess.WR,   SwRdAccess.RD,   False),
+    'wo':    ("Write Only",
+                        SwAccess.WO,  SwWrAccess.WR,   SwRdAccess.NONE, True)
+} # yapf: disable
+
+# hwaccess permitted values
+hwaccess_permitted = {
+    'hro': ("Read Only", HwAccess.HRO),
+    'hrw': ("Read/Write", HwAccess.HRW),
+    'hwo': ("Write Only", HwAccess.HWO),
+    'none': ("No Access Needed", HwAccess.NONE)
+}
+
+key_use = {'r': "required", 'o': "optional", 'a': "added by tool"}
+
+# Register names that are prohibited (reserved keywords in SystemVerilog)
+keywords_verilog = [
+    'alias', 'always', 'always_comb', 'always_ff', 'always_latch', 'and',
+    'assert', 'assign', 'assume', 'automatic', 'before', 'begin', 'bind',
+    'bins', 'binsof', 'bit', 'break', 'buf', 'bufif0', 'bufif1', 'byte',
+    'case', 'casex', 'casez', 'cell', 'chandle', 'class', 'clocking', 'cmos',
+    'config', 'const', 'constraint', 'context', 'continue', 'cover',
+    'covergroup', 'coverpoint', 'cross', 'deassign', 'default', 'defparam',
+    'design', 'disable', 'dist', 'do', 'edge', 'else', 'end', 'endcase',
+    'endclass', 'endclocking', 'endconfig', 'endfunction', 'endgenerate',
+    'endgroup', 'endinterface', 'endmodule', 'endpackage', 'endprimitive',
+    'endprogram', 'endproperty', 'endspecify', 'endsequence', 'endtable',
+    'endtask', 'enum', 'event', 'expect', 'export', 'extends', 'extern',
+    'final', 'first_match', 'for', 'force', 'foreach', 'forever', 'fork',
+    'forkjoin', 'function', 'generate', 'genvar', 'highz0', 'highz1', 'if',
+    'iff', 'ifnone', 'ignore_bins', 'illegal_bins', 'import', 'incdir',
+    'include', 'initial', 'inout', 'input', 'inside', 'instance', 'int',
+    'integer', 'interface', 'intersect', 'join', 'join_any', 'join_none',
+    'large', 'liblist', 'library', 'local', 'localparam', 'logic', 'longint',
+    'macromodule', 'matches', 'medium', 'modport', 'module', 'nand', 'negedge',
+    'new', 'nmos', 'nor', 'noshowcancelled', 'not', 'notif0', 'notif1', 'null',
+    'or', 'output', 'package', 'packed', 'parameter', 'pmos', 'posedge',
+    'primitive', 'priority', 'program', 'property', 'protected', 'pull0',
+    'pull1', 'pulldown', 'pullup', 'pulsestyle_onevent', 'pulsestyle_ondetect',
+    'pure', 'rand', 'randc', 'randcase', 'randsequence', 'rcmos', 'real',
+    'realtime', 'ref', 'reg', 'release', 'repeat', 'return', 'rnmos', 'rpmos',
+    'rtran', 'rtranif0', 'rtranif1', 'scalared', 'sequence', 'shortint',
+    'shortreal', 'showcancelled', 'signed', 'small', 'solve', 'specify',
+    'specparam', 'static', 'string', 'strong0', 'strong1', 'struct', 'super',
+    'supply0', 'supply1', 'table', 'tagged', 'task', 'this', 'throughout',
+    'time', 'timeprecision', 'timeunit', 'tran', 'tranif0', 'tranif1', 'tri',
+    'tri0', 'tri1', 'triand', 'trior', 'trireg', 'type', 'typedef', 'union',
+    'unique', 'unsigned', 'use', 'uwire', 'var', 'vectored', 'virtual', 'void',
+    'wait', 'wait_order', 'wand', 'weak0', 'weak1', 'while', 'wildcard',
+    'wire', 'with', 'within', 'wor', 'xnor', 'xor'
+]
+
+
+def validate_fields(fields, rname, default_sw, default_hw, full_resval,
+                    reg_hwqe, reg_hwre, width):
+    error = 0
+    bits_used = 0
+    gen_resval = 0
+    gen_resmask = 0
+    fcount = 0
+
+    fieldnames = []
+    if len(fields) == 0:
+        log.warning(rname + " fields is empty")
+
+    for field in fields:
+        fcount += 1
+        if not 'name' in field:
+            fname = rname + ".field" + str(fcount)
+            if (len(fields) == 1):
+                field['name'] = rname
+                # only allow no desc if no name
+                if not 'desc' in field:
+                    field['desc'] = ""
+        else:
+            fname = field['name']
+            if fname in keywords_verilog:
+                error += 1
+                log.error(rname + " field " + fname + " uses verilog keywords")
+            if (fname == ""):
+                fname = rname + ".field" + str(fcount)
+            else:
+                if fname in fieldnames:
+                    error += 1
+                    log.error(rname + " field " + str(fcount) +
+                              ": duplicate use of field name " + fname)
+                else:
+                    fieldnames.append(fname)
+                fname = rname + "." + fname
+        ck_err = check_keys(field, field_required, field_optional, field_added,
+                            fname)
+        if (ck_err != 0):
+            error += ck_err
+            continue
+
+        if not 'swaccess' in field:
+            if (default_sw == None):
+                error += 1
+                log.error(fname + ": no swaccess or register default swaccess")
+                swaccess = "wo"
+            else:
+                log.info(fname + ": use register default swaccess")
+                field['swaccess'] = default_sw
+                swaccess = default_sw
+        else:
+            swaccess = field['swaccess']
+            if (not swaccess in swaccess_permitted):
+                error += 1
+                log.error(fname + ": Bad field swaccess value " + swaccess)
+                swaccess = "wo"
+        swacc_info = swaccess_permitted[swaccess]
+        field['genswaccess'] = swacc_info[1]
+        field['genswwraccess'] = swacc_info[2]
+        field['genswrdaccess'] = swacc_info[3]
+
+        if not 'hwaccess' in field:
+            if (default_hw == None):
+                error += 1
+                log.error(fname + ": no hwaccess or register default hwaccess")
+                hwaccess = "hro"
+            else:
+                log.info(fname + ": use register default hwaccess")
+                field['hwaccess'] = default_hw
+                hwaccess = default_hw
+        else:
+            hwaccess = field['hwaccess']
+            if (not hwaccess in hwaccess_permitted):
+                error += 1
+                log.error(fname + ": Bad field hwaccess value " + hwaccess)
+                hwaccess = "hro"
+        hwacc_info = hwaccess_permitted[hwaccess]
+        field['genhwaccess'] = hwacc_info[1]
+        field['genhwqe'] = reg_hwqe
+        field['genhwre'] = reg_hwre
+
+        # allow an int but make a string for all downstream users
+        if isinstance(field['bits'], int):
+            field['bits'] = str(field['bits'])
+        field_bits = bitmask(field['bits'])
+        if (field_bits[0] == 0):
+            error += 1
+        else:
+            reuse_check = bits_used & field_bits[0]
+            # > is correct here because the check is of the bit
+            # above the msb. The equal case is thus valid
+            if ((field_bits[1] + field_bits[2]) > width):
+                error += 1
+                log.error(fname + ": Register not wide enough for bits: " +
+                          field['bits'])
+            elif reuse_check != 0:
+                error += 1
+                log.error(fname + ": Defines already defined bits " +
+                          hex(reuse_check))
+            bits_used |= field_bits[0]
+            field['bitinfo'] = field_bits
+        max_in_field = (1 << field_bits[1]) - 1
+
+        if 'resval' in field:
+            if field['resval'] != "x":
+                resval, ierr = check_int(field['resval'], fname + " resval")
+                if ierr:
+                    error += 1
+                if (resval > max_in_field):
+                    error += 1
+                    log.error(fname + ": Reset value " + field['resval'] +
+                              " greater than max field can hold (" +
+                              hex(max_in_field) + ")")
+                    resval &= max_in_field
+
+                if ((full_resval != None) and
+                    (resval !=
+                     ((full_resval >> field_bits[2]) & max_in_field))):
+                    error += 1
+                    log.error(fname + ": Field resval " + field['resval'] +
+                              " differs from value in main register resval " +
+                              hex(full_resval))
+                gen_resval |= resval << field_bits[2]
+                gen_resmask |= field_bits[0]
+                field['genresval'] = resval
+                field['genresvalx'] = False
+            else:
+                field['genresval'] = 0
+                field['genresvalx'] = True
+        else:
+            if (full_resval != None):
+                resval = (full_resval >> field_bits[2]) & max_in_field
+                gen_resval |= resval << field_bits[2]
+                gen_resmask |= field_bits[0]
+                field['genresval'] = resval
+                field['genresvalx'] = False
+                log.info(fname + ": use register default genresval")
+            else:
+                if swaccess[0] != 'w':
+                    field['genresval'] = 0
+                    field['genresvalx'] = False
+                    log.info(fname + ": use zero genresval")
+                    gen_resmask |= field_bits[0]
+                else:
+                    field['genresval'] = 0
+                    field['genresvalx'] = True
+                    log.info(fname + ": use x genresval")
+
+        if 'enum' in field:
+            if max_in_field > 127:
+                log.warning(fname + ": enum too big for checking.")
+                enum_mask = 0
+            else:
+                enum_mask = (1 << (max_in_field + 1)) - 1
+            for enum in field['enum']:
+                eck_err = check_keys(enum, enum_required, [], [],
+                                     fname + " enum")
+                if (eck_err != 0):
+                    error += eck_err
+                    continue
+                ename = enum['name']
+                val, ierr = check_int(enum['value'], fname + "." + ename)
+                if ierr:
+                    error += 1
+                if (val > max_in_field):
+                    error += 1
+                    log.error(fname + ": enum value " + str(val) + " too big")
+                elif max_in_field <= 127:
+                    valbit = 1 << val
+                    if ((enum_mask & valbit) == 0):
+                        log.warning(fname + ": enum has multiple " + str(val))
+                    else:
+                        enum_mask ^= valbit
+
+            if (enum_mask != 0):
+                field['genrsvdenum'] = True
+                log.info(fname + ": Enum values not complete. Mask " +
+                         hex(enum_mask))
+
+    return error, gen_resval, gen_resmask, bits_used
+
+
+def parse_dvrights(field=None):
+    if field == None:
+        return "RO"
+    elif field in ['ro', 'rc']:
+        return "RO"
+    elif field in ['rw', 'r0w1c', 'rw1s', 'rw1c', 'rw0c']:
+        return "RW"
+    else:
+        return "WO"
+
+
+def validate_reg_defaults(reg, rname):
+    error = 0
+    if 'swaccess' in reg:
+        default_sw = reg['swaccess']
+        if (not default_sw in swaccess_permitted):
+            error += 1
+            log.error(rname + ": Bad register swaccess value " + default_sw)
+            default_sw = None
+    else:
+        default_sw = None
+
+    if 'hwaccess' in reg:
+        default_hw = reg['hwaccess']
+        if (not default_hw in hwaccess_permitted):
+            error += 1
+            log.error(rname + ": Bad register hwaccess value " + default_hw)
+            default_hw = None
+    else:
+        default_hw = "hro"  # Read-Only
+
+    if 'hwext' in reg:
+        hwext, ierr = check_bool(reg['hwext'], rname + " hwext")
+        if ierr:
+            error += 1
+            reg['hwext'] = "false"
+    else:
+        reg['hwext'] = "false"
+
+    if 'hwqe' in reg:
+        hwqe, ierr = check_bool(reg['hwqe'], rname + " hwqe")
+
+        if ierr:
+            error += 1
+            reg['hwqe'] = "false"
+        elif hwqe == False and reg[
+                'hwext'] == "true" and reg['swaccess'] != "ro":
+            log.warning(rname + ": hwqe must be true for hwext register. " +
+                        "Changing it to true.")
+            reg['hwqe'] = "true"
+    elif reg['hwext'] == "true" and reg['swaccess'] != "ro":
+        log.warning(rname + ": hwqe not provided but must be true for "\
+                    "hwext not read-only register. Setting it to true.")
+        reg['hwqe'] = "true"
+    else:
+        reg['hwqe'] = "false"
+
+    if 'hwre' in reg:
+        hwre, ierr = check_bool(reg['hwre'], rname + " hwre")
+
+        if ierr:
+            error += 1
+            reg['hwre'] = "false"
+        elif hwre == True and reg['hwext'] == "false":
+            log.warning(rname + ": hwre cannot be used with hwext. " +
+                        "Changing it to false.")
+            reg['hwre'] = "false"
+    else:
+        reg['hwre'] = "false"
+
+    if 'regwen' not in reg:
+        reg['regwen'] = ''
+
+    if 'resval' in reg:
+        full_resval, ierr = check_int(reg['resval'], rname + " resval")
+        if ierr:
+            error += 1
+            full_resval = None
+    else:
+        full_resval = None
+
+    return error, default_sw, default_hw, full_resval
+
+
+def validate_register(reg, offset, width, top):
+    error = 0
+
+    if not 'name' in reg:
+        rname = "Register at +" + hex(offset)
+    else:
+        rname = reg['name']
+        if rname in keywords_verilog:
+            error += 1
+            log.error("Register at +" + hex(offset) + rname +
+                      " uses verilog keywords")
+        if rname.lower() in top['genrnames']:
+            error += 1
+            log.error("Register at +" + hex(offset) + " duplicate name " +
+                      rname)
+        else:
+            top['genrnames'].append(rname.lower())
+
+    error += check_keys(reg, reg_required, reg_optional, reg_added, rname)
+
+    derr, default_sw, default_hw, full_resval = validate_reg_defaults(
+        reg, rname)
+    error += derr
+
+    # if there was an error before this then can't trust anything!
+    if error > 0:
+        log.info(rname + "@" + hex(offset) + " " + str(error) +
+                 " top level errors. Not processing fields")
+        return error
+
+    gen = validate_fields(reg['fields'], rname, default_sw, default_hw,
+                          full_resval, reg['hwqe'] == "true",
+                          reg['hwre'] == "true", width)
+    error = error + gen[0]
+    # ensure the fields are in order (except if error which could be bad bits)
+    if error == 0:
+        reg['fields'].sort(key=get_bits)
+    reg['genresval'] = gen[1]
+    reg['genresmask'] = gen[2]
+    reg['genbitsused'] = gen[3]
+    reg['genoffset'] = offset
+    reg['gendvrights'] = parse_dvrights(default_sw)
+
+    if ((reg['regwen'] != '') and
+        (not reg['regwen'] in top['genwennames'])):
+        top['genwennames'].append(reg['regwen'])
+
+    log.info(rname + "@" + hex(offset) + " " + str(error) + " errors. Mask " +
+             hex(gen[3]))
+
+    return error
+
+
+def validate_multi(mreg, offset, addrsep, width, top):
+    error = 0
+    bits_used = 0
+
+    if not 'name' in mreg:
+        mrname = "MultiRegister at +" + hex(offset)
+    else:
+        mrname = mreg['name']
+    error = check_keys(mreg, multireg_required, multireg_optional,
+                       multireg_added, mrname)
+    derr, default_sw, default_hw, full_resval = validate_reg_defaults(
+        mreg, mrname)
+    error += derr
+
+    # if there was an error before this then can't trust anything!
+    if error > 0:
+        log.info(mrname + "@" + hex(offset) + " " + str(error) +
+                 " top level errors. Not processing fields")
+        return error
+
+    gen = validate_fields(mreg['fields'], mrname, default_sw, default_hw,
+                          full_resval, mreg['hwqe'] == "true",
+                          mreg['hwre'] == "true", width)
+
+    error += gen[0]
+
+    mcount, ierr = check_int(mreg['count'], mrname + " multireg count")
+    if ierr:
+        error += 1
+
+    if error > 0:
+        return (error, 0)
+    bused = gen[3]
+    max_rval = (1 << width) - 1
+    cname = mreg['cname']
+    bpos = 0
+    inum = 0
+    rlist = []
+    rnum = 0
+    while inum < mcount:
+        closereg = False
+        if bpos == 0:
+            genreg = {}
+            genreg['name'] = mrname + str(rnum)
+            genreg['desc'] = mreg['desc']
+            genreg['hwext'] = mreg['hwext']
+            genreg['hwqe'] = mreg['hwqe']
+            genreg['hwre'] = mreg['hwre']
+            genreg['regwen'] = mreg['regwen']
+            resval = 0
+            resmask = 0
+            bits_used = 0
+            genfields = []
+
+        while bpos < width:
+            trypos = bused << bpos
+            if trypos > max_rval:
+                bpos = width
+                break
+            if (trypos & bits_used) == 0:
+                break
+            bpos += 1
+        if bpos < width:
+            # found a spot
+            for fn in mreg['fields']:
+                newf = fn.copy()
+                newf['name'] += str(inum)
+                if bpos != 0:
+                    newf['bits'] = bitfield_add(newf['bits'], bpos)
+                    newf['desc'] = 'for ' + cname + str(inum)
+                    newf['bitinfo'] = (newf['bitinfo'][0] << bpos,
+                                       newf['bitinfo'][1],
+                                       newf['bitinfo'][2] + bpos)
+                    if 'enum' in newf:
+                        del newf['enum']
+                else:
+                    newf['desc'] += ' for ' + cname + str(inum)
+                genfields.append(newf)
+            bits_used = bits_used | bused << bpos
+            resval = resval | gen[1] << bpos
+            resmask = resmask | gen[2] << bpos
+            bpos += 1
+            inum += 1
+            if inum == mcount:
+                closereg = True
+        else:
+            # need new register
+            closereg = True
+
+        if closereg:
+            genreg['genresval'] = resval
+            genreg['genresmask'] = resmask
+            genreg['genbitsused'] = bits_used
+            genreg['genbasebits'] = bused
+            genreg['genoffset'] = offset + (rnum * addrsep)
+            genreg['gendvrights'] = parse_dvrights(default_sw)
+            genfields.sort(key=get_bits)
+            genreg['fields'] = genfields
+            rnum += 1
+            bpos = 0
+            rlist.append(genreg)
+            top['genrnames'].append(genreg['name'].lower())
+    if len(rlist) == 1:
+        rlist[0]['name'] = mrname
+        top['genrnames'].pop()
+    mreg['genregs'] = rlist
+    top['genrnames'].append(mrname.lower())
+    return error, rnum
+
+
+def make_intr_reg(regs, name, offset, swaccess, hwaccess, desc):
+    intrs = regs['interrupt_list']
+    genreg = {}
+    genreg['name'] = name
+    genreg['desc'] = desc
+    genreg['hwext'] = 'true' if name == 'INTR_TEST' else 'false'
+    genreg['hwqe'] = 'true' if name == 'INTR_TEST' else 'false'
+    genreg['hwre'] = 'false'
+    bits_used = 0
+    genfields = []
+    cur_bit = 0
+    for bit in intrs:
+        newf = {}
+        newf['name'] = bit['name']
+        w = 1
+        if 'width' in bit and bit['width'] != '1':
+            w = int(bit['width'], 0)
+            newf['bits'] = str(cur_bit + w - 1) + ':' + str(cur_bit)
+            newf['bitinfo'] = (((1 << w) - 1) << cur_bit, w, cur_bit)
+        else:
+            newf['bits'] = str(cur_bit)
+            newf['bitinfo'] = (1 << cur_bit, 1, cur_bit)
+        if name == 'INTR_ENABLE':
+            newf['desc'] = 'Enable interrupt when ' + \
+                           ('corresponding bit in ' if w > 1 else '') + \
+                           '!!INTR_STATE.' + newf['name'] + ' is set'
+        elif name == 'INTR_TEST':
+            newf['desc'] = 'Write 1 to force ' + \
+                           ('corresponding bit in ' if w > 1 else '') + \
+                           '!!INTR_STATE.' + newf['name'] + ' to 1'
+        else:
+            newf['desc'] = bit['desc']
+        newf['swaccess'] = swaccess
+        swacc_info = swaccess_permitted[swaccess]
+        newf['genswaccess'] = swacc_info[1]
+        newf['genswwraccess'] = swacc_info[2]
+        newf['genswrdaccess'] = swacc_info[3]
+        newf['hwaccess'] = hwaccess
+        hwacc_info = hwaccess_permitted[hwaccess]
+        newf['genhwaccess'] = hwacc_info[1]
+        newf['genhwqe'] = True if name == 'INTR_TEST' else False
+        newf['genhwre'] = False
+        newf['genresval'] = 0
+        newf['genresvalx'] = False
+
+        bits_used = bits_used | (1 << cur_bit)
+        cur_bit += 1
+        genfields.append(newf)
+
+    genreg['genresval'] = 0
+    genreg['genresmask'] = bits_used
+    genreg['genbitsused'] = bits_used
+    genreg['genoffset'] = offset
+    genreg['gendvrights'] = parse_dvrights(swaccess)
+    genreg['fields'] = genfields
+    genreg['regwen'] = ''
+    regs['genrnames'].append(name.lower())
+    return genreg
+
+
+def make_intr_regs(regs, offset, addrsep, fullwidth):
+    iregs = []
+    intrs = regs['interrupt_list']
+    if len(intrs) > fullwidth:
+        log.error('More than ' + str(fullwidth) + ' interrupts in list')
+        return iregs, 1
+
+    iregs.append(
+        make_intr_reg(regs, 'INTR_STATE', offset, 'rw1c', 'hrw',
+                      'Interrupt State Register'))
+    iregs.append(
+        make_intr_reg(regs, 'INTR_ENABLE', offset + addrsep, 'rw', 'hro',
+                      'Interrupt Enable Register'))
+    iregs.append(
+        make_intr_reg(regs, 'INTR_TEST', offset + 2 * addrsep, 'wo', 'hro',
+                      'Interrupt Test Register'))
+    return iregs, 0
+
+
+def validate_window(win, offset, regwidth, top):
+    error = 0
+
+    if not 'name' in win:
+        name = "Window at +" + hex(offset)
+    else:
+        name = win['name']
+        if name.lower() in top['genrnames']:
+            error += 1
+            log.error("Window at +" + hex(offset) + " duplicate name " + name)
+        else:
+            top['genrnames'].append(name.lower())
+
+    error += check_keys(win, window_required, window_optional, window_added,
+                        name)
+
+    # if there was an error before this then can't trust anything!
+    if error > 0:
+        log.info(name + "@" + hex(offset) + " " + str(error) +
+                 " top level errors. Window will be ignored.")
+        return error, offset
+
+    # optional flags
+    unusual = 'unusual' in win and win['unusual'].lower() == "true"
+    noalign = 'noalign' in win and win['noalign'].lower() == "true"
+    win['genbyte-write'] = ('byte-write' in win and
+                            win['byte-write'].lower() == "true")
+
+    if 'validbits' in win:
+        wid, err = check_int(win['validbits'], name + " validbits")
+        if err:
+            error += err
+            wid = regwidth
+        if wid > regwidth:
+            error += 1
+            log.error(name + ": validbits " + str(wid) +
+                      " is greater than regwidth (" + str(regwidth) + ").")
+            wid = regwidth
+        win['genvalidbits'] = wid
+    else:
+        win['genvalidbits'] = regwidth
+
+    winitems, err = check_int(win['items'], name + " items")
+    if err:
+        error += err
+        winitems = 4
+    # convert items to bytes
+    winsize = winitems * (regwidth // 8)
+    # if size is not a power of two, po2_size is next po2 larger
+    po2_size = 1 << (winsize.bit_length() - 1)
+    if winsize != po2_size:
+        # the -1 above was wrong if not a power of two
+        po2_size = po2_size << 1
+        if not unusual:
+            log.warn(name + ": Unusual: Size " + str(winitems) +
+                     " is not a power of 2.")
+
+    if noalign:
+        genoff = offset
+        nextoff = offset + winsize
+    else:
+        # Align to ensure base address of first item in window has
+        # all zeros in the low bits
+        if (offset & (po2_size - 1)) != 0:
+            genoff = (offset | (po2_size - 1)) + 1
+        else:
+            genoff = offset
+        nextoff = genoff + winsize
+    win['genoffset'] = genoff
+
+    swaccess = win['swaccess']
+    if (not swaccess in swaccess_permitted):
+        log.warn(name + ": Bad window swaccess value " + swaccess)
+        swaccess = "wo"
+    swacc_info = swaccess_permitted[swaccess]
+    win['genswaccess'] = swacc_info[1]
+    win['genswwraccess'] = swacc_info[2]
+    win['genswrdaccess'] = swacc_info[3]
+    if not swacc_info[4] and not unusual:
+        log.warn(name + ": Unusual: access type for a window " + swaccess)
+
+    return error, nextoff
+
+
+def check_wen_regs(regs):
+    error = 0
+    for x in regs['genwennames']:
+        if not x.lower() in regs['genrnames']:
+            error += 1
+            log.error(x + " used as regwen but is not defined")
+        else:
+            for reg in regs['registers']:
+                if ('name' in reg):
+                    if (reg['name'] == x):
+                        break
+
+            if reg['genbitsused'] != 1:
+                error += 1
+                log.error(x + " used as regwen fails requirement to only " +
+                          "define bit 0")
+            elif reg['genresval'] != 1:
+                error += 1
+                log.error(x + " used as regwen fails requirement to default " +
+                          "to 1")
+            elif reg['fields'][0]['genswaccess'] != SwAccess.W1C:
+                error += 1
+                log.error(x + " used as regwen fails requirement to be " +
+                          "rw1c")
+    return error
+
+
+def validate(regs):
+    if not 'name' in regs:
+        log.error("Component has no name. Aborting.")
+        return 1
+    component = regs['name']
+
+    error = check_keys(regs, top_required, top_optional, top_added, component)
+    if (error > 0):
+        log.error("Component has top level errors. Aborting.")
+        return error
+    regs['genrnames'] = []
+    regs['genwennames'] = []
+    error = 0
+
+    if 'regwidth' in regs:
+        fullwidth, ierr = check_int(regs['regwidth'], "regwidth")
+        if ierr:
+            fullwidth = 32
+            error += 1
+    else:
+        fullwidth = 32
+        log.warning('regwidth not specified, assuming 32.')
+    regs['regwidth'] = str(fullwidth)
+
+    if ((fullwidth % 8) != 0):
+        addrsep = (fullwidth // 8) + 1
+        log.warning("regwidth is not a multiple of 8 bits!")
+    else:
+        addrsep = fullwidth // 8
+
+    offset = 0
+    autoregs = []
+
+    # auto header generation would go here and update autoregs
+
+    if 'no_auto_intr_regs' in regs:
+        no_autoi, err = check_bool(
+            regs['no_auto_intr_regs'], 'no_auto_intr_regs')
+        if err:
+            error += 1
+    else:
+        no_autoi = False
+    if 'interrupt_list' in regs and not 'genautoregs' in regs and not no_autoi:
+        iregs, err = make_intr_regs(regs, offset, addrsep, fullwidth)
+        error += err
+        autoregs.extend(iregs)
+        offset += addrsep * len(iregs)
+
+    for x in regs['registers']:
+        ck_err = check_zero_one_key(x, list_optone, "At " + hex(offset))
+        if ck_err != 0:
+            error += ck_err
+            continue
+
+        if 'reserved' in x:
+            nreserved, ierr = check_int(x['reserved'],
+                                        "Reserved at " + hex(offset))
+            if ierr:
+                error += 1
+            else:
+                offset = offset + (addrsep * nreserved)
+            continue
+
+        if 'skipto' in x:
+            skipto, ierr = check_int(x['skipto'], "skipto at " + hex(offset))
+            if ierr:
+                error += 1
+            elif (skipto <= offset):
+                log.error("{skipto " + x['skipto'] + "} at " + hex(offset) +
+                          " evaluates as " + hex(skipto) +
+                          " which would move backwards")
+                error += 1
+            elif (skipto % addrsep) != 0:
+                log.error("{skipto " + x['skipto'] + "} at " + hex(offset) +
+                          " evaluates as " + hex(skipto) +
+                          " which is not a multiple of the register size " +
+                          str(addrsep))
+                error += 1
+            else:
+                offset = skipto
+            continue
+
+        if 'sameaddr' in x:
+            for sareg in x['sameaddr']:
+                error += validate_register(sareg, offset, fullwidth, regs)
+            offset += addrsep
+            continue
+
+        if 'window' in x:
+            err, offset = validate_window(x['window'], offset, fullwidth, regs)
+            error += err
+            continue
+
+        if 'multireg' in x:
+            err, n = validate_multi(x['multireg'], offset, addrsep, fullwidth,
+                                    regs)
+            error += err
+            offset += addrsep * n
+            continue
+
+        error += validate_register(x, offset, fullwidth, regs)
+        offset += addrsep
+    regs['gennextoffset'] = offset
+    # subtract one so an offset already exactly on a power of 2 is not doubled
+    if (offset > 0): offset -= 1
+    regs['gensize'] = 1 << offset.bit_length()
+
+    error += check_wen_regs(regs)
+
+    if autoregs:
+        # auto generated registers go at the front
+        autoregs.extend(regs['registers'])
+        regs['registers'] = autoregs
+        regs['genautoregs'] = True
+
+    log.info("Validated, size = " + hex(regs['gensize']) + " errors=" +
+             str(error) + " names are " + str(regs['genrnames']))
+    if (error > 0):
+        log.error("Register description had " + str(error) + " error" +
+                  "s" if error > 1 else "")
+    return error
diff --git a/util/reggen/version.py b/util/reggen/version.py
new file mode 100644
index 0000000..3539c46
--- /dev/null
+++ b/util/reggen/version.py
@@ -0,0 +1,24 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""Standard version printing
+"""
+import os
+import subprocess
+import sys
+
+import pkg_resources  # part of setuptools
+
+
+def show_and_exit(clitool, packages):
+    util_path = os.path.dirname(os.path.realpath(clitool))
+    os.chdir(util_path)
+    ver = subprocess.run(
+        ["git", "describe", "--always", "--dirty", "--broken"],
+        stdout=subprocess.PIPE).stdout.strip().decode('ascii')
+    if (ver == ''):
+        ver = 'not found (not in Git repository?)'
+    sys.stderr.write(clitool + " Git version " + ver + '\n')
+    for p in packages:
+        sys.stderr.write(p + ' ' + pkg_resources.require(p)[0].version + '\n')
+    exit(0)
diff --git a/util/regtool.py b/util/regtool.py
new file mode 100755
index 0000000..34bc935
--- /dev/null
+++ b/util/regtool.py
@@ -0,0 +1,203 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""Command-line tool to validate and convert register hjson
+
+"""
+import argparse
+import logging as log
+import os
+import re
+import sys
+from pathlib import PurePath
+
+import hjson
+import pkg_resources
+
+from reggen import (gen_cheader, gen_ctheader, gen_dv, gen_html, gen_json,
+                    gen_rtl, gen_selfdoc, validate, version)
+
+DESC = """regtool, generate register info from hjson source"""
+
+USAGE = '''
+  regtool [options]
+  regtool [options] <input>
+  regtool (-h | --help)
+  regtool (-V | --version)
+'''
+
+
+def main():
+    format = 'hjson'
+    verbose = 0
+
+    parser = argparse.ArgumentParser(
+        prog="regtool",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        usage=USAGE,
+        description=DESC)
+    parser.add_argument(
+        'input',
+        nargs='?',
+        metavar='file',
+        type=argparse.FileType('r'),
+        default=sys.stdin,
+        help='input file in hjson type')
+    parser.add_argument(
+        '-d', action='store_true', help='Output register documentation (html)')
+    parser.add_argument(
+        '--cdefines',
+        '-D',
+        action='store_true',
+        help='Output C defines header')
+    parser.add_argument(
+        '--ctdefines',
+        '-T',
+        action='store_true',
+        help='Output C defines header (Titan style)')
+    parser.add_argument(
+        '--doc',
+        action='store_true',
+        help='Output source file documentation (gfm)')
+    parser.add_argument(
+        '-j', action='store_true', help='Output as formatted JSON')
+    parser.add_argument('-c', action='store_true', help='Output as JSON')
+    parser.add_argument(
+        '-r', action='store_true', help='Output as SystemVerilog RTL')
+    parser.add_argument(
+        '-s', action='store_true', help='Output as UVM Register class')
+    parser.add_argument('--outdir', '-t',
+                        help='Target directory for generated RTL, '\
+                             'tool uses ../rtl if blank.')
+    parser.add_argument(
+        '--outfile',
+        '-o',
+        type=argparse.FileType('w'),
+        default=sys.stdout,
+        help='Target filename for json, html, gfm.')
+    parser.add_argument(
+        '--verbose',
+        '-v',
+        action='store_true',
+        help='Verbose and run validate twice')
+    parser.add_argument(
+        '--version', '-V', action='store_true', help='Show version')
+    parser.add_argument(
+        '--novalidate',
+        action='store_true',
+        help='Skip validate, just output json')
+
+    args = parser.parse_args()
+
+    if args.version:
+        version.show_and_exit(__file__, ["Hjson", "Mako"])
+
+    verbose = args.verbose
+
+    if args.j: format = 'json'
+    elif args.c: format = 'compact'
+    elif args.d: format = 'html'
+    elif args.doc: format = 'doc'
+    elif args.r: format = 'rtl'
+    elif args.s: format = 'dv'
+    elif args.cdefines: format = 'cdh'
+    elif args.ctdefines: format = 'cth'
+
+    if (verbose):
+        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
+    else:
+        log.basicConfig(format="%(levelname)s: %(message)s")
+
+    outfile = args.outfile
+
+    infile = args.input
+
+    if format == 'rtl':
+        if args.outdir:
+            outdir = args.outdir
+        elif infile != sys.stdin:
+            outdir = str(PurePath(infile.name).parents[1].joinpath("rtl"))
+        else:
+            # Using sys.stdin; not possible to generate RTL
+            log.error("-r option cannot be used with pipe or stdin")
+    elif format == 'dv':
+        if args.outdir:
+            outdir = args.outdir
+        elif infile != sys.stdin:
+            outdir = str(PurePath(infile.name).parents[1].joinpath("dv"))
+        else:
+            # Using sys.stdin; not possible to generate DV code
+            log.error("-s option cannot be used with pipe or stdin")
+    else:
+        # Ignore
+        outdir = "."
+
+    if format == 'doc':
+        with outfile:
+            gen_selfdoc.document(outfile)
+        exit(0)
+
+    with infile:
+        try:
+            srcfull = infile.read()
+            obj = hjson.loads(
+                srcfull,
+                use_decimal=True,
+                object_pairs_hook=validate.checking_dict)
+        except ValueError:
+            raise SystemExit(sys.exc_info()[1])
+
+    if args.novalidate:
+        with outfile:
+            gen_json.gen_json(obj, outfile, format)
+            outfile.write('\n')
+    elif (validate.validate(obj) == 0):
+        if (verbose):
+            log.info("Second validate pass (should show added optional keys)")
+            validate.validate(obj)
+
+        if format == 'rtl':
+            gen_rtl.gen_rtl(obj, outdir)
+            return 0
+        if format == 'dv':
+            gen_dv.gen_dv(obj, outdir)
+            return 0
+
+        src_lic = None
+        src_copy = ''
+        found_spdx = None
+        found_lunder = None
+        copy = re.compile(r'.*(copyright.*)|(.*\(c\).*)', re.IGNORECASE)
+        spdx = re.compile(r'.*(SPDX-License-Identifier:.+)')
+        lunder = re.compile(r'.*(Licensed under.+)', re.IGNORECASE)
+        for line in srcfull.splitlines():
+            mat = copy.match(line)
+            if mat != None:
+                src_copy += mat.group(1)
+            mat = spdx.match(line)
+            if mat != None:
+                found_spdx = mat.group(1)
+            mat = lunder.match(line)
+            if mat != None:
+                found_lunder = mat.group(1)
+        if found_lunder:
+            src_lic = found_lunder
+        if found_spdx:
+            src_lic = (found_spdx if src_lic is None
+                       else src_lic + '\n' + found_spdx)
+
+        with outfile:
+            if format == 'html':
+                gen_html.gen_html(obj, outfile)
+            elif format == 'cdh':
+                gen_cheader.gen_cdefines(obj, outfile, src_lic, src_copy)
+            elif format == 'cth':
+                gen_ctheader.gen_cdefines(obj, outfile, src_lic, src_copy)
+            else:
+                gen_json.gen_json(obj, outfile, format)
+
+            outfile.write('\n')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/util/run-clang-format.sh b/util/run-clang-format.sh
new file mode 100755
index 0000000..6e23acf
--- /dev/null
+++ b/util/run-clang-format.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+find sw hw \
+    -not \( -path '*/vendor' -prune \) \
+    -not \( -path 'sw/coremark' -prune \) \
+    \( -name '*.cpp' \
+    -o -name '*.cc' \
+    -o -name '*.c' \
+    -o -name '*.h' \) \
+    -exec clang-format -i {} \;
+
+# Report on missing curly braces for loops and control statements.
+# clang-format cannot fix them for us, so this requires manual work.
+braces_missing=$(
+    find sw hw \
+       -not \( -path '*/vendor' -prune \) \
+       -not \( -path 'sw/coremark' -prune \) \
+        \( -name '*.cpp' \
+        -o -name '*.cc' \
+        -o -name '*.c' \
+        -o -name '*.h' \) \
+       -exec grep -Hn -P '(^|\s)((if|while|for) \(.+\)|else\s*)$' {} \;
+)
+if [ ! -z "$braces_missing" ]; then
+    echo ERROR: Curly braces are missing from the following control or loop
+    echo statements. Please add them manually and re-run this script.
+    echo
+    echo "$braces_missing"
+    exit 1
+fi
diff --git a/util/simplespi/README.md b/util/simplespi/README.md
new file mode 100644
index 0000000..bbedb45
--- /dev/null
+++ b/util/simplespi/README.md
@@ -0,0 +1,80 @@
+# Simple SPI Tests
+
+Spitest is a trivial python3 tool for interacting with the SPI device
+code in the hello_world test program running on earlgrey on the Nexys
+Video board.
+
+The same FTDI interface is used for SPI as for JTAG, so this tool
+cannot be used at the same time as an OpenOCD debugger. The
+`top_earlgrey_nexysvideo` pin mux has been modified to use one of the
+FTDI GPIOs to select between the two interfaces, so it is possible to
+dynamically switch between the tools. This status bit is also
+presented as GPIO[16] (which was previously unused).
+
+The example commands assume $REPO_TOP is set to the toplevel directory
+of the repo.
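+
+For example, one way to set it from anywhere inside a checkout (assuming
+`git` is available) is:
+
+```
+$ export REPO_TOP=$(git rev-parse --show-toplevel)
+```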
+
+### Setup
+
+If packages have not previously been installed you will need to set a
+few things up. First use `apt` and `pip3` to install some required packages:
+```
+$ sudo apt-get install libusb-1.0
+$ pip3 install --user pyftdi
+```
+
+You also need the `udev` rules to be set correctly by following the
+instructions for setting up the Xilinx tools.
+
+### SPI/JTAG selection GPIO
+
+The tool can be used to test the SPI/JTAG selection GPIO. This is done
+by providing the `--flippy` or `-f` argument, which causes the
+selection to be flipped 10 times with a 2-second pause between flips.
+The `hello_world` program will see the bit flip and print a message.
+
+
+```
+$ cd $REPO_TOP/util/simplespi
+$ ./spitest.py -f
+```
+
+
+### SPI Protocol Test
+
+The SPI protocol used in the test is a simple exchange of 4-byte
+messages. When data is received on the SPI device interface, the
+`hello_world` program prints it out on the UART. The first four bytes
+have their bottom bit inverted and are returned in the next SPI
+transaction. If the sender is faster than the FPGA, messages are
+aggregated in the receive buffer and appear to `hello_world` as a
+single long message, to which it will only return a single 4-byte
+message.
+
+A simple test should therefore just use 4 characters.
+
+
+```
+$ cd $REPO_TOP/util/simplespi
+$ ./spitest.py 1234
+$ ./spitest.py 5678
+```
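+
+The same exchange can also be driven directly from Python with pyftdi.
+The sketch below is illustrative only: it mirrors the FTDI URL, SPI
+settings and GPIO values used by `spitest.py`, with the tool's staged
+GPIO setup condensed into a single step.
+
+```
+from pyftdi.spi import SpiController
+
+spi = SpiController(cs_count=1)
+spi.configure('ftdi://ftdi:2232h/1')          # FTDI channel A (shared with JTAG)
+slave = spi.get_port(cs=0, freq=1E6, mode=0)  # /CS on A*BUS3, SPI mode 0 @ 1 MHz
+gpio = spi.get_gpio()
+gpio.set_direction(0x70, 0x70)                # TRST_N, SRST_N, JTAG/SPI select as outputs
+gpio.write(0x30)                              # TRST_N/SRST_N high, select SPI
+reply = slave.exchange(b'1234', duplex=True)  # send 4 bytes, read 4 back
+print(reply.tobytes())
+```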
+
+Long messages will be padded to a multiple of 4 bytes before being
+sent, and there may be some garbage return messages.
+
+```
+$ cd $REPO_TOP/util/simplespi
+$ ./spitest.py the quick brown fox jumps
+```
+
+Messages of any length (the length given is used even if it is not a
+multiple of 4) may be generated by the tool. The message is formed by
+repeating the ASCII characters 0123456789abcdef to fill the required
+length and is then sent; there may be some garbage return messages.
+
+```
+$ cd $REPO_TOP/util/simplespi
+$ ./spitest.py -l 120
+```
diff --git a/util/simplespi/__init__.py b/util/simplespi/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/util/simplespi/__init__.py
diff --git a/util/simplespi/spitest.py b/util/simplespi/spitest.py
new file mode 100755
index 0000000..14d754c
--- /dev/null
+++ b/util/simplespi/spitest.py
@@ -0,0 +1,146 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""Simple Tool for FPGA SPI experiments
+"""
+
+import argparse
+import logging as log
+import os
+import subprocess
+import sys
+import time
+
+import pkg_resources  # part of setuptools
+from pyftdi.spi import SpiController
+
+
+def show_and_exit(clitool, packages):
+    util_path = os.path.dirname(os.path.realpath(clitool))
+    os.chdir(util_path)
+    ver = subprocess.run(
+        ["git", "describe", "--always", "--dirty", "--broken"],
+        stdout=subprocess.PIPE).stdout.strip().decode('ascii')
+    if (ver == ''):
+        ver = 'not found (not in Git repository?)'
+    sys.stderr.write(clitool + " Git version " + ver + '\n')
+    for p in packages:
+        sys.stderr.write(p + ' ' + pkg_resources.require(p)[0].version + '\n')
+    exit(0)
+
+
+USAGE = """
+  spitest [options] text [text ...]
+"""
+
+
+def main():
+    done_stdin = False
+    parser = argparse.ArgumentParser(
+        prog="spitest",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        usage=USAGE,
+        description=__doc__)
+    parser.add_argument(
+        '--version', action='store_true', help='Show version and exit')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='store_true',
+        help='Verbose output during processing')
+    parser.add_argument(
+        '-f',
+        '--flippy',
+        action='store_true',
+        help='Flip the SPI/JTAG control GPIO 10 times and exit')
+    parser.add_argument(
+        '-l',
+        '--length',
+        type=int,
+        action='store',
+        help='Construct and send a message of specified length')
+    parser.add_argument(
+        '-j',
+        '--jtag',
+        action='store_true',
+        help='Set SPI/JTAG control to JTAG and exit')
+    parser.add_argument(
+        'message',
+        nargs='*',
+        metavar='input',
+        default='1234',
+        help='message to send in 4 byte chunks')
+    args = parser.parse_args()
+
+    if args.version:
+        show_and_exit(__file__, ["pyftdi"])
+
+    if (args.verbose):
+        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
+    else:
+        log.basicConfig(format="%(levelname)s: %(message)s")
+
+    # Instantiate an SPI controller
+    spi = SpiController(cs_count=1)
+
+    # interfaces start from 1 here, so this is Channel A (called 0 in jtag)
+    spi.configure('ftdi://ftdi:2232h/1')
+
+    # Get a port to a SPI slave w/ /CS on A*BUS3 and SPI mode 0 @ 1MHz
+    slave = spi.get_port(cs=0, freq=1E6, mode=0)
+
+    # Get GPIO port to manage extra pins
+    # BUS4 = JTAG TRST_N, BUS5 = JTAG SRST_N, BUS6 = JTAG_SPIN
+    # Note: something makes the FTDI default BUS6 low, so that level was
+    # chosen to select SPI; otherwise SRST defaulting low would hold the
+    # chip in reset. pyftdi's set_direction also forces the output to zero,
+    # so initially make SRST an input (with a pullup in the FPGA) in case
+    # the SPI/JTAG selection was initially JTAG.
+    gpio = spi.get_gpio()
+    gpio.set_direction(0x40, 0x40)
+    time.sleep(1)
+    gpio.set_direction(0x70, 0x70)
+
+    if args.jtag:
+        gpio.write(0x70)
+        return
+
+    gpio.write(0x30)
+
+    if args.flippy:
+        for i in range(10):
+            print("Select SPI")
+            gpio.write(0x30)
+            time.sleep(2)
+            print("Select JTAG")
+            gpio.write(0x70)
+            time.sleep(2)
+        return
+
+    print("Select SPI")
+    gpio.write(0x30)
+    # Synchronous exchange with the remote SPI slave
+    if args.length:
+        s = ''
+        for i in range(args.length):
+            s += hex(i & 15)[-1]
+    else:
+        s = ''
+        for m in args.message:
+            s += m + ' '
+        s = s[:-1]  # remove extra space put on end
+        # pad to ensure multiple of 4 bytes
+        filled = len(s) % 4
+        if filled:
+            s += '....' [filled:]
+
+    while len(s):
+        write_buf = bytes(s[:4], encoding='utf8')
+        read_buf = slave.exchange(write_buf, duplex=True).tobytes()
+        print("Got " + str(read_buf))
+        s = s[4:]
+
+
+if __name__ == '__main__':
+    main()
diff --git a/util/syn_yosys.sh b/util/syn_yosys.sh
new file mode 100755
index 0000000..791f7c7
--- /dev/null
+++ b/util/syn_yosys.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# This script converts all SystemVerilog RTL files to Verilog and then
+# runs Yosys.
+#
+# The following tools are required:
+#  - sv2v: SystemVerilog-to-Verilog converter from github.com/zachjs/sv2v
+#  - yosys: synthesis tool from github.com/YosysHQ/yosys
+#
+# Usage:
+#   syn_yosys.sh 2>&1 | tee syn.std
+
+#-------------------------------------------------------------------------
+# use fusesoc to generate file list
+#-------------------------------------------------------------------------
+\rm -Rf build
+fusesoc --cores-root .. sim --build-only formal > /dev/null 2>&1
+
+# copy all files into directory "syn_out"
+\rm -Rf syn_out
+mkdir syn_out
+cp build/formal_0/src/*/*/*.sv build/formal_0/src/*/*/*/*.sv syn_out
+
+#-------------------------------------------------------------------------
+# convert all RTL files to Verilog
+#-------------------------------------------------------------------------
+cd syn_out
+
+# TODO: this file is deleted for now because sv2v currently crashes on it
+\rm -f hmac_pkg.sv
+
+for file in *.sv; do
+  module=`basename -s .sv $file`
+  echo $file
+  sv2v --oneunit *_pkg.sv prim_assert.sv $file > ${module}.v
+done
+
+# remove *pkg.v files (they are empty files and not needed)
+\rm -Rf *_pkg.v
+
+#-------------------------------------------------------------------------
+# run yosys
+#-------------------------------------------------------------------------
+
+# for now, read in each verilog file into Yosys and only output errors
+# and warnings
+for file in *.v; do
+  yosys -QTqp "read_verilog ${file}"
+done
+
+cd -
+
+# TODOs:
+#  - add LEC to check if generated Verilog is equivalent to original SV
+#  - add full yosys synthesis for all modules
+#  - add final LEC check (RTL-versus-netlist)
diff --git a/util/test_reggen/__init__.py b/util/test_reggen/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/util/test_reggen/__init__.py
diff --git a/util/test_reggen/test_rtl.py b/util/test_reggen/test_rtl.py
new file mode 100644
index 0000000..b80bed4
--- /dev/null
+++ b/util/test_reggen/test_rtl.py
@@ -0,0 +1,20 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import unittest
+
+from reggen import gen_rtl
+
+
+class TestFieldCheck(unittest.TestCase):
+    def test_check_bool(self):
+        arg = {'field1': "true", 'field2': "false", 'field3': "True"}
+        result = gen_rtl.check_field_bool(arg, 'field1', False)
+        self.assertTrue(result)
+        result = gen_rtl.check_field_bool(arg, 'field2', True)
+        self.assertFalse(result)
+        result = gen_rtl.check_field_bool(arg, 'field3', False)
+        self.assertFalse(result)
+        result = gen_rtl.check_field_bool(arg, 'field4', False)
+        self.assertFalse(result)
diff --git a/util/tlgen.py b/util/tlgen.py
new file mode 100755
index 0000000..e1d2455
--- /dev/null
+++ b/util/tlgen.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r""" TileLink-Uncached Lightweight Xbar generator
+"""
+
+import argparse
+import logging as log
+import sys
+from pathlib import Path, PurePath
+
+import hjson
+import mako
+import pkg_resources
+
+import tlgen
+
+
+def main():
+    parser = argparse.ArgumentParser(prog="tlgen")
+    parser.add_argument('--topcfg',
+                        '-t',
+                        metavar='file',
+                        required=True,
+                        type=argparse.FileType('r'),
+                        help="`top_cfg.hjson` file.")
+    parser.add_argument(
+        '--outdir',
+        '-o',
+        required=True,
+        help=
+        "Target directory. tlgen needs 'rtl/' and 'dv/' directory under the target dir"
+    )
+    parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
+
+    args = parser.parse_args()
+
+    if args.verbose:
+        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
+    else:
+        log.basicConfig(format="%(levelname)s: %(message)s")
+
+    # Check if outdir exists. If not, show error and exit
+    if not Path(args.outdir).is_dir():
+        log.error("'--outdir' should point to writable directory")
+
+    # Load contents of top_cfg
+    # Skip this part and use internal structure at this time
+    try:
+        obj = hjson.load(args.topcfg, use_decimal=True)
+    except ValueError:
+        raise SystemExit(sys.exc_info()[1])
+
+    log.info(obj)
+
+    xbar = tlgen.validate(obj)
+
+    if not tlgen.elaborate(xbar):
+        log.error("Elaboration failed." + repr(xbar))
+
+    # Generate
+    out_rtl, out_pkg, out_dv = tlgen.generate(xbar)
+
+    rtl_path = Path(args.outdir) / 'rtl'
+    rtl_path.mkdir(parents=True, exist_ok=True)
+    dv_path = Path(args.outdir) / 'dv'
+    dv_path.mkdir(parents=True, exist_ok=True)
+
+    rtl_filename = "xbar_%s.sv" % (xbar.name)
+    rtl_filepath = rtl_path / rtl_filename
+    with rtl_filepath.open(mode='w', encoding='UTF-8') as fout:
+        fout.write(out_rtl)
+
+    pkg_filename = "tl_%s_pkg.sv" % (xbar.name)
+    pkg_filepath = rtl_path / pkg_filename
+    with pkg_filepath.open(mode='w', encoding='UTF-8') as fout:
+        fout.write(out_pkg)
+
+    dv_filename = "xbar_%s_tb.sv" % (xbar.name)
+    dv_filepath = dv_path / dv_filename
+    with dv_filepath.open(mode='w', encoding='UTF-8') as fout:
+        fout.write(out_dv)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/util/tlgen/README.md b/util/tlgen/README.md
new file mode 100644
index 0000000..50382b3
--- /dev/null
+++ b/util/tlgen/README.md
@@ -0,0 +1,5 @@
+# TL-UL Xbar generator
+
+This is the TL-UL generator.
+
+TODO: Add more documentation, or link to documentation.
diff --git a/util/tlgen/__init__.py b/util/tlgen/__init__.py
new file mode 100644
index 0000000..7534400
--- /dev/null
+++ b/util/tlgen/__init__.py
@@ -0,0 +1,9 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+from .elaborate import elaborate
+from .generate import generate
+from .item import Edge, Node, NodeType
+from .validate import validate
+from .xbar import Xbar
diff --git a/util/tlgen/elaborate.py b/util/tlgen/elaborate.py
new file mode 100644
index 0000000..2927fe7
--- /dev/null
+++ b/util/tlgen/elaborate.py
@@ -0,0 +1,146 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import copy
+import logging as log
+
+from .item import Edge, Node, NodeType
+from .xbar import Xbar
+
+
+def elaborate(xbar):  # xbar: Xbar -> bool
+    """elaborate reads all nodes and edges then
+    construct internal FIFOs, Sockets.
+    """
+    # Condition check
+    if len(xbar.nodes) <= 1 or len(xbar.edges) == 0:
+        log.error(
+            "# of Nodes is less than 2 or no Edge exists. Cannot proceed.")
+        return False
+
+    for host in xbar.hosts:
+        process_node(host, xbar)
+        log.info("Node Processed: " + repr(xbar))
+
+    ## Pipeline
+    process_pipeline(xbar)
+
+    ## Build address map
+    ## Each socket_1n should have address map
+
+    ## Gather clocks
+    xbar.clocks = {xbar.clock
+                   } | {clk
+                        for node in xbar.nodes for clk in node.clocks}
+
+    return True
+
+
+def process_node(node, xbar):  # node: Node -> xbar: Xbar -> Xbar
+    """process each node based on algorithm
+
+    1. If a node has different clock from main clock and not ASYNC_FIFO:
+       a. (New Node) Create ASYNC_FIFO node.
+       b. Revise every edges from the node to have start node as ASYNC_FIFO
+          node. (New Edge) create a edge from the node to ASYNC_FIFO node.
+          - Repeat the algorithm with ASYNC_FIFO node.
+       c. Revise every edges to the node to have end node as ASYNC_FIFO
+          node. (New Edge) create a edge from ASYNC_FIFO node to the node.
+       d. If it is not DEVICE, HOST node, raise Error. If it is DEVICE, end
+          (next item).
+    2. If a node has multiple edges having it as a end node and not SOCKET_M1:
+       a. (New node) Create SOCKET_M1 node.
+       b. Revise every edges to the node to have SOCKET_M1 node as end node.
+       c. (New Edge) create a edge from SOCKET_M1 to the node.
+       d. Repeat the algorithm with the node.
+    3. If a node has multiple edges having it as a start node and not SOCKET_1N:
+       a. (New node) Create SOCKET_1N node.
+       b. Revise every edges from the node to have SOCKET_1N node as start node.
+       c. (New Edge) Create a edge from the node to SOCKET_1N node.
+       d. (for loop) Repeat the algorithm with SOCKET_1N’s other side node.
+    """
+
+    # If a node has a different clock from the main clock and is not an ASYNC_FIFO:
+    if node.node_type != NodeType.ASYNC_FIFO and node.clocks[0] != xbar.clock:
+        # (New Node) Create ASYNC_FIFO node
+        new_node = Node(name="asf_" + str(len(xbar.nodes)),
+                        node_type=NodeType.ASYNC_FIFO,
+                        clock=xbar.clock)
+        if node.node_type == NodeType.HOST:
+            new_node.clocks.insert(0, node.clocks[0])
+        else:
+            new_node.clocks.append(node.clocks[0])
+
+        xbar.insert_node(new_node, node)
+
+        process_node(new_node, xbar)
+
+    # If a node has multiple edges having it as an end node and is not a SOCKET_M1:
+    elif node.node_type != NodeType.SOCKET_M1 and len(node.us) > 1:
+        # (New node) Create SOCKET_M1 node
+        new_node = Node(name="sm1_" + str(len(xbar.nodes)),
+                        node_type=NodeType.SOCKET_M1,
+                        clock=xbar.clock)
+        new_node.hpass = 2**len(node.us) - 1
+        new_node.dpass = 1
+        xbar.insert_node(new_node, node)
+        process_node(new_node, xbar)
+
+    # If a node has multiple edges having it as a start node and is not a SOCKET_1N:
+    elif node.node_type != NodeType.SOCKET_1N and len(node.ds) > 1:
+        # (New node) Create SOCKET_1N node
+        new_node = Node(name="s1n_" + str(len(xbar.nodes)),
+                        node_type=NodeType.SOCKET_1N,
+                        clock=xbar.clock)
+        new_node.hpass = 1
+        new_node.dpass = 2**len(node.ds) - 1
+        xbar.insert_node(new_node, node)
+
+        # (for loop) Repeat the algorithm with SOCKET_1N's other side node
+        for edge in new_node.ds:
+            process_node(edge.ds, xbar)
+
+    return xbar
+
+
+def process_pipeline(xbar):
+    """Check if HOST, DEVICE has pipeline key and is True, then propagate it to end
+    """
+    for host in xbar.hosts:
+        # go downstream and set the HReqPass at the first instance.
+        # If it is async, skip.
+        # If Socket 1N, set hpass to 1 and skip.
+        # If Socket M1, find the position of the host, set that bit in hpass, and skip.
+        # If it is a device, host and device are directly connected; ignore for now.
+
+        # After process node is done, always only one downstream exists in any host node
+        if host.pipeline == False:
+            # No need to process, default is Pass the req/rsp
+            continue
+
+        dnode = host.ds[0].ds
+        if dnode.node_type == NodeType.SOCKET_1N:
+            dnode.hpass = 0
+        elif dnode.node_type == NodeType.SOCKET_M1:
+            idx = dnode.us.index(host.ds)
+            dnode.hpass = dnode.hpass ^ (1 << idx)
+
+    for device in xbar.devices:
+        # go upstream and set DReq/RspPass at the first instance.
+        # If it is async, skip.
+        # If Socket 1N, set the bit at this device's position in dpass and skip.
+        # If Socket M1, set dpass to 1 and skip.
+        # If it is a host, ignore.
+
+        if device.pipeline == False:
+            continue
+
+        unode = device.us[0].us
+        if unode.node_type == NodeType.SOCKET_1N:
+            idx = unode.ds.index(device.us)
+            unode.dpass = unode.dpass ^ (1 << idx)
+        elif unode.node_type == NodeType.SOCKET_M1:
+            unode.dpass = 0
+
+    return xbar
diff --git a/util/tlgen/generate.py b/util/tlgen/generate.py
new file mode 100644
index 0000000..80f7e91
--- /dev/null
+++ b/util/tlgen/generate.py
@@ -0,0 +1,27 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+from mako.template import Template
+from pkg_resources import resource_filename
+
+from .item import NodeType
+from .xbar import Xbar
+
+
+def generate(xbar):  # xbar: Xbar -> (str, str, str)
+    """generate uses the elaborated model to create the top-level Xbar RTL,
+    package, and DV testbench sources.
+    """
+
+    xbar_rtl_tpl = Template(
+        filename=resource_filename('tlgen', 'xbar.rtl.tpl.sv'))
+    xbar_pkg_tpl = Template(
+        filename=resource_filename('tlgen', 'xbar.pkg.tpl.sv'))
+    xbar_dv_tpl = Template(
+        filename=resource_filename('tlgen', 'xbar.dv.tpl.sv'))
+
+    out_rtl = xbar_rtl_tpl.render(xbar=xbar, ntype=NodeType)
+    out_pkg = xbar_pkg_tpl.render(xbar=xbar)
+    out_dv = xbar_dv_tpl.render(xbar=xbar, ntype=NodeType)
+    return (out_rtl, out_pkg, out_dv)
diff --git a/util/tlgen/item.py b/util/tlgen/item.py
new file mode 100644
index 0000000..116e275
--- /dev/null
+++ b/util/tlgen/item.py
@@ -0,0 +1,65 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+from enum import Enum
+
+
+class Edge:
+    """Edge class contains the connection from a node to a node.
+
+    a Node can be a host port, output of async_fifo, port in a socket,
+    or a device port.
+    """
+
+    def __init__(self, us, ds):
+        self.us = us
+        self.ds = ds
+
+    def __repr__(self):
+        return "U(%s) D(%s)" % (self.us.name, self.ds.name)
+
+
+#Edges = List[Edge]
+#Clocks = List[str]  # If length is more than one, should be exactly two
+
+# [UpstreamClock, DownstreamClock]
+
+
+class NodeType(Enum):
+    HOST = 1
+    DEVICE = 2
+    ASYNC_FIFO = 3
+    SOCKET_1N = 4
+    SOCKET_M1 = 5
+
+
+class Node:
+    """Node class is a port that communicates from/to other Node or TL-UL
+    input/output.
+    """
+
+    name = ""  # name: str
+    # node_type: NodeType
+    clocks = []  # Clocks  # Clock domain of the node
+    # e.g. async_fifo in : clk_core , out : clk_main
+
+    # If NodeType is Socket out from 1:N then address steering is used
+    # But this value is also propagated up to a Host from multiple Devices
+    # Device Node should have address_from, address_to
+    address_from = 0  #: int
+    address_to = 0  #: int
+
+    us = []  # Edges  # Number of Ports depends on the NodeType
+    # 1 for Host, Device, 2 for Async FIFO, N for Sockets
+    ds = []  # Edges
+
+    # Req/Rsp Pass. default False
+    pipeline = False
+
+    def __init__(self, name, node_type, clock):
+        self.name = name
+        self.node_type = node_type
+        self.clocks = [clock]
+        self.us = []
+        self.ds = []
diff --git a/util/tlgen/validate.py b/util/tlgen/validate.py
new file mode 100644
index 0000000..653b81f
--- /dev/null
+++ b/util/tlgen/validate.py
@@ -0,0 +1,89 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+import logging as log
+from collections import OrderedDict
+
+from .item import Edge, Node, NodeType
+from .xbar import Xbar
+
+
+def get_nodetype(t):  # t: str -> NodeType
+    if t == "host":
+        return NodeType.HOST
+    elif t == "device":
+        return NodeType.DEVICE
+    elif t == "async_fifo":
+        return NodeType.ASYNC_FIFO
+    elif t == "socket_1n":
+        return NodeType.SOCKET_1N
+    elif t == "socket_m1":
+        return NodeType.SOCKET_M1
+
+    raise ValueError("Unknown node type: " + t)
+
+
+def checkNameExist(name, xbar):  # name: str -> xbar: Xbar -> bool
+    return name.lower() in [x.name for x in xbar.nodes]
+
+
+def isOverlap(range1, range2):  # Tuple[int,int] -> Tuple[int,int] -> bool
+    return not (range2[1] < range1[0] or range2[0] > range1[1])
+
+
+# Tuple[int,int] -> List[Tuple[]] -> bool
+def checkAddressOverlap(addr, ranges):
+    result = [x for x in ranges if isOverlap(x, addr)]
+    return len(result) != 0
+
+
+def validate(obj):  # OrderedDict -> Xbar
+    xbar = Xbar()
+    xbar.name = obj["name"].lower()
+    xbar.clock = obj["clock"].lower()
+
+    addr_ranges = []
+
+    # Nodes
+    for nodeobj in obj["nodes"]:
+        clock = nodeobj["clock"].lower() if "clock" in nodeobj.keys(
+        ) else xbar.clock
+
+        if checkNameExist(nodeobj["name"], xbar):
+            log.error("Duplicated name: %s" % (nodeobj["name"]))
+            raise SystemExit("Duplicated name in the configuration")
+
+        node = Node(name=nodeobj["name"].lower(),
+                    node_type=get_nodetype(nodeobj["type"].lower()),
+                    clock=clock)
+
+        if node.node_type == NodeType.DEVICE:
+            # Add address obj["base_addr"], obj["size"])
+            node.address_from = int(nodeobj["base_addr"], 0)
+            size = int(nodeobj["size_byte"], 0)
+            node.address_to = node.address_from + size - 1
+
+            addr = (node.address_from, node.address_to)
+
+            if checkAddressOverlap(addr, addr_ranges):
+                log.error(
+                    "Address is overlapping. Check the config. Addr(0x%x - 0x%x)"
+                    % (addr[0], addr[1]))
+                raise SystemExit("Address overlapping error occurred")
+
+            addr_ranges.append(addr)
+
+        if node.node_type in [NodeType.DEVICE, NodeType.HOST
+                              ] and "pipeline" in nodeobj:
+            node.pipeline = nodeobj["pipeline"].lower() in ["true", "1"]
+        xbar.nodes.append(node)
+
+    # Edge
+    for host in obj["connections"].keys():
+        # host: [device]
+        for device in obj["connections"][host]:
+            xbar.connect_nodes(host.lower(), device.lower())
+
+    return xbar
diff --git a/util/tlgen/xbar.dv.tpl.sv b/util/tlgen/xbar.dv.tpl.sv
new file mode 100644
index 0000000..4155512
--- /dev/null
+++ b/util/tlgen/xbar.dv.tpl.sv
@@ -0,0 +1,194 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+// xbar_${xbar.name}_tb module generated by `tlgen.py` tool for sanity check
+<%
+  import random
+%>
+module xbar_${xbar.name}_tb;
+
+  import tlul_pkg::*;
+
+  // Clock generator
+% for clock in xbar.clocks:
+  localparam CLK_${clock.upper()}_PERIOD = ${random.randint(10,40)};
+% endfor
+
+% for clock in xbar.clocks:
+  logic clk_${clock};
+  initial begin
+    clk_${clock} = 1'b0;
+    forever begin
+      #(CLK_${clock.upper()}_PERIOD/2)
+      clk_${clock} = ~clk_${clock};
+    end
+  end
+
+% endfor
+
+  // One reset source, synchronized into each clock domain
+  logic rst_n ;
+  initial begin
+    rst_n = 1'b0;
+    #117ns
+    rst_n = 1'b1;
+  end
+
+% for clock in xbar.clocks:
+  logic rst_${clock}_n;
+  initial begin
+    rst_${clock}_n = 1'b0;
+
+    wait(rst_n == 1'b1);
+    @(negedge clk_${clock});
+    rst_${clock}_n = 1'b1;
+  end
+
+% endfor
+
+  // Signals
+% for node in xbar.hosts + xbar.devices:
+  tl_h2d_t tl_${node.name}_h2d ;
+  tl_d2h_t tl_${node.name}_d2h ;
+% endfor
+
+  // Instance of xbar_${xbar.name}
+  xbar_${xbar.name} dut (
+% for clock in xbar.clocks:
+    .clk_${clock}_i   (clk_${clock}),
+    .rst_${clock}_ni  (rst_${clock}_n),
+% endfor
+
+    // Host interfaces
+% for node in xbar.hosts:
+    .tl_${node.name}_i  (tl_${node.name}_h2d),
+    .tl_${node.name}_o  (tl_${node.name}_d2h),
+% endfor
+
+    // Device interfaces
+% for node in xbar.devices:
+    .tl_${node.name}_o  (tl_${node.name}_h2d),
+    .tl_${node.name}_i  (tl_${node.name}_d2h),
+% endfor
+
+    .scanmode_i (1'b0)
+
+  );
+
+  task automatic tl_write(ref clk, ref tl_h2d_t tl_h2d, ref tl_d2h_t tl_d2h,
+    input [31:0] addr, input [31:0] wdata);
+    tl_h2d.a_address = addr;
+    tl_h2d.a_opcode = PutFullData;
+    tl_h2d.a_param = '0;
+    tl_h2d.a_size = 2'h2;
+    tl_h2d.a_user = '0;
+    tl_h2d.a_data = wdata;
+    tl_h2d.a_mask = 'hF;
+    tl_h2d.a_source = 0;
+    tl_h2d.a_valid = 1'b1;
+    @(posedge clk iff tl_d2h.a_ready == 1'b1);
+    tl_h2d.a_valid = 1'b0;
+    tl_h2d.d_ready = 1'b1;
+    @(posedge clk iff tl_d2h.d_valid == 1'b1);
+    if (tl_d2h.d_error == 1'b1) $error("TL-UL interface error occurred");
+    tl_h2d.d_ready = 1'b0;
+  endtask : tl_write
+
+  task automatic tl_read(ref clk, ref tl_h2d_t tl_h2d, ref tl_d2h_t tl_d2h,
+    input [31:0] addr, output logic [31:0] rdata);
+    tl_h2d.a_address = addr;
+    tl_h2d.a_opcode = Get;
+    tl_h2d.a_param = '0;
+    tl_h2d.a_size = 2'h2;
+    tl_h2d.a_user = '0;
+    tl_h2d.a_source = 0;
+    tl_h2d.a_valid = 1'b1;
+    @(posedge clk iff tl_d2h.a_ready == 1'b1);
+    tl_h2d.a_valid = 1'b0;
+    tl_h2d.d_ready = 1'b1;
+    @(posedge clk iff tl_d2h.d_valid == 1'b1);
+    if (tl_d2h.d_error == 1'b1) $error("TL-UL interface error occurred");
+    rdata = tl_d2h.d_data;
+    tl_h2d.d_ready = 1'b0;
+  endtask : tl_read
+
+  task automatic tl_compare(ref clk, ref tl_h2d_t tl_h2d, ref tl_d2h_t tl_d2h,
+    input [31:0] addr, input [31:0] wdata);
+    automatic logic [31:0] rdata;
+    tl_write(clk, tl_h2d, tl_d2h, addr, wdata);
+    tl_read(clk, tl_h2d, tl_d2h, addr, rdata);
+    if (wdata != rdata) $error("Addr(%x) mismatch: Exp(%x), Got(%x)", addr, wdata, rdata);
+  endtask : tl_compare
+
+  // Transaction generator
+  //
+  // Goal: Each host creates random sequence
+  //  1. select random device
+  //  2. select random burst (not implemented)
+  //  3. select random address range within the device
+  //  4. Write and read then compare
+  //  Note: There's a chance that another host updates the content at the same address while this
+  //        host reads it back. This is unavoidable, but the chance is small; keep in mind it can happen.
+  typedef struct {
+    logic [31:0] addr_from;
+    logic [31:0] addr_to;
+  } addr_range_t;
+% for host in xbar.hosts:
+<%
+  clkname = "clk_" + host.clocks[0]
+  rstname = "rst_" + host.clocks[0] + "_n"
+  num_dev = len(xbar.get_s1n_if_exist(host).ds)
+
+  addrs = list(map(xbar.get_addr, xbar.get_devices_from_host(host)))
+%>\
+  addr_range_t ${host.name}_map [${num_dev}] = '{
+% for addr in addrs:
+% if loop.last:
+    '{addr_from: 32'h${"%x"%(addr[0])}, addr_to: 32'h${"%x" %(addr[1])}}
+% else:
+    '{addr_from: 32'h${"%x"%(addr[0])}, addr_to: 32'h${"%x" %(addr[1])}},
+% endif
+% endfor
+  };
+  initial begin
+    // Wait until reset is released
+    tl_${host.name}_h2d.a_valid = 1'b0;
+    tl_${host.name}_h2d.d_ready = 1'b0;
+    wait(${rstname} == 1'b1);
+    @(negedge ${clkname});
+    forever begin
+      // choose among the devices
+      automatic int dev_sel = $urandom_range(${num_dev-1},0);
+
+      // determine address
+      automatic logic [31:0] addr = $urandom_range(${host.name}_map[dev_sel].addr_to,
+                                                   ${host.name}_map[dev_sel].addr_from);
+      addr = addr & 32'h FFFF_FFFC;
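+      // keep the address word-aligned, matching the 4-byte (a_size == 2)
+      // accesses issued by tl_write/tl_read above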
+
+      // compare
+      tl_compare(${clkname}, tl_${host.name}_h2d, tl_${host.name}_d2h, addr, $urandom());
+    end
+  end
+% endfor
+
+  // Instantiate generic TL-UL sram
+% for device in xbar.devices:
+<%
+  tl_h2d_sig = "tl_" + device.name + "_h2d"
+  tl_d2h_sig = "tl_" + device.name + "_d2h"
+%>
+  device_sram u_device_${device.name} (
+    .clk_i      (clk_${device.clocks[0]}),
+    .tl_i       (${tl_h2d_sig}),
+    .tl_o       (${tl_d2h_sig})
+  );
+% endfor
+
+  initial begin
+    #100us
+    $finish(1);
+  end
+endmodule
+
+
diff --git a/util/tlgen/xbar.pkg.tpl.sv b/util/tlgen/xbar.pkg.tpl.sv
new file mode 100644
index 0000000..a44d48f
--- /dev/null
+++ b/util/tlgen/xbar.pkg.tpl.sv
@@ -0,0 +1,56 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+// tl_${xbar.name} package generated by `tlgen.py` tool
+
+<%
+  name_len = max([len(x.name) for x in xbar.devices])
+%>\
+package tl_${xbar.name}_pkg;
+
+% for device in xbar.devices:
+  ## Address
+  localparam logic [31:0] ADDR_SPACE_${device.name.upper().ljust(name_len)} = 32'h ${"%08x" % device.address_from};
+% endfor
+
+% for device in xbar.devices:
+  ## Mask
+  localparam logic [31:0] ADDR_MASK_${device.name.upper().ljust(name_len)} = 32'h ${"%08x" % (device.address_to -
+  device.address_from)};
+% endfor
+
+  localparam int N_HOST   = ${len(xbar.hosts)};
+  localparam int N_DEVICE = ${len(xbar.devices)};
+
+  typedef enum int {
+% for device in xbar.devices:
+  ## Create enum type for hosts (or blocks) connecting to the device
+  ## Device Node has one upstream port. So tl_device_h2d can be directly used
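+  ## e.g. (illustrative) a device named "debug_mem" becomes the enum value TlDebugMem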
+<%
+  u_name = ''.join(device.name.title().split('_'));
+%>\
+  % if loop.last:
+    Tl${u_name} = ${loop.index}
+  % else:
+    Tl${u_name} = ${loop.index},
+  % endif
+% endfor
+  } tl_device_e;
+
+  typedef enum int {
+% for host in xbar.hosts:
+  ## Create enum type for downstream connecting to each host
+  ## Host Node has one downstream port. so tl_host_h2d can be directly used
+<%
+  u_name = ''.join(host.name.title().split('_'));
+%>\
+  % if loop.last:
+    Tl${u_name} = ${loop.index}
+  % else:
+    Tl${u_name} = ${loop.index},
+  % endif
+% endfor
+  } tl_host_e;
+
+endpackage
diff --git a/util/tlgen/xbar.py b/util/tlgen/xbar.py
new file mode 100644
index 0000000..04dc39f
--- /dev/null
+++ b/util/tlgen/xbar.py
@@ -0,0 +1,233 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import logging as log
+from typing import List
+
+from .item import Edge, Node, NodeType
+
+#Nodes = List[Node]
+#Edges = List[Edge]
+#Clocks = List[str]
+
+
+class Xbar:
+    """Xbar contains configurations to generate TL-UL crossbar.
+    """
+    nodes = []  # Nodes
+    edges = []  # Edges
+    clock = ""  # str  # Main clock remove 'clk_' prefix
+    name = ""  # str  # e.g. "main" --> main_xbar
+    clocks = []  # Clocks
+    # prefix is useful if SoC has more than one Xbar
+
+    # variables after elaboration. Shouldn't be touched by outside
+    blocks = []  # Nodes  # Internal blocks
+
+    def __init__(self):
+        self.blocks = []
+        self.nodes = []
+        self.edges = []
+        self.clocks = []
+
+    def __repr__(self):
+        out = "<Xbar(%s) #nodes:%d clock:%s" % (self.name, len(self.nodes),
+                                                self.clock)
+        out += " #edges:%d>\n" % (len(self.edges))
+
+        # print nodes
+        out += "  Nodes:\n"
+        for node in self.nodes:
+            out += "    - " + node.name + "\n"
+
+        out += "  Edges:\n"
+        for edge in self.edges:
+            out += "    - " + edge.us.name + " => " + edge.ds.name + "\n"
+        # print edges
+        return out
+
+    def get_edges_from_node(self, node):  # Node -> Edges
+        return [
+            edge for edge in self.edges
+            if node.name in (edge.us.name, edge.ds.name)
+        ]
+
+    def get_node(self, node):  # str -> Node
+        result = [x for x in self.nodes if x.name == node]
+        if len(result) != 1:
+            raise KeyError("Node '%s' is missing or duplicated" % node)
+
+        return result[0]
+
+    @property
+    def hosts(self):
+        return [x for x in self.nodes if x.node_type == NodeType.HOST]
+
+    @property
+    def devices(self):
+        return [x for x in self.nodes if x.node_type == NodeType.DEVICE]
+
+    @property
+    def socket_1ns(self):
+        return [x for x in self.nodes if x.node_type == NodeType.SOCKET_1N]
+
+    def get_downstream_device(self, node):  # Node -> Node
+        if (node.node_type == NodeType.DEVICE):
+            return node
+
+        if len(node.ds) == 0:
+            log.error("Node (%s) doesn't have downstream Node: US(%s), DS(%s)"
+                      % (node.name, ' '.join(map(repr, node.us)), ' '.join(
+                          map(repr, node.ds))))
+        return self.get_downstream_device(node.ds[0].ds)
+
+    def get_downstream_device_from_edge(self, edge):  # Edge -> Node
+        return self.get_downstream_device(edge.ds)
+
+    def get_leaf_from_s1n(self, node, idx):  # Node -> int -> Node
+        """ get end-device node from Socket_1n's Downstream port
+
+        Current implementation can't have multiple devices under the tree of
+        one downstream port in Socket_1N
+        """
+        return self.get_downstream_device(node.ds[idx].ds)
+
+    def get_s1n_if_exist(self, node):  # Node -> Node
+        """ return SOCKET_1N if exists down from the node, if not return itself
+        """
+        if node.node_type == NodeType.DEVICE:
+            log.error("get_s1n_if_exist hits DEVICE type (unexpected)")
+            return node
+        if node.node_type == NodeType.SOCKET_1N:
+            return node
+        return self.get_s1n_if_exist(node.ds[0].ds)
+
+    def get_leaf_from_node(self, node, idx):  # Node -> int -> Node
+        """ get end device node from any node, idx is given to look down.
+        """
+        num_dev = len(self.get_s1n_if_exist(node).ds)
+        if idx >= num_dev:
+            log.error(
+                "given index is greater than number of devices under the node")
+
+        return self.get_leaf_from_s1n(self.get_s1n_if_exist(node), idx)
+
+    def get_devices_from_host(self, host):  # Node -> Nodes
+        devices = list(
+            map(self.get_downstream_device_from_edge,
+                self.get_s1n_if_exist(host).ds))
+
+        return devices
+
+    def get_addr(self, device):  # Node -> Tuple[int,int]
+        if device.node_type != NodeType.DEVICE:
+            log.error("get_addr receives non DEVICE type node")
+
+        return (device.address_from, device.address_to)
+
+    def connect_nodes(self, u_node, d_node):  # str -> str -> bool
+        # Create an edge between the two named nodes.
+        # Return False if the same connection already exists.
+        upNode = self.get_node(u_node)
+        dnNode = self.get_node(d_node)
+
+        edge = Edge(upNode, dnNode)
+
+        if any([
+                e.us.name == edge.us.name and e.ds.name == edge.ds.name
+                for e in self.edges
+        ]):
+            return False
+
+        self.edges.append(edge)
+
+        upNode.ds.append(edge)
+        dnNode.us.append(edge)
+
+        return True
+
+    def insert_node(self, new_node, node):
+        if new_node.node_type == NodeType.ASYNC_FIFO:
+            if node.node_type == NodeType.HOST:
+                # Insert node to downstream
+                edge = Edge(node, new_node)
+                new_node.ds = node.ds
+                node.ds = [edge]
+                new_node.us = [edge]
+                self.nodes.append(new_node)
+                self.edges.append(edge)
+                for e in new_node.ds:
+                    # replace us with new_node
+                    e.us = new_node
+            elif node.node_type == NodeType.DEVICE:
+                # insert node to upstream
+                edge = Edge(new_node, node)
+                new_node.us = node.us
+                node.us = [edge]
+                new_node.ds = [edge]
+                self.nodes.append(new_node)
+                self.edges.append(edge)
+                for e in new_node.us:
+                    # replace ds with new_node
+                    e.ds = new_node
+            else:
+                raise
+        elif new_node.node_type == NodeType.SOCKET_M1:
+            # Revise every upstream
+            edge = Edge(new_node, node)
+            new_node.us = node.us
+            node.us = [edge]
+            new_node.ds = [edge]
+            self.nodes.append(new_node)
+            self.edges.append(edge)
+            for e in new_node.us:
+                e.ds = new_node
+
+        elif new_node.node_type == NodeType.SOCKET_1N:
+            # Revise every downstream
+            edge = Edge(node, new_node)
+            new_node.ds = node.ds
+            node.ds = [edge]
+            new_node.us = [edge]
+            # TODO: add new_node.us logic
+            self.nodes.append(new_node)
+            self.edges.append(edge)
+            for e in new_node.ds:
+                e.us = new_node
+        else:
+            # Caller passes HOST or DEVICE as a new node. Error!
+            log.error(
+                "Xbar.insert_node is called with HOST or DEVICE: %s. Ignored" %
+                (new_node.name))
+
+        return self
+
+    def repr_tree(self, node, indent):
+        """string format of tree connection from node to devices
+
+        Desired output:
+        host_a
+          -> asf_nn
+            -> s1n_nn
+              -> sm1_mm
+                -> device_c
+              -> sm1_nn
+                -> device_b
+
+        """
+        out = "// "
+        if indent != 0:
+            # not First
+            out += ' ' * indent + '-> '
+
+        out += node.name
+
+        if node.node_type != NodeType.DEVICE:
+            # still more nodes exist under this node
+            for ds in node.ds:
+                out += '\n'
+                out += self.repr_tree(ds.ds, indent + 2)
+
+        return out
diff --git a/util/tlgen/xbar.rtl.tpl.sv b/util/tlgen/xbar.rtl.tpl.sv
new file mode 100644
index 0000000..0debad5
--- /dev/null
+++ b/util/tlgen/xbar.rtl.tpl.sv
@@ -0,0 +1,234 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+// xbar_${xbar.name} module generated by `tlgen.py` tool
+// All reset signals should be generated from one reset signal to avoid any deadlock
+//
+// Interconnect
+% for host in xbar.hosts:
+${xbar.repr_tree(host, 0)}
+% endfor
+
+module xbar_${xbar.name} (
+% for clock in xbar.clocks:
+  input clk_${clock}_i,
+  input rst_${clock}_ni,
+% endfor
+
+  // Host interfaces
+% for node in xbar.hosts:
+  input  tlul_pkg::tl_h2d_t tl_${node.name}_i,
+  output tlul_pkg::tl_d2h_t tl_${node.name}_o,
+% endfor
+
+  // Device interfaces
+% for node in xbar.devices:
+  output tlul_pkg::tl_h2d_t tl_${node.name}_o,
+  input  tlul_pkg::tl_d2h_t tl_${node.name}_i,
+% endfor
+
+  input scanmode_i
+);
+
+  import tlul_pkg::*;
+  import tl_${xbar.name}_pkg::*;
+
+% for block in xbar.nodes:
+  ## Create enum type for Upstream and Downstream ports connection
+  % if block.node_type.name   == "ASYNC_FIFO":
+    ## One US, one DS
+  tl_h2d_t tl_${block.name}_us_h2d ;
+  tl_d2h_t tl_${block.name}_us_d2h ;
+  tl_h2d_t tl_${block.name}_ds_h2d ;
+  tl_d2h_t tl_${block.name}_ds_d2h ;
+
+  % elif block.node_type.name == "SOCKET_1N":
+    ## One US, multiple DS
+  tl_h2d_t tl_${block.name}_us_h2d ;
+  tl_d2h_t tl_${block.name}_us_d2h ;
+
+  ##typedef enum int {
+  ##  % for port in block.ds:
+  ##    % if loop.last:
+  ##    % else:
+  ##    % endif
+  ##  % endfor
+  ##} socket_${block.name}_ds_e;
+
+  tl_h2d_t tl_${block.name}_ds_h2d [${len(block.ds)}];
+  tl_d2h_t tl_${block.name}_ds_d2h [${len(block.ds)}];
+
+  // Create steering signal
+  logic [${len(block.ds).bit_length()-1}:0] dev_sel_${block.name};
+
+  % elif block.node_type.name == "SOCKET_M1":
+    ## Multiple US, one DS
+    ## typedef enum int {
+    ##   % for port in block.us:
+    ##     % if loop.last:
+    ##     % else:
+    ##     % endif
+    ##   % endfor
+    ## } socket_${block.name}_us_e;
+
+  tl_h2d_t tl_${block.name}_us_h2d [${len(block.us)}];
+  tl_d2h_t tl_${block.name}_us_d2h [${len(block.us)}];
+
+  tl_h2d_t tl_${block.name}_ds_h2d ;
+  tl_d2h_t tl_${block.name}_ds_d2h ;
+
+  % else:
+    ## block is either HOST or DEVICE. Ignore
+  % endif
+% endfor
+
+% for conn in xbar.edges:
+  ## sweep each entry of edges and find each end (us, ds) then connect between
+  ## Connect upstream
+<%
+    if conn.ds.node_type.name == "ASYNC_FIFO":
+      ds_h2d_name = 'tl_' + conn.ds.name + '_us_h2d'
+      ds_d2h_name = 'tl_' + conn.ds.name + '_us_d2h'
+      ds_index = -1
+    elif conn.ds.node_type.name == "SOCKET_1N":
+      ds_h2d_name = 'tl_' + conn.ds.name + '_us_h2d'
+      ds_d2h_name = 'tl_' + conn.ds.name + '_us_d2h'
+      ds_index = -1
+    elif conn.ds.node_type.name == "SOCKET_M1":
+      ds_h2d_name = 'tl_' + conn.ds.name + '_us_h2d'
+      ds_d2h_name = 'tl_' + conn.ds.name + '_us_d2h'
+      ds_index = conn.ds.us.index(conn)
+    elif conn.ds.node_type.name == "DEVICE":
+      ds_h2d_name = 'tl_' + conn.ds.name + '_o'
+      ds_d2h_name = 'tl_' + conn.ds.name + '_i'
+      ds_index = -1
+
+    if conn.us.node_type.name == "ASYNC_FIFO":
+      us_h2d_name = 'tl_' + conn.us.name + '_ds_h2d'
+      us_d2h_name = 'tl_' + conn.us.name + '_ds_d2h'
+      us_index = -1
+    elif conn.us.node_type.name == "SOCKET_1N":
+      us_h2d_name = 'tl_' + conn.us.name + '_ds_h2d'
+      us_d2h_name = 'tl_' + conn.us.name + '_ds_d2h'
+      us_index = conn.us.ds.index(conn)
+    elif conn.us.node_type.name == "SOCKET_M1":
+      us_h2d_name = 'tl_' + conn.us.name + '_ds_h2d'
+      us_d2h_name = 'tl_' + conn.us.name + '_ds_d2h'
+      us_index = -1
+    elif conn.us.node_type.name == "HOST":
+      us_h2d_name = 'tl_' + conn.us.name + '_i'
+      us_d2h_name = 'tl_' + conn.us.name + '_o'
+      us_index = -1
+%>\
+
+% if us_index == -1 and ds_index == -1:
+  assign ${ds_h2d_name} = ${us_h2d_name};
+  assign ${us_d2h_name} = ${ds_d2h_name};
+% elif us_index == -1 and ds_index != -1:
+  assign ${ds_h2d_name}[${ds_index}] = ${us_h2d_name};
+  assign ${us_d2h_name} = ${ds_d2h_name}[${ds_index}];
+% elif us_index != -1 and ds_index == -1:
+  assign ${ds_h2d_name} = ${us_h2d_name}[${us_index}];
+  assign ${us_d2h_name}[${us_index}] = ${ds_d2h_name};
+% else:
+  assign ${ds_h2d_name}[${ds_index}] = ${us_h2d_name}[${us_index}];
+  assign ${us_d2h_name}[${us_index}] = ${ds_d2h_name}[${ds_index}];
+% endif
+% endfor
+
+% for block in xbar.socket_1ns:
+<%
+  addr_sig = "tl_" + block.name + "_us_h2d.a_address"
+  sel_len = len(block.ds).bit_length()
+%>\
+  always_comb begin
+    // default steering to generate error response if address is not within the range
+    dev_sel_${block.name} = ${"%d'd%d" % (sel_len, len(block.ds))};
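+    // A device is selected when the address matches its base after masking:
+    //   (a_address & ~ADDR_MASK_x) == ADDR_SPACE_x
+    // e.g. (illustrative values) base 32'h4000_0000 with mask 32'h0000_0fff
+    // decodes the range 0x4000_0000 - 0x4000_0fff.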
+% for i in block.ds:
+<%
+  leaf = xbar.get_leaf_from_s1n(block, loop.index);
+  name_space = "ADDR_SPACE_" + leaf.name.upper();
+  name_mask  = "ADDR_MASK_" + leaf.name.upper();
+%>\
+  % if loop.first:
+    if ((${addr_sig} & ~(${name_mask})) == ${name_space}) begin
+  % else:
+    end else if ((${addr_sig} & ~(${name_mask})) == ${name_space}) begin
+  % endif
+      dev_sel_${block.name} = ${"%d'd%d" % (sel_len, loop.index)};
+  % if loop.last:
+    end
+  % endif
+% endfor
+  end
+
+% endfor
+
+  // Instantiation phase
+% for block in xbar.nodes:
+  % if block.node_type.name   == "ASYNC_FIFO":
+  tlul_fifo_async #(
+    .ReqDepth        (3),// At least 3 to make async work
+    .RspDepth        (3) // At least 3 to make async work
+  ) u_${block.name} (
+    .clk_h_i      (clk_${block.clocks[0]}_i),
+    .rst_h_ni     (rst_${block.clocks[0]}_ni),
+    .clk_d_i      (clk_${block.clocks[1]}_i),
+    .rst_d_ni     (rst_${block.clocks[1]}_ni),
+    .tl_h_i       (tl_${block.name}_us_h2d),
+    .tl_h_o       (tl_${block.name}_us_d2h),
+    .tl_d_o       (tl_${block.name}_ds_h2d),
+    .tl_d_i       (tl_${block.name}_ds_d2h)
+  );
+  % elif block.node_type.name == "SOCKET_1N":
+  tlul_socket_1n #(
+    % if block.hpass != 1:
+    .HReqPass (1'b${block.hpass}),
+    .HRspPass (1'b${block.hpass}),
+    % endif
+    % if block.dpass != 2**(len(block.ds)) -1:
+    .DReqPass (${len(block.ds)}'h ${"%x" % block.dpass}),
+    .DRspPass (${len(block.ds)}'h ${"%x" % block.dpass}),
+    % endif
+    ## //.HReqDepth(),
+    ## //.HRspDepth(),
+    ## //.DReqDepth(),
+    ## //.DRspDepth(),
+    .N        (${len(block.ds)})
+  ) u_${block.name} (
+    .clk_i        (clk_${xbar.clock}_i),
+    .rst_ni       (rst_${xbar.clock}_ni),
+    .tl_h_i       (tl_${block.name}_us_h2d),
+    .tl_h_o       (tl_${block.name}_us_d2h),
+    .tl_d_o       (tl_${block.name}_ds_h2d),
+    .tl_d_i       (tl_${block.name}_ds_d2h),
+    .dev_select   (dev_sel_${block.name})
+  );
+  % elif block.node_type.name == "SOCKET_M1":
+  tlul_socket_m1 #(
+    % if block.hpass != 2**(len(block.us)) -1:
+    .HReqPass     (${len(block.us)}'h ${"%x" % block.hpass}),
+    .HRspPass     (${len(block.us)}'h ${"%x" % block.hpass}),
+    % endif
+    ## //.HReqDepth    (),
+    ## //.HRspDepth    (),
+    % if block.dpass != 1:
+    .DReqPass     (1'b${block.dpass}),
+    .DRspPass     (1'b${block.dpass}),
+    % endif
+    ## //.DReqDepth    (),
+    ## //.DRspDepth    (),
+    .M            (${len(block.us)})
+  ) u_${block.name} (
+    .clk_i        (clk_${xbar.clock}_i),
+    .rst_ni       (rst_${xbar.clock}_ni),
+    .tl_h_i       (tl_${block.name}_us_h2d),
+    .tl_h_o       (tl_${block.name}_us_d2h),
+    .tl_d_o       (tl_${block.name}_ds_h2d),
+    .tl_d_i       (tl_${block.name}_ds_d2h)
+  );
+  % endif
+% endfor
+
+endmodule
diff --git a/util/topgen.py b/util/topgen.py
new file mode 100755
index 0000000..434fc0c
--- /dev/null
+++ b/util/topgen.py
@@ -0,0 +1,369 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""Top Module Generator
+"""
+import argparse
+import logging as log
+import sys
+from io import StringIO
+from pathlib import Path
+
+import hjson
+from mako.template import Template
+
+import tlgen
+from reggen import gen_rtl, gen_dv, validate
+from topgen import get_hjsonobj_xbars, merge_top, search_ips, validate_top
+
+# IPs to filter out of the scanned IP list; their generated hjson
+# (e.g. rv_plic) is added back separately
+filter_list = ['rv_plic', 'alert_h']
+
+# Common header for generated files
+genhdr = '''// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+// ------------------- W A R N I N G: A U T O - G E N E R A T E D   C O D E !! -------------------//
+// PLEASE DO NOT HAND-EDIT THIS FILE. IT HAS BEEN AUTO-GENERATED WITH THE FOLLOWING COMMAND:
+'''
+
+def generate_rtl(top, tpl_filename):
+    top_rtl_tpl = Template(filename=tpl_filename)
+
+    out_rtl = top_rtl_tpl.render(top=top)
+    return out_rtl
+
+
+def generate_xbars(top, out_path):
+    for obj in top["xbar"]:
+        xbar = tlgen.validate(obj)
+
+        if not tlgen.elaborate(xbar):
+            log.error("Elaboration failed." + repr(xbar))
+
+        # Add clocks to the top configuration
+        obj["clocks"] = xbar.clocks
+        out_rtl, out_pkg, out_dv = tlgen.generate(xbar)
+
+        rtl_path = out_path / 'rtl'
+        rtl_path.mkdir(parents=True, exist_ok=True)
+        dv_path = out_path / 'dv'
+        dv_path.mkdir(parents=True, exist_ok=True)
+
+        rtl_filename = "xbar_%s.sv" % (xbar.name)
+        rtl_filepath = rtl_path / rtl_filename
+        with rtl_filepath.open(mode='w', encoding='UTF-8') as fout:
+            fout.write(out_rtl)
+
+        pkg_filename = "tl_%s_pkg.sv" % (xbar.name)
+        pkg_filepath = rtl_path / pkg_filename
+        with pkg_filepath.open(mode='w', encoding='UTF-8') as fout:
+            fout.write(out_pkg)
+
+        dv_filename = "xbar_%s_tb.sv" % (xbar.name)
+        dv_filepath = dv_path / dv_filename
+        with dv_filepath.open(mode='w', encoding='UTF-8') as fout:
+            fout.write(out_dv)
+
+
+def generate_plic(top, out_path):
+    # Count number of interrupts
+    src = sum([x["width"] if "width" in x else 1 for x in top["interrupt"]])
+
+    # Target and priority: Currently fixed
+    target = int(top["num_cores"], 0) if "num_cores" in top else 1
+    prio = 3
+
+    # Define target path
+    #   rtl: rv_plic.sv & rv_plic_reg_pkg.sv & rv_plic_reg_top.sv
+    #   doc: rv_plic.hjson
+    rtl_path = out_path / 'rtl'
+    rtl_path.mkdir(parents=True, exist_ok=True)
+    doc_path = out_path / 'doc'
+    doc_path.mkdir(parents=True, exist_ok=True)
+
+    # Generating an IP top module is not generalized as a script yet,
+    # so topgen reads the template files from the rv_plic directory directly.
+    # Once the IP top generation tool is placed in util/, we can import it as a library.
+    tpl_path = out_path / '../ip/rv_plic/doc'
+    hjson_tpl_path = tpl_path / 'rv_plic.tpl.hjson'
+    rtl_tpl_path = tpl_path / 'rv_plic.tpl.sv'
+
+    # Generate Register Package and RTLs
+    out = StringIO()
+    with hjson_tpl_path.open(mode='r', encoding='UTF-8') as fin:
+        hjson_tpl = Template(fin.read())
+        out = hjson_tpl.render(src=src, target=target, prio=prio)
+        log.info("RV_PLIC hjson: %s" % out)
+
+    if out == "":
+        log.error("Cannot generate interrupt controller config file")
+        return
+
+    hjson_gen_path = doc_path / "rv_plic.hjson"
+    gencmd = ("// util/topgen.py -t hw/top_earlgrey/doc/top_earlgrey.hjson --plic-only "
+              "-o hw/top_earlgrey/\n\n")
+    with hjson_gen_path.open(mode='w', encoding='UTF-8') as fout:
+        fout.write(genhdr + gencmd + out)
+
+    # Generate register RTLs (currently using shell execute)
+    # TODO: Find a more robust way to generate the RTL
+    hjson_obj = hjson.loads(out,
+                            use_decimal=True,
+                            object_pairs_hook=validate.checking_dict)
+    validate.validate(hjson_obj)
+    gen_rtl.gen_rtl(hjson_obj, str(rtl_path))
+
+    # Generate RV_PLIC Top Module
+    with rtl_tpl_path.open(mode='r', encoding='UTF-8') as fin:
+        rtl_tpl = Template(fin.read())
+        out = rtl_tpl.render(src=src, target=target, prio=prio)
+        log.info("RV_PLIC RTL: %s" % out)
+
+    if out == "":
+        log.error("Cannot generate interrupt controller RTL")
+        return
+
+    rtl_gen_path = rtl_path / "rv_plic.sv"
+    with rtl_gen_path.open(mode='w', encoding='UTF-8') as fout:
+        fout.write(genhdr + gencmd + out)
+
+
+def generate_top_ral(top, ip_objs, out_path):
+    # construct top ral block
+    top_block = gen_rtl.Block()
+    top_block.name = "chip"
+    top_block.base_addr = 0
+    top_block.width = int(top["datawidth"])
+
+    # add blocks
+    for ip_obj in ip_objs:
+        top_block.blocks.append(gen_rtl.json_to_reg(ip_obj))
+
+    # add memories
+    if "memory" in top.keys():
+        for item in list(top["memory"]):
+            mem = gen_rtl.Window()
+            mem.name = item["name"]
+            mem.base_addr = int(item["base_addr"], 0)
+            mem.limit_addr = int(item["base_addr"], 0) + int(item["size"], 0)
+            # TODO: need to add mem access info for memories in topcfg
+            mem.dvrights = "RW"
+            mem.n_bits = top_block.width
+            top_block.wins.append(mem)
+
+    # get sub-block base addresses from top cfg
+    for block in top_block.blocks:
+        for module in top["module"]:
+            if block.name == module["name"]:
+                block.base_addr = module["base_addr"]
+                break
+    # generate the top ral model with template
+    gen_dv.gen_ral(top_block, str(out_path))
+
+
+def main():
+    parser = argparse.ArgumentParser(prog="topgen")
+    parser.add_argument(
+        '--topcfg',
+        '-t',
+        required=True,
+        help="`top_{name}.hjson` file.")
+    parser.add_argument('--tpl', '-c', help="`top_{name}.tpl.sv` file.")
+    parser.add_argument(
+        '--outdir',
+        '-o',
+        help='''Target TOP directory.
+             Module is created under rtl/. (default: dir(topcfg)/..)
+             ''') # yapf: disable
+    parser.add_argument('--verbose', '-v', action='store_true', help="Verbose")
+
+    # Generator options: 'no' series. Cannot be combined with the 'only' series
+    parser.add_argument(
+        '--no-top',
+        action='store_true',
+        help="If defined, topgen doesn't generate top_{name} RTLs.")
+    parser.add_argument(
+        '--no-xbar',
+        action='store_true',
+        help="If defined, topgen doesn't generate crossbar RTLs.")
+    parser.add_argument(
+        '--no-plic',
+        action='store_true',
+        help="If defined, topgen doesn't generate the interrup controller RTLs."
+    )
+    parser.add_argument(
+        '--no-gen-hjson',
+        action='store_true',
+        help='''If defined, the tool assumes topcfg is a generated hjson,
+             so it bypasses the validation step and doesn't read the ip and
+             xbar configurations
+             ''')
+
+    # Generator options: 'only' series. Cannot be combined with the 'no' series
+    parser.add_argument(
+        '--top-only',
+        action='store_true',
+        help="If defined, the tool generates top RTL only") # yapf:disable
+    parser.add_argument(
+        '--xbar-only',
+        action='store_true',
+        help="If defined, the tool generates crossbar RTLs only")
+    parser.add_argument(
+        '--plic-only',
+        action='store_true',
+        help="If defined, the tool generates RV_PLIC RTL and hjson only")
+    parser.add_argument(
+        '--hjson-only',
+        action='store_true',
+        help="If defined, the tool generates complete hjson only")
+    # Generator options: generate dv ral model
+    parser.add_argument(
+        '--top_ral',
+        '-r',
+        default=False,
+        action='store_true',
+        help="If set, the tool generates top level RAL model for DV")
+
+    args = parser.parse_args()
+
+    # check combinations
+    if args.top_ral:
+        args.hjson_only = True
+        args.no_top = True
+
+    if args.hjson_only:
+        args.no_gen_hjson = False
+
+    if (args.no_top or args.no_xbar or
+            args.no_plic) and (args.top_only or args.xbar_only or
+                               args.plic_only):
+        log.error(
+            "'no' series options cannot be used with 'only' series options")
+        raise SystemExit(sys.exc_info()[1])
+
+    if not args.hjson_only and not args.tpl:
+        log.error(
+            "Template file can be omitted only if '--hjson-only' is true")
+        raise SystemExit(sys.exc_info()[1])
+
+    if args.verbose:
+        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
+    else:
+        log.basicConfig(format="%(levelname)s: %(message)s")
+
+    if not args.outdir:
+        outdir = Path(args.topcfg).parent / ".."
+        log.info("TOP directory not given. Use %s", (outdir))
+    elif not Path(args.outdir).is_dir():
+        log.error("'--outdir' should point to writable directory")
+        raise SystemExit(sys.exc_info()[1])
+    else:
+        outdir = Path(args.outdir)
+
+    out_path = Path(outdir)
+
+    if not args.no_gen_hjson or args.hjson_only:
+        # load top configuration
+        try:
+            with open(args.topcfg, 'r') as ftop:
+                topcfg = hjson.load(ftop, use_decimal=True)
+        except ValueError:
+            raise SystemExit(sys.exc_info()[1])
+
+        # Sweep the IP directory and gather the config files
+        ip_dir = Path(__file__).parents[1] / 'hw/ip'
+        ips = search_ips(ip_dir)
+
+        # exclude IPs in filter_list, e.g. rv_plic (to use the top_earlgrey one)
+        ips = [x for x in ips if not x.parents[1].name in filter_list]
+
+        # It may require two passes to check if the module is needed.
+        # TODO: the first run of topgen will fail due to the absence of rv_plic.
+        # It needs to run up to amend_interrupt in the merge_top function,
+        # then create rv_plic.hjson, then run the xbar generation.
+        hjson_dir = Path(args.topcfg).parent
+        ips.append(hjson_dir / 'rv_plic.hjson')
+
+        # load hjson and pass validate from reggen
+        try:
+            ip_objs = []
+            for x in ips:
+                # Skip if it is not in the module list
+                if not x.stem in [ip["type"] for ip in topcfg["module"]]:
+                    log.info(
+                        "Skip module %s as it isn't in the top module list" %
+                        x.stem)
+                    continue
+
+                obj = hjson.load(
+                    x.open('r'),
+                    use_decimal=True,
+                    object_pairs_hook=validate.checking_dict)
+                if validate.validate(obj) != 0:
+                    log.info("Parsing IP %s configuration failed. Skip" % x)
+                    continue
+                ip_objs.append(obj)
+
+        except ValueError:
+            raise SystemExit(sys.exc_info()[1])
+
+        # Read the crossbars under the top directory
+        xbar_objs = get_hjsonobj_xbars(hjson_dir)
+
+        log.info("Detected crossbars: %s" %
+                 (", ".join([x["name"] for x in xbar_objs])))
+
+        # TODO: Add validate
+        topcfg = validate_top(topcfg)
+
+        # TODO: Add conversion logic from top to top.complete.hjson
+        completecfg = merge_top(topcfg, ip_objs, xbar_objs)
+
+        genhjson_path = hjson_dir / ("top_%s.gen.hjson" % completecfg["name"])
+        gencmd = ("// util/topgen.py -t hw/top_earlgrey/doc/top_earlgrey.hjson --hjson-only "
+                  "-o hw/top_earlgrey/\n")
+
+        if args.top_ral:
+            generate_top_ral(completecfg, ip_objs, out_path)
+        else:
+            genhjson_path.write_text(genhdr + gencmd +
+                                     hjson.dumps(completecfg, for_json=True))
+
+    if args.hjson_only:
+        log.info("hjson is generated. Exiting...")
+        sys.exit()
+
+    if args.no_gen_hjson:
+        # load top.complete configuration
+        try:
+            with open(args.topcfg, 'r') as ftop:
+                completecfg = hjson.load(ftop, use_decimal=True)
+        except ValueError:
+            raise SystemExit(sys.exc_info()[1])
+
+    # Generate PLIC
+    if not args.no_plic or args.plic_only:
+        generate_plic(completecfg, out_path)
+
+    # Generate xbars
+    if not args.no_xbar or args.xbar_only:
+        generate_xbars(completecfg, out_path)
+
+    # TODO: Get name from hjson
+    top_name = completecfg["name"]
+
+    if not args.no_top or args.top_only:
+        rtl_path = out_path / 'rtl'
+        rtl_path.mkdir(parents=True, exist_ok=True)
+        rtl_filepath = rtl_path / ("top_%s.sv" % (top_name))
+        out_rtl = generate_rtl(completecfg, args.tpl)
+
+        with rtl_filepath.open(mode='w', encoding='UTF-8') as fout:
+            fout.write(out_rtl)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/util/topgen/__init__.py b/util/topgen/__init__.py
new file mode 100644
index 0000000..c192aab
--- /dev/null
+++ b/util/topgen/__init__.py
@@ -0,0 +1,5 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+from .merge import search_ips, merge_top, get_hjsonobj_xbars
+from .validate import validate_top
diff --git a/util/topgen/merge.py b/util/topgen/merge.py
new file mode 100644
index 0000000..e7bfb95
--- /dev/null
+++ b/util/topgen/merge.py
@@ -0,0 +1,338 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import logging as log
+import sys
+from copy import deepcopy
+from pathlib import Path, PosixPath
+
+import hjson
+
+
+def is_ipcfg(ip: Path) -> bool:  # return bool
+    log.info("IP Path: %s" % repr(ip))
+    ip_name = ip.parents[1].name
+    hjson_name = ip.name
+
+    log.info("IP Name(%s) and HJSON name (%s)" % (ip_name, hjson_name))
+
+    if ip_name + ".hjson" == hjson_name or ip_name + "_reg.hjson" == hjson_name:
+        return True
+    return False
+
+
+def search_ips(ip_path):  # return list of config files
+    # list every hjson file
+    p = ip_path.glob('*/doc/*.hjson')
+
+    # filter only ip_name/doc/ip_name{_reg|''}.hjson
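+    # e.g. hw/ip/uart/doc/uart.hjson and hw/ip/uart/doc/uart_reg.hjson are kept
+    # (illustrative paths); any other hjson under doc/ is dropped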
+    ips = [x for x in p if is_ipcfg(x)]
+
+    log.info("Filtered-in IP files: %s" % repr(ips))
+    return ips
+
+
+def is_xbarcfg(xbar_obj):
+    if "type" in xbar_obj and xbar_obj["type"] == "xbar":
+        return True
+
+    return False
+
+
+def get_hjsonobj_xbars(xbar_path):
+    """ Search crossbars hjson files from given path.
+
+    Search every hjson in the directory and check hjson type.
+    It could be type: "top" or type: "xbar"
+    returns [(name, obj), ... ]
+    """
+    p = xbar_path.glob('*.hjson')
+    try:
+        xbar_objs = [hjson.load(x.open('r'), use_decimal=True) for x in p]
+    except ValueError:
+        raise SystemExit(sys.exc_info()[1])
+
+    xbar_objs = [x for x in xbar_objs if is_xbarcfg(x)]
+
+    return xbar_objs
+
+
+def amend_ip(top, ip):
+    """ Amend additional information into top module
+
+    Amended fields:
+        - size: register space
+        - clock: converted into ip_clock
+        - bus_device
+        - bus_host: none if doesn't exist
+        - available_input_list: empty list if doesn't exist
+        - available_output_list: empty list if doesn't exist
+        - available_inout_list: empty list if doesn't exist
+        - interrupt_list: empty list if doesn't exist
+        - (TBD) alert_list: empty list if doesn't exist
+    """
+    ip_list_in_top = [x["name"].lower() for x in top["module"]]
+    ipname = ip["name"].lower()
+    if not ipname in ip_list_in_top:
+        log.info("TOP doens't use the IP %s. Skip" % ip["name"])
+        return
+
+    # Find index of the IP
+    ip_idx = ip_list_in_top.index(ipname)
+
+    ip_module = top["module"][ip_idx]
+
+    # Size
+    if not "size" in ip_module:
+        ip_module["size"] = "0x%x" % max(ip["gensize"], 0x1000)
+    elif ip_module["size"] < ip["gensize"]:
+        log.error(
+            "given 'size' field in IP %s is smaller than the required space" %
+            ip_module["name"])
+
+    # ip_clock
+    if "clock" in ip:
+        ip_module["ip_clock"] = ip["clock"]
+    else:
+        ip_module["ip_clock"] = "main"
+
+    # bus_device
+    ip_module["bus_device"] = ip["bus_device"]
+
+    # bus_host
+    if "bus_host" in ip and ip["bus_host"] != "":
+        ip_module["bus_host"] = ip["bus_host"]
+    else:
+        ip_module["bus_host"] = "none"
+
+    # available_input_list , available_output_list, available_inout_list
+    if "available_input_list" in ip:
+        ip_module["available_input_list"] = ip["available_input_list"]
+        for i in ip_module["available_input_list"]:
+            i.pop('desc', None)
+            i["width"] = int(i["width"])
+    else:
+        ip_module["available_input_list"] = []
+    if "available_output_list" in ip:
+        ip_module["available_output_list"] = ip["available_output_list"]
+        for i in ip_module["available_output_list"]:
+            i.pop('desc', None)
+            i["width"] = int(i["width"])
+    else:
+        ip_module["available_output_list"] = []
+    if "available_inout_list" in ip:
+        ip_module["available_inout_list"] = ip["available_inout_list"]
+        for i in ip_module["available_inout_list"]:
+            i.pop('desc', None)
+            i["width"] = int(i["width"])
+    else:
+        ip_module["available_inout_list"] = []
+
+    # interrupt_list
+    if "interrupt_list" in ip:
+        ip_module["interrupt_list"] = ip["interrupt_list"]
+        for i in ip_module["interrupt_list"]:
+            i.pop('desc', None)
+            i["width"] = int(i["width"])
+    else:
+        ip_module["interrupt_list"] = []
+
+    # (TBD) alert_list
+
+
+# TODO: Replace this part to be configurable from hjson or template
+predefined_modules = {
+    "corei": "rv_core_ibex",
+    "cored": "rv_core_ibex",
+    "dm_sba": "rv_dm",
+    "debug_mem": "rv_dm"
+}
+
+
+def xbar_addhost(xbar, host):
+    # TODO: check if host is another crossbar
+    # Check and fetch host if exists in nodes
+    obj = list(filter(lambda node: node["name"] == host, xbar["nodes"]))
+    if len(obj) == 0:
+        log.warning(
+            "host %s doesn't exist in the node list. Using default values" %
+            host)
+        obj = {
+            "name": host,
+            "clock": xbar["clock"],
+            "type": "host",
+            "inst_type": "",
+            "pipeline": "false"
+        }
+        topxbar["nodes"].append(obj)
+    else:
+        obj[0]["clock"] = xbar["clock"]
+        obj[0]["inst_type"] = predefined_modules[
+            host] if host in predefined_modules else ""
+        obj[0]["pipeline"] = obj[0]["pipeline"] if "pipeline" in obj[
+            0] else "false"
+
+
+def xbar_adddevice(top, xbar, device):
+    """Add device nodes information
+
+    - clock: comes from the module if it exists; use the main top clock for memory as of now
+    - inst_type: comes from the module or memory if it exists.
+    - base_addr: comes from module or memory, or assume rv_plic?
+    - size_byte: comes from module or memory
+    """
+    deviceobj = list(
+        filter(lambda node: node["name"] == device,
+               top["module"] + top["memory"]))
+    nodeobj = list(filter(lambda node: node["name"] == device, xbar["nodes"]))
+
+    xbar_list = [x["name"] for x in top["xbar"] if x["name"] != xbar["name"]]
+
+    if len(deviceobj) == 0:
+        # doesn't exist,
+        # case 1: another xbar --> check in xbar list
+        if device in xbar_list and len(nodeobj) == 0:
+            log.error(
+                "Another crossbar %s needs to be specified in the 'nodes' list"
+                % device)
+            return
+
+        # case 2: predefined_modules (debug_mem, rv_plic)
+        # TODO: Find configurable solution not from predefined but from object?
+        elif device in predefined_modules:
+            if device == "debug_mem":
+                if len(nodeobj) == 0:
+                    # Add new debug_mem
+                    xbar["nodes"].append({
+                        "name": "debug_mem",
+                        "type": "device",
+                        "clock": "main",
+                        "inst_type": predefined_modules["debug_mem"],
+                        "base_addr": top["debug_mem_base_addr"],
+                        "size_byte": "0x1000"
+                    }) # yapf: disable
+                else:
+                    # Update if exists
+                    node = nodeobj[0]
+                    node["inst_type"] = predefined_modules["debug_mem"]
+                    node["base_addr"] = top["debug_mem_base_addr"]
+                    node["size_byte"] = "0x1000"
+            else:
+                log.error("device %s shouldn't be host type" % device)
+                return
+        # case 3: not defined
+        else:
+            log.error(
+                "device %s doesn't exist in 'module', 'memory', or predefined"
+                % device)
+            return
+
+    # Search object from module or memory
+    elif len(nodeobj) == 0:
+        # found in module or memory but node object doesn't exist.
+        xbar["nodes"].append({
+            "name" : device,
+            "type" : "device",
+            "clock" : deviceobj[0]["clock"],
+            "inst_type" : deviceobj[0]["type"],
+            "base_addr" : deviceobj[0]["base_addr"],
+            "size_byte": deviceobj[0]["size"]
+        }) # yapf: disable
+
+    else:
+        # found and exist in the nodes too
+        node = nodeobj[0]
+        node["inst_type"] = deviceobj[0]["type"]
+        node["base_addr"] = deviceobj[0]["base_addr"]
+        node["size_byte"] = deviceobj[0]["size"]
+
+
+def amend_xbar(top, xbar):
+    """Amend crossbar informations to the top list
+
+    Amended fields
+    - clock: adopted from the module clock if it exists
+    - inst_type: module instance type; some modules are hard-coded.
+                 The tool searches the module and memory lists and puts the type here
+    - base_addr: from top["module"]
+    - size: from top["module"]
+    """
+    xbar_list = [x["name"] for x in top["xbar"]]
+    if not xbar["name"] in xbar_list:
+        log.info(
+            "Xbar %s doesn't belong to the top %s. Check if the xbar doesn't need"
+            % (xbar["name"], top["name"]))
+        return
+
+    topxbar = list(
+        filter(lambda node: node["name"] == xbar["name"], top["xbar"]))[0]
+
+    topxbar["connections"] = deepcopy(xbar["connections"])
+    if "nodes" in xbar:
+        topxbar["nodes"] = deepcopy(xbar["nodes"])
+    else:
+        topxbar["nodes"] = []
+
+    # Build nodes from 'connections'
+    device_nodes = set()
+    for host, devices in xbar["connections"].items():
+        # add host first
+        xbar_addhost(topxbar, host)
+
+        # collect devices to add later if they don't exist yet
+        device_nodes.update(devices)
+
+    log.info(device_nodes)
+    for device in device_nodes:
+        xbar_adddevice(top, topxbar, device)
+
+
+def prefix_module(module, interrupt_list):
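+    # e.g. (illustrative) an entry {"name": "tx_watermark", "width": 1} coming
+    # from module "uart" becomes {"name": "uart_tx_watermark", "width": 1}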
+    result = []
+    for i in interrupt_list:
+        result.append({
+            "name": module.lower() + "_" + i["name"],
+            "width": i["width"]
+        })
+
+    return result
+
+
+def amend_interrupt(top):
+    """Check interrupt_module if exists, or just use all modules
+    """
+    if not "interrupt_module" in top:
+        top["interrupt_module"] = [x["name"] for x in top["module"]]
+
+    if not "interrupt" in top or top["interrupt"] == "":
+        top["interrupt"] = []
+
+    for m in top["interrupt_module"]:
+        ip = list(filter(lambda module: module["name"] == m, top["module"]))
+        if len(ip) == 0:
+            log.warning(
+                "Cannot find IP %s which is used in the interrupt_module" % m)
+            continue
+
+        log.info("Adding interrupts from module %s" % ip[0]["name"])
+        top["interrupt"] += prefix_module(m, ip[0]["interrupt_list"])
+
+
+def merge_top(topcfg, ipobjs, xbarobjs):
+    gencfg = deepcopy(topcfg)
+
+    # Combine ip cfg into topcfg
+    for ip in ipobjs:
+        amend_ip(gencfg, ip)
+
+    # Combine the interrupt (should be processed prior to xbar)
+    amend_interrupt(gencfg)
+
+    # Combine xbar into topcfg
+    for xbar in xbarobjs:
+        amend_xbar(gencfg, xbar)
+
+    # remove the unwanted field 'debug_mem_base_addr'
+    gencfg.pop('debug_mem_base_addr', None)
+
+    return gencfg
diff --git a/util/topgen/validate.py b/util/topgen/validate.py
new file mode 100644
index 0000000..80a1878
--- /dev/null
+++ b/util/topgen/validate.py
@@ -0,0 +1,8 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+
+def validate_top(topcfg):
+    # return as it is for now
+    return topcfg
diff --git a/util/uvmdvgen.py b/util/uvmdvgen.py
new file mode 100755
index 0000000..31c63cc
--- /dev/null
+++ b/util/uvmdvgen.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""Command-line tool to autogenerate boilerplate DV testbench code extended from dv_lib / cip_lib
+"""
+import argparse
+import os
+import sys
+
+from uvmdvgen import gen_agent, gen_env
+
+
+def main():
+    parser = argparse.ArgumentParser(
+        description=__doc__,
+        formatter_class=argparse.RawDescriptionHelpFormatter)
+    parser.add_argument(
+        "name",
+        metavar="[ip/block name]",
+        help="Name of the ip/block for which the UVM TB is being auto-generated"
+    )
+
+    parser.add_argument(
+        "-a",
+        "--gen_agent",
+        action='store_true',
+        help="Generate UVM agent code extended from DV library")
+
+    parser.add_argument(
+        "-s",
+        "--has_separate_host_device_driver",
+        action='store_true',
+        help=
+        """IP / block agent creates a separate driver for host and device modes.
+                              (ignored if -a switch is not passed)""")
+
+    parser.add_argument(
+        "-e",
+        "--gen_env",
+        action='store_true',
+        help="Generate testbench UVM env code")
+
+    parser.add_argument(
+        "-c",
+        "--is_cip",
+        action='store_true',
+        help=
+        """Is comportable IP - this will result in code being extended from CIP
+                              library. If switch is not passed, then the code will be extended from
+                              DV library instead. (ignored if -e switch is not passed)"""
+    )
+
+    parser.add_argument(
+        "-ea",
+        "--env_agents",
+        nargs="+",
+        metavar="agt1 agt2",
+        help="""Env creates an interface agent specified here. They are
+                              assumed to already exist. Note that the list is space-separated,
+                              and not comma-separated. (ignored if -e switch is not passed)"""
+    )
+
+    parser.add_argument(
+        "-ao",
+        "--agent_outdir",
+        default="name",
+        metavar="[hw/dv/sv]",
+        help="""Path to place the agent code. A directory called <name>_agent is
+                              created at this location. (default set to './<name>')"""
+    )
+
+    parser.add_argument(
+        "-eo",
+        "--env_outdir",
+        default="name",
+        metavar="[hw/ip/<ip>/dv]",
+        help="""Path to place the env code. 3 directories are created - env,
+                              tb and tests. (default set to './<name>')""")
+
+    args = parser.parse_args()
+    if args.agent_outdir == "name": args.agent_outdir = args.name
+    if args.env_outdir == "name": args.env_outdir = args.name
+
+    if args.gen_agent:
+        gen_agent.gen_agent(args.name, \
+                            args.has_separate_host_device_driver, \
+                            args.agent_outdir)
+
+    if args.gen_env:
+        if not args.env_agents: args.env_agents = []
+        gen_env.gen_env(args.name, \
+                        args.is_cip, \
+                        args.env_agents, \
+                        args.env_outdir)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/util/uvmdvgen/Makefile.tpl b/util/uvmdvgen/Makefile.tpl
new file mode 100644
index 0000000..cd358c8
--- /dev/null
+++ b/util/uvmdvgen/Makefile.tpl
@@ -0,0 +1,71 @@
+${'####################################################################################################'}
+${'## Copyright lowRISC contributors.                                                                ##'}
+${'## Licensed under the Apache License, Version 2.0, see LICENSE for details.                       ##'}
+${'## SPDX-License-Identifier: Apache-2.0                                                            ##'}
+${'####################################################################################################'}
+${'## Entry point test Makefile for building and running tests.                                      ##'}
+${'## These are generic set of option groups that apply to all testbenches.                          ##'}
+${'## This flow requires the following options to be set:                                            ##'}
+${'## DV_DIR       - current dv directory that contains the test Makefile                            ##'}
+${'## DUT_TOP      - top level dut module name                                                       ##'}
+${'## TB_TOP       - top level tb module name                                                        ##'}
+${'## DOTF         - .f file used for compilation                                                    ##'}
+${'## COMPILE_KEY  - compile option set                                                              ##'}
+${'## TEST_NAME    - name of the test to run - this is supplied on the command line                  ##'}
+${'####################################################################################################'}
+DV_DIR          := ${'$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))'}
+export DUT_TOP  := ${name}
+export TB_TOP   := tb
+FUSESOC_CORE    := lowrisc:dv:${name}_sim:0.1
+COMPILE_KEY     ?= default
+
+UVM_TEST        ?= ${name}_base_test
+UVM_TEST_SEQ    ?= ${name}_base_vseq
+
+${'####################################################################################################'}
+${'##                     A D D    I N D I V I D U A L    T E S T S    B E L O W                     ##'}
+${'####################################################################################################'}
+TEST_NAME       ?= ${name}_sanity
+UVM_TEST        ?= ${name}_base_test
+UVM_TEST_SEQ    ?= ${name}_base_vseq
+
+ifeq (${'$'}{TEST_NAME},${name}_sanity)
+  UVM_TEST_SEQ   = ${name}_sanity_vseq
+endif
+
+ifeq (${'$'}{TEST_NAME},${name}_csr_hw_reset)
+  UVM_TEST_SEQ   = ${name}_csr_vseq
+  RUN_OPTS      += +csr_hw_reset
+  RUN_OPTS      += +en_scb=0
+endif
+
+ifeq (${'$'}{TEST_NAME},${name}_csr_rw)
+  UVM_TEST_SEQ   = ${name}_csr_vseq
+  RUN_OPTS      += +csr_rw
+  RUN_OPTS      += +en_scb=0
+endif
+
+ifeq (${'$'}{TEST_NAME},${name}_csr_bit_bash)
+  UVM_TEST_SEQ   = ${name}_csr_vseq
+  RUN_OPTS      += +csr_bit_bash
+  RUN_OPTS      += +en_scb=0
+endif
+
+ifeq (${'$'}{TEST_NAME},${name}_csr_aliasing)
+  UVM_TEST_SEQ   = ${name}_csr_vseq
+  RUN_OPTS      += +csr_aliasing
+  RUN_OPTS      += +en_scb=0
+endif
+
+${'# TODO: remove this test if there are no memories in the DUT'}
+ifeq (${'$'}{TEST_NAME},${name}_mem_walk)
+  UVM_TEST_SEQ   = ${name}_csr_vseq
+  RUN_OPTS      += +csr_mem_walk
+  RUN_OPTS      += +en_scb=0
+endif
+
+${'####################################################################################################'}
+${'## Include the tool Makefile below                                                                ##'}
+${'## Dont add anything else below it!                                                               ##'}
+${'####################################################################################################'}
+include ${'$'}{DV_DIR}/../../../dv/tools/Makefile
diff --git a/util/uvmdvgen/README.md b/util/uvmdvgen/README.md
new file mode 100644
index 0000000..9bb701b
--- /dev/null
+++ b/util/uvmdvgen/README.md
@@ -0,0 +1,317 @@
+# uvmdvgen: UVM agent & complete testbench boilerplate code auto-generation tool
+
+uvmdvgen is a Python 3 based tool to generate the boilerplate code for a UVM agent
+as well as the complete UVM testbench for a given DUT. The tool generates all
+the relevant UVM-based classes including the package and the fusesoc core file
+to make it quickly plug-and-playable. The packages import the standard
+utility and library packages wherever applicable, to conform to our existing
+methodology and style.
+
+When starting a new DV effort, a user typically goes through a copy-paste
+exercise to replicate an existing UVM testbench for the new block and then has
+to go through several debug cycles to get it working. This tool aims to
+eliminate that. Also, as part of the OpenTitan DV methodology, we have several
+utilities and base class structures (such as DV lib and CIP lib) that share all
+of the common code. By extending a new DV environment from the common code, the
+effort is drastically reduced.
+
+### Setup
+The tool uses Mako-based templates, so the following dependency needs to be
+installed:
+```
+$ pip3 install --user mako
+```
+
+### Help switch (-h)
+Running the tool with the `-h` switch provides a brief description of all available
+switches.
+```
+$ util/uvmdvgen.py -h
+usage: uvmdvgen.py [-h] [-a] [-s] [-e] [-c] [-ea [name] [[name] ...]]
+                   [-ao [hw/dv/sv]] [-eo [hw/ip/<ip>/dv]]
+                   [ip/block name]
+
+Command-line tool to autogenerate boilerplate DV testbench code extended from dv_lib / cip_lib
+
+positional arguments:
+  [ip/block name]       Name of the ip/block for which the UVM TB is being
+                        auto-generated
+
+optional arguments:
+  -h, --help            show this help message and exit
+  -a, --gen_agent       Generate UVM agent code extended from DV library
+  -s, --has_separate_host_device_driver
+                        IP / block agent creates a separate driver for host
+                        and device modes. (ignored if -a switch is not passed)
+  -e, --gen_env         Generate testbench UVM environment code
+  -c, --is_cip          Is comportable IP - this will result in code being
+                        extended from CIP library. If switch is not passed,
+                        then the code will be extended from DV library
+                        instead. (ignored if -e switch is not passed)
+  -ea agt1 agt2 [agt1 agt2 ...], --env_agents agt1 agt2 [agt1 agt2 ...]
+                        Env creates an interface agent specified here. They are
+                        assumed to already exist. Note that the list is space-
+                        separated, and not comma-separated. (ignored if -e
+                        switch is not passed)
+  -ao [hw/dv/sv], --agent_outdir [hw/dv/sv]
+                        Path to place the agent code. A directory called
+                        <name>_agent is created at this location. (default set
+                        to './<name>')
+  -eo [hw/ip/<ip>/dv], --env_outdir [hw/ip/<ip>/dv]
+                        Path to place the env code. 3 directories are created
+                        - env, tb and tests. (default set to './<name>')
+```
+
+### Generating UVM agent
+The boilerplate code for a UVM agent for an interface can be generated using the
+`-a` switch. This results in the generation of a complete agent with classes that
+extend from the [dv library](../../hw/dv/sv/dv_lib/README.md). Please see its
+description for more details.
+
+The tool generates the interface, item, cfg, cov, monitor, driver and sequence
+library classes. Let's take `jtag` as the argument passed for the name of the IP.
+The following describes the contents of each generated source:
+
+* **jtag_if**
+
+  This is an empty shell of an interface. The user is required to add content.
+
+* **jtag_item**
+
+  This is an empty transaction packet extended from `uvm_sequence_item`.
+
+* **jtag_agent_cfg**
+
+  This is the agent configuration object. It contains the virtual interface
+  handle for `jtag_if`, which is called `vif`.
+
+* **jtag_agent_cov**
+
+  This is a coverage component extended from `dv_base_agent_cov`.
+
+* **jtag_monitor**
+
+  This is the monitor component extended from `dv_base_monitor`. It provides
+  the following items:
+  * **virtual protected task collect_trans(uvm_phase phase)**
+
+    This is a shell task within which the user is required to add logic to detect
+    an event, sample the interface, create a transaction object and write it
+    to the analysis port. This task is called in `dv_base_monitor::run_phase`.
+
+* **jtag_driver**
+
+  This is the driver component extended from `dv_base_driver` with the right
+  parameter set. It provides the following items:
+  * **virtual task reset_signals()**
+
+    This task is for resetting the initial value of the `vif` signals.
+
+  * **virtual task get_and_drive()**
+
+    This task is used to get the next item from the sequencer, apply it to the
+    interface and return the response back. This is, again, an empty task at the
+    moment.
+
+  If the `-s` switch is passed, the tool creates `jtag_host_driver` and
+  `jtag_device_driver` instead (both extend from `jtag_driver`, which is then
+  typedef'ed in the pkg to `dv_base_driver` with the right parameter set), and
+  their contents are exactly the same.
+
+* **seq_lib/jtag_base_seq**
+
+  This is extended from `dv_base_seq`.
+
+* **seq_lib/jtag_seq_list**
+
+  This is a list of sequences included in one place.
+
+* **jtag_agent_pkg**
+
+  This is the package file that includes all of the above sources and imports
+  the dependent packages.
+
+* **jtag_agent.core**
+
+  This is the fusesoc core file that is used to generate the filelist for
+  the build.
+
+The tool does not create `jtag_sequencer` or `jtag_agent` classes separately.
+Instead, it typedefs `dv_base_sequencer` and `dv_base_agent` respectively
+with the right type-parameters in the pkg. The reason is that a dedicated
+sequencer and agent are not required, since `dv_base_agent` already
+has all the sub-component instantiations and connections, and `dv_base_sequencer`
+already has a handle to the agent cfg object; nothing more is typically needed.
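+
+For illustration, with `jtag` as the name, the generated `jtag_agent_pkg` contains
+a typedef along these lines (a minimal sketch derived from the generated package
+template):
+```
+  // reuse dv_base_sequencer as is with the right parameter set
+  typedef dv_base_sequencer #(.ITEM_T (jtag_item),
+                              .CFG_T  (jtag_agent_cfg)) jtag_sequencer;
+```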
+
+### Generating UVM environment & testbench
+The boilerplate code for a UVM environment and the testbench for a DUT can be
+generated using the `-e` switch. This results in the generation of classes
+that extend from the [dv library](../../hw/dv/sv/dv_lib/README.md). If the `-c`
+switch is passed, they extend from the [cip library](../../hw/dv/sv/cip_lib/README.md).
+With the `-ea` switch, the user can provide a list of downstream agents to create
+within the environment.
+Please see their descriptions for more details.
+
+The tool generates not only the UVM environment, but also the base test, the
+testbench, the top level fusesoc core file with the sim target, a Makefile that
+already includes the sanity and CSR test suite, and more. With just a few tweaks,
+this enables the user to reach the V1 milestone much more quickly. Let's take
+`i2c_host` as the argument passed for the name of the IP. The following is the
+list of files generated with a brief description of their contents:
+
+* **env/i2c_host_env_cfg**
+
+  This is the env cfg object. It creates the cfg objects for the downstream
+  agents that were passed using the `-ea` switch in the `initialize()` function,
+  which is called in `dv_base_test::build_phase()`. Since the cfg handle is passed
+  to all env components, those downstream agent cfg objects can be hierarchically
+  referenced.
+
+* **env/i2c_host_env_cov**
+
+  This is the coverage component class. A handle of this class is passed to the
+  scoreboard and the virtual sequencer so that covergroups can be sampled in the
+  scoreboard as well as in the sequences.
+
+* **env/i2c_host_reg_block**
+
+  This is the UVM reg based RAL model, created as a placeholder for completeness.
+  The actual RAL model needs to be generated using the
+  [regtool](../reggen/README.md) prior to running simulations.
+
+* **env/i2c_host_scoreboard**
+
+  This is the scoreboard component that already creates the analysis fifos and
+  queues for the agents passed via the `-ea` switch. It adds starter tasks for
+  processing each fifo in a forever loop and invokes them in the `run_phase`
+  using a `fork-join_none` statement. If the `-c` switch is passed, it also adds a
+  `process_tl_access` task that is extended from `cip_base_scoreboard`. This
+  task provides a TileLink access packet for further processing.
+
+* **env/i2c_host_virtual_sequencer**
+
+  This is the virtual sequencer used by all test sequences to run the traffic.
+  It adds handles to the downstream agent sequencers passed via the `-ea` switch.
+  Sub-sequences can be started on them via the `p_sequencer` handle.
+
+* **env/seq_lib/i2c_host_base_vseq**
+
+  This is the base virtual sequence in which the user can add common tasks,
+  functions and variables that other extended test sequences can reuse. For
+  starters, it provides the `i2c_host_init()` task and the `do_i2c_host_init` knob
+  for controllability.
+
+* **env/seq_lib/i2c_host_sanity_vseq**
+
+  This is the basic sanity test sequence that the user needs to develop as the
+  first test sequence. It extends from `i2c_host_base_vseq` (a brief sketch is
+  shown after this list).
+
+* **env/seq_lib/i2c_host_csr_vseq**
+
+  This is the test sequence for the entire CSR suite of tests. It calls the
+  `dv_base_vseq::run_csr_vseq_wrapper()` task, which is a complete test sequence.
+  All the user needs to do is run the CSR tests and add exclusions, if needed,
+  using the provided `add_csr_exclusions()` function.
+
+* **env/seq_lib/i2c_host_vseq_list**
+
+  This is a list of test sequences included in one place.
+
+* **env/i2c_host_env**
+
+  This is the env class that creates the downstream agents passed via the `-ea`
+  switch. It sets their corresponding cfg objects (which are members of the env
+  cfg object) into the `uvm_config_db`. It also makes the analysis port connections
+  in the `connect_phase` and sets the sequencer handles in the virtual
+  sequencer.
+
+* **env/i2c_host_env_pkg**
+
+  This is the env pkg file which includes all env classes and imports the
+  dependent packages.
+
+* **env/i2c_host_env.core**
+
+  This is the fusesoc core file for the env pkg compile unit.
+
+* **tests/i2c_host_base_test**
+
+  This is the base test class. The base test class it extends from already
+  creates the `env` and `cfg` objects, which are available for manipulation in
+  UVM phases. This class's name is supplied to the `UVM_TESTNAME` plusarg to run
+  tests using the UVM methodology.
+
+* **tests/i2c_host_test_pkg**
+
+  This is the test pkg file which includes all test classes and imports the
+  dependent packages.
+
+* **tests/i2c_host_test.core**
+
+  This is the fusesoc core file for the test pkg compile unit.
+
+* **tb/i2c_host_bind**
+
+  This is the assertion bind file that is compiled along with the testbench in a
+  multi-top architecture. If the `-c` switch is passed, it adds the `tlul_assert`
+  module bind to the `i2c_host` DUT.
+
+* **tb/tb**
+
+  This is the top level testbench module that instantiates the DUT along with
+  some of the interfaces that are required to be instantiated, connected and
+  passed on to the `uvm_config_db`, since the base DV/CIP library classes
+  retrieve them. The user needs to look through the RTL and make additional
+  connections as needed.
+
+* **i2c_host_sim.core**
+
+  This is the top level fusesoc core file with the sim target. It adds the rtl
+  and dv dependencies to construct the complete filelist to pass to the simulator's
+  build step.
+
+* **Makefile**
+
+  This is the simulation Makefile that is used as the starting point for
+  building and running tests using the [make flow](../../hw/dv/tools/README.md).
+  It already includes the sanity and CSR suite of tests to allow users to start
+  running tests right away.
+
+* **plan.md**
+
+  This is the empty DV plan document that will describe the entire testbench. A
+  template for this is available [here](../../hw/dv/doc/plan.tpl.md).
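+
+As an illustration of where the user picks things up, the generated sanity
+sequence is an empty shell extended from the base virtual sequence. A minimal
+sketch for the `i2c_host` example (the body comments are hypothetical hints, not
+generated code) looks like this:
+```
+// basic sanity test vseq
+class i2c_host_sanity_vseq extends i2c_host_base_vseq;
+  `uvm_object_utils(i2c_host_sanity_vseq)
+
+  `uvm_object_new
+
+  task body();
+    // TODO: replace this placeholder with real stimulus, e.g. program the DUT
+    // CSRs and start traffic on the downstream agent sequencers
+    `uvm_error(`gfn, "FIXME")
+  endtask : body
+
+endclass : i2c_host_sanity_vseq
+```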
+
+#### Examples
+```
+util/uvmdvgen.py i2c -a
+```
+This will create `./i2c/i2c_agent` and place all sources there.
+
+```
+util/uvmdvgen.py jtag -a -ao hw/dv/sv
+```
+This will create the `hw/dv/sv/jtag_agent` directory and place all the sources
+there.
+
+```
+util/uvmdvgen.py i2c -a -s -ao hw/dv/sv
+```
+This will create the I2C agent with separate 'host' mode and 'device' mode drivers.
+
+```
+util/uvmdvgen.py i2c_host -e -c -ea i2c -eo hw/ip/i2c_host/dv
+```
+This will create the complete i2c_host dv testbench extended from CIP lib and will
+instantiate `i2c_agent`.
+
+```
+util/uvmdvgen.py dma -e -eo hw/ip/dma/dv
+```
+This will create the complete dma dv testbench extended from DV lib. It does not
+instantiate any downstream agents due to the absence of the `-ea` switch.
+
+```
+util/uvmdvgen.py chip -e -ea uart i2c jtag -eo hw/top_earlgrey/dv
+```
+This will create the complete chip testbench extended from DV lib and will
+instantiate `uart_agent`, `i2c_agent` and `jtag_agent` in the env.
diff --git a/util/uvmdvgen/README.md.tpl b/util/uvmdvgen/README.md.tpl
new file mode 100644
index 0000000..4164db7
--- /dev/null
+++ b/util/uvmdvgen/README.md.tpl
@@ -0,0 +1,3 @@
+{{% lowrisc-doc-hdr ${name.upper()} DV UVM Agent }}
+
+${name.upper()} DV UVM Agent is extended from DV library agent classes.
diff --git a/util/uvmdvgen/__init__.py b/util/uvmdvgen/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/util/uvmdvgen/__init__.py
diff --git a/util/uvmdvgen/agent.core.tpl b/util/uvmdvgen/agent.core.tpl
new file mode 100644
index 0000000..0acc9c8
--- /dev/null
+++ b/util/uvmdvgen/agent.core.tpl
@@ -0,0 +1,33 @@
+CAPI=2:
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+name: "lowrisc:dv:${name}_agent:0.1"
+description: "${name.upper()} DV UVM agent"
+filesets:
+  files_dv:
+    depend:
+      - lowrisc:dv:dv_utils
+      - lowrisc:dv:dv_lib
+    files:
+      - ${name}_if.sv
+      - ${name}_agent_pkg.sv
+      - ${name}_agent_cfg.sv: {is_include_file: true}
+      - ${name}_agent_cov.sv: {is_include_file: true}
+      - ${name}_item.sv: {is_include_file: true}
+% if has_separate_host_device_driver:
+      - ${name}_host_driver.sv: {is_include_file: true}
+      - ${name}_device_driver.sv: {is_include_file: true}
+% else:
+      - ${name}_driver.sv: {is_include_file: true}
+% endif
+      - ${name}_monitor.sv: {is_include_file: true}
+      - ${name}_agent.sv: {is_include_file: true}
+      - seq_lib/${name}_base_seq.sv: {is_include_file: true}
+      - seq_lib/${name}_seq_list.sv: {is_include_file: true}
+    file_type: systemVerilogSource
+
+targets:
+  default:
+    filesets:
+      - files_dv
diff --git a/util/uvmdvgen/agent.sv.tpl b/util/uvmdvgen/agent.sv.tpl
new file mode 100644
index 0000000..e6ac624
--- /dev/null
+++ b/util/uvmdvgen/agent.sv.tpl
@@ -0,0 +1,28 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_agent extends dv_base_agent #(
+      .CFG_T          (${name}_agent_cfg),
+      .DRIVER_T       (${name}_driver),
+% if has_separate_host_device_driver:
+      .HOST_DRIVER_T  (${name}_host_driver),
+      .DEVICE_DRIVER_T(${name}_device_driver),
+% endif
+      .SEQUENCER_T    (${name}_sequencer),
+      .MONITOR_T      (${name}_monitor),
+      .COV_T          (${name}_agent_cov)
+  );
+
+  `uvm_component_utils(${name}_agent)
+
+  `uvm_component_new
+
+  function void build_phase(uvm_phase phase);
+    super.build_phase(phase);
+    // get ${name}_if handle
+    if (!uvm_config_db#(virtual ${name}_if)::get(this, "", "vif", cfg.vif))
+      `uvm_fatal(`gfn, "failed to get ${name}_if handle from uvm_config_db")
+  endfunction
+
+endclass
diff --git a/util/uvmdvgen/agent_cfg.sv.tpl b/util/uvmdvgen/agent_cfg.sv.tpl
new file mode 100644
index 0000000..f052017
--- /dev/null
+++ b/util/uvmdvgen/agent_cfg.sv.tpl
@@ -0,0 +1,15 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_agent_cfg extends dv_base_agent_cfg;
+
+  // interface handle used by driver, monitor & the sequencer, via cfg handle
+  virtual ${name}_if vif;
+
+  `uvm_object_utils_begin(${name}_agent_cfg)
+  `uvm_object_utils_end
+
+  `uvm_object_new
+
+endclass
diff --git a/util/uvmdvgen/agent_cov.sv.tpl b/util/uvmdvgen/agent_cov.sv.tpl
new file mode 100644
index 0000000..91e48d4
--- /dev/null
+++ b/util/uvmdvgen/agent_cov.sv.tpl
@@ -0,0 +1,18 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_agent_cov extends dv_base_agent_cov #(${name}_agent_cfg);
+  `uvm_component_utils(${name}_agent_cov)
+
+  // the base class provides the following handles for use:
+  // ${name}_agent_cfg: cfg
+
+  // covergroups
+
+  function new(string name, uvm_component parent);
+    super.new(name, parent);
+    // instantiate all covergroups here
+  endfunction : new
+
+endclass
diff --git a/util/uvmdvgen/agent_pkg.sv.tpl b/util/uvmdvgen/agent_pkg.sv.tpl
new file mode 100644
index 0000000..e6630e2
--- /dev/null
+++ b/util/uvmdvgen/agent_pkg.sv.tpl
@@ -0,0 +1,49 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+package ${name}_agent_pkg;
+  // dep packages
+  import uvm_pkg::*;
+  import dv_utils_pkg::*;
+  import dv_lib_pkg::*;
+
+  // macro includes
+  `include "uvm_macros.svh"
+  `include "dv_macros.svh"
+
+  // parameters
+
+  // local types
+  // forward declare classes to allow typedefs below
+  typedef class ${name}_item;
+  typedef class ${name}_agent_cfg;
+
+% if has_separate_host_device_driver:
+  // add typedef for ${name}_driver which is dv_base_driver with the right parameter set
+  // ${name}_host_driver and ${name}_device_driver will extend from this
+  typedef dv_base_driver #(.ITEM_T        (${name}_item),
+                           .CFG_T         (${name}_agent_cfg)) ${name}_driver;
+
+% endif
+  // reuse dv_base_sequencer as is with the right parameter set
+  typedef dv_base_sequencer #(.ITEM_T     (${name}_item),
+                              .CFG_T      (${name}_agent_cfg)) ${name}_sequencer;
+
+  // functions
+
+  // package sources
+  `include "${name}_item.sv"
+  `include "${name}_agent_cfg.sv"
+  `include "${name}_agent_cov.sv"
+% if has_separate_host_device_driver:
+  `include "${name}_host_driver.sv"
+  `include "${name}_device_driver.sv"
+% else:
+  `include "${name}_driver.sv"
+% endif
+  `include "${name}_monitor.sv"
+  `include "${name}_agent.sv"
+  `include "${name}_seq_list.sv"
+
+endpackage: ${name}_agent_pkg
diff --git a/util/uvmdvgen/base_seq.sv.tpl b/util/uvmdvgen/base_seq.sv.tpl
new file mode 100644
index 0000000..e3f523d
--- /dev/null
+++ b/util/uvmdvgen/base_seq.sv.tpl
@@ -0,0 +1,17 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_base_seq extends dv_base_seq #(
+    .CFG_T       (${name}_agent_cfg),
+    .SEQUENCER_T (${name}_sequencer)
+  );
+  `uvm_object_utils(${name}_base_seq)
+
+  `uvm_object_new
+
+  virtual task body();
+    `uvm_fatal(`gtn, "Need to override this when you extend from this class!")
+  endtask
+
+endclass
diff --git a/util/uvmdvgen/base_test.sv.tpl b/util/uvmdvgen/base_test.sv.tpl
new file mode 100644
index 0000000..d713bdf
--- /dev/null
+++ b/util/uvmdvgen/base_test.sv.tpl
@@ -0,0 +1,20 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_base_test extends dv_base_test #(
+    .ENV_T(${name}_env),
+    .CFG_T(${name}_env_cfg)
+  );
+  `uvm_component_utils(${name}_base_test)
+  `uvm_component_new
+
+  // the base class dv_base_test creates the following instances:
+  // ${name}_env_cfg: cfg
+  // ${name}_env:     env
+
+  // the base class also looks up UVM_TEST_SEQ plusarg to create and run that seq in
+  // the run_phase; as such, nothing more needs to be done
+
+endclass : ${name}_base_test
+
diff --git a/util/uvmdvgen/base_vseq.sv.tpl b/util/uvmdvgen/base_vseq.sv.tpl
new file mode 100644
index 0000000..4278bfc
--- /dev/null
+++ b/util/uvmdvgen/base_vseq.sv.tpl
@@ -0,0 +1,37 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+% if is_cip:
+class ${name}_base_vseq extends cip_base_vseq #(
+% else:
+class ${name}_base_vseq extends dv_base_vseq #(
+% endif
+    .CFG_T               (${name}_env_cfg),
+    .RAL_T               (${name}_reg_block),
+    .COV_T               (${name}_env_cov),
+    .VIRTUAL_SEQUENCER_T (${name}_virtual_sequencer)
+  );
+  `uvm_object_utils(${name}_base_vseq)
+
+  // various knobs to enable certain routines
+  bit do_${name}_init = 1'b1;
+
+  `uvm_object_new
+
+  virtual task dut_init(string reset_kind = "HARD");
+    super.dut_init();
+    if (do_${name}_init) ${name}_init();
+  endtask
+
+  virtual task dut_shutdown();
+    // check for pending ${name} operations and wait for them to complete
+    // TODO
+  endtask
+
+  // setup basic ${name} features
+  virtual task ${name}_init();
+    `uvm_error(`gfn, "FIXME")
+  endtask
+
+endclass : ${name}_base_vseq
diff --git a/util/uvmdvgen/bind.sv.tpl b/util/uvmdvgen/bind.sv.tpl
new file mode 100644
index 0000000..9570e68
--- /dev/null
+++ b/util/uvmdvgen/bind.sv.tpl
@@ -0,0 +1,16 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+module ${name}_bind;
+% if is_cip:
+
+  bind ${name} tlul_assert tlul_assert (
+    .clk_i,
+    .rst_ni,
+    .h2d  (tl_i),
+    .d2h  (tl_o)
+  );
+% endif
+
+endmodule
diff --git a/util/uvmdvgen/csr_vseq.sv.tpl b/util/uvmdvgen/csr_vseq.sv.tpl
new file mode 100644
index 0000000..bbdce09
--- /dev/null
+++ b/util/uvmdvgen/csr_vseq.sv.tpl
@@ -0,0 +1,30 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_csr_vseq extends ${name}_base_vseq;
+  `uvm_object_utils(${name}_csr_vseq)
+
+  constraint num_trans_c {
+    num_trans inside {[1:2]};
+  }
+  `uvm_object_new
+
+  virtual task body();
+    run_csr_vseq_wrapper(num_trans);
+  endtask : body
+
+  // function to add csr exclusions of the given type using the csr_excl_item item
+  virtual function void add_csr_exclusions(string           csr_test_type,
+                                           csr_excl_item    csr_excl,
+                                           string           scope = "ral");
+
+    // write exclusions - these should not apply to hw_reset test
+    if (csr_test_type != "hw_reset") begin
+      // TODO: below is a sample
+      // status reads back unexpected values due to writes to other csrs
+      // csr_excl.add_excl({scope, ".", "status"}, CsrExclWriteCheck);
+    end
+  endfunction
+
+endclass
diff --git a/util/uvmdvgen/device_driver.sv.tpl b/util/uvmdvgen/device_driver.sv.tpl
new file mode 100644
index 0000000..ae46511
--- /dev/null
+++ b/util/uvmdvgen/device_driver.sv.tpl
@@ -0,0 +1,26 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_device_driver extends ${name}_driver;
+  `uvm_component_utils(${name}_device_driver)
+
+  // the base class provides the following handles for use:
+  // ${name}_agent_cfg: cfg
+
+  `uvm_component_new
+
+  virtual task run_phase(uvm_phase phase);
+    // base class forks off reset_signals() and get_and_drive() tasks
+    super.run_phase(phase);
+  endtask
+
+  // reset signals
+  virtual task reset_signals();
+  endtask
+
+  // drive trans received from sequencer
+  virtual task get_and_drive();
+  endtask
+
+endclass
diff --git a/util/uvmdvgen/driver.sv.tpl b/util/uvmdvgen/driver.sv.tpl
new file mode 100644
index 0000000..074280c
--- /dev/null
+++ b/util/uvmdvgen/driver.sv.tpl
@@ -0,0 +1,37 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_driver extends dv_base_driver #(${name}_item, ${name}_agent_cfg);
+  `uvm_component_utils(${name}_driver)
+
+  // the base class provides the following handles for use:
+  // ${name}_agent_cfg: cfg
+
+  `uvm_component_new
+
+  virtual task run_phase(uvm_phase phase);
+    // base class forks off reset_signals() and get_and_drive() tasks
+    super.run_phase(phase);
+  endtask
+
+  // reset signals
+  virtual task reset_signals();
+  endtask
+
+  // drive trans received from sequencer
+  virtual task get_and_drive();
+    forever begin
+      seq_item_port.get_next_item(req);
+      $cast(rsp, req.clone());
+      rsp.set_id_info(req);
+      `uvm_info(`gfn, $sformatf("rcvd item:\n%0s", req.sprint()), UVM_HIGH)
+      // TODO: do the driving part
+      //
+      // send rsp back to seq
+      `uvm_info(`gfn, "item sent", UVM_HIGH)
+      seq_item_port.item_done(rsp);
+    end
+  endtask
+
+endclass
diff --git a/util/uvmdvgen/env.core.tpl b/util/uvmdvgen/env.core.tpl
new file mode 100644
index 0000000..ecc6081
--- /dev/null
+++ b/util/uvmdvgen/env.core.tpl
@@ -0,0 +1,34 @@
+CAPI=2:
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+name: "lowrisc:dv:${name}_env:0.1"
+description: "${name.upper()} DV UVM environment"
+filesets:
+  files_dv:
+    depend:
+% if is_cip:
+      - lowrisc:dv:cip_lib
+% else:
+      - lowrisc:dv:dv_lib
+% endif
+% for agent in env_agents:
+      - lowrisc:dv:${agent}_agent
+% endfor
+    files:
+      - ${name}_env_pkg.sv
+      - ${name}_env_cfg.sv: {is_include_file: true}
+      - ${name}_env_cov.sv: {is_include_file: true}
+      - ${name}_env.sv: {is_include_file: true}
+      - ${name}_reg_block.sv: {is_include_file: true}
+% if env_agents != []:
+      - ${name}_virtual_sequencer.sv: {is_include_file: true}
+% endif
+      - ${name}_scoreboard.sv: {is_include_file: true}
+      - seq_lib/${name}_vseq_list.sv: {is_include_file: true}
+    file_type: systemVerilogSource
+
+targets:
+  default:
+    filesets:
+      - files_dv
diff --git a/util/uvmdvgen/env.sv.tpl b/util/uvmdvgen/env.sv.tpl
new file mode 100644
index 0000000..58773e9
--- /dev/null
+++ b/util/uvmdvgen/env.sv.tpl
@@ -0,0 +1,51 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+% if is_cip:
+class ${name}_env extends cip_base_env #(
+% else:
+class ${name}_env extends dv_base_env #(
+% endif
+    .CFG_T              (${name}_env_cfg),
+    .COV_T              (${name}_env_cov),
+    .VIRTUAL_SEQUENCER_T(${name}_virtual_sequencer),
+    .SCOREBOARD_T       (${name}_scoreboard)
+  );
+  `uvm_component_utils(${name}_env)
+% if env_agents != []:
+
+% for agent in env_agents:
+  ${agent}_agent m_${agent}_agent;
+% endfor
+% endif
+
+  `uvm_component_new
+
+  function void build_phase(uvm_phase phase);
+    super.build_phase(phase);
+% for agent in env_agents:
+    m_${agent}_agent = ${agent}_agent::type_id::create("m_${agent}_agent", this);
+    uvm_config_db#(${agent}_agent_cfg)::set(this, "m_${agent}_agent*", "cfg", cfg.m_${agent}_agent_cfg);
+% endfor
+  endfunction
+
+  function void connect_phase(uvm_phase phase);
+    super.connect_phase(phase);
+% if env_agents != []:
+    if (cfg.en_scb) begin
+% endif
+% for agent in env_agents:
+      m_${agent}_agent.monitor.analysis_port.connect(scoreboard.${agent}_fifo.analysis_export);
+% endfor
+% if env_agents != []:
+    end
+% endif
+% for agent in env_agents:
+    if (cfg.is_active && cfg.m_${agent}_agent_cfg.is_active) begin
+      virtual_sequencer.${agent}_sequencer_h = m_${agent}_agent.sequencer;
+    end
+% endfor
+  endfunction
+
+endclass
diff --git a/util/uvmdvgen/env_cfg.sv.tpl b/util/uvmdvgen/env_cfg.sv.tpl
new file mode 100644
index 0000000..fb13359
--- /dev/null
+++ b/util/uvmdvgen/env_cfg.sv.tpl
@@ -0,0 +1,39 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+% if is_cip:
+class ${name}_env_cfg extends cip_base_env_cfg #(.RAL_T(${name}_reg_block));
+% else:
+class ${name}_env_cfg extends dv_base_env_cfg #(.RAL_T(${name}_reg_block));
+% endif
+
+  // ext component cfgs
+% for agent in env_agents:
+  rand ${agent}_agent_cfg m_${agent}_agent_cfg;
+% endfor
+
+  `uvm_object_utils_begin(${name}_env_cfg)
+% for agent in env_agents:
+    `uvm_field_object(m_${agent}_agent_cfg, UVM_DEFAULT)
+% endfor
+  `uvm_object_utils_end
+
+  `uvm_object_new
+
+  virtual function void initialize(bit [TL_AW-1:0] csr_base_addr = '1,
+                                   bit [TL_AW-1:0] csr_addr_map_size = 2048);
+    super.initialize();
+% for agent in env_agents:
+    // create ${agent} agent config obj
+    m_${agent}_agent_cfg = ${agent}_agent_cfg::type_id::create("m_${agent}_agent_cfg");
+% endfor
+% if is_cip:
+
+    // set num_interrupts & num_alerts which will be used to create coverage and more
+    num_interrupts = ral.intr_state.get_n_used_bits();
+    num_alerts = 0;
+% endif
+  endfunction
+
+endclass
diff --git a/util/uvmdvgen/env_cov.sv.tpl b/util/uvmdvgen/env_cov.sv.tpl
new file mode 100644
index 0000000..c07b312
--- /dev/null
+++ b/util/uvmdvgen/env_cov.sv.tpl
@@ -0,0 +1,22 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+% if is_cip:
+class ${name}_env_cov extends cip_base_env_cov #(.CFG_T(${name}_env_cfg));
+% else:
+class ${name}_env_cov extends dv_base_env_cov #(.CFG_T(${name}_env_cfg));
+% endif
+  `uvm_component_utils(${name}_env_cov)
+
+  // the base class provides the following handles for use:
+  // ${name}_env_cfg: cfg
+
+  // covergroups
+
+  function new(string name, uvm_component parent);
+    super.new(name, parent);
+    // instantiate all covergroups here
+  endfunction : new
+
+endclass
diff --git a/util/uvmdvgen/env_pkg.sv.tpl b/util/uvmdvgen/env_pkg.sv.tpl
new file mode 100644
index 0000000..c2eea67
--- /dev/null
+++ b/util/uvmdvgen/env_pkg.sv.tpl
@@ -0,0 +1,59 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+package ${name}_env_pkg;
+  // dep packages
+  import uvm_pkg::*;
+  import top_pkg::*;
+  import dv_utils_pkg::*;
+  import csr_utils_pkg::*;
+  import tl_agent_pkg::*;
+% for agent in env_agents:
+  import ${agent}_agent_pkg::*;
+% endfor
+  import dv_lib_pkg::*;
+% if is_cip:
+  import cip_base_pkg::*;
+% endif
+
+  // macro includes
+  `include "uvm_macros.svh"
+  `include "dv_macros.svh"
+
+  // parameters
+
+  // types
+% if env_agents == []:
+  // forward declare classes to allow typedefs below
+  typedef class ${name}_env_cfg;
+  typedef class ${name}_env_cov;
+
+% endif
+% if env_agents == [] and is_cip:
+  // reuse cip_base_virtual_sequencer as is with the right parameter set
+  typedef cip_base_virtual_sequencer #(
+% elif env_agents == [] and not is_cip:
+  // reuse dv_base_virtual_sequencer as is with the right parameter set
+  typedef dv_base_virtual_sequencer #(
+% endif
+% if env_agents == []:
+      .CFG_T(${name}_env_cfg),
+      .COV_T(${name}_env_cov)
+  ) ${name}_virtual_sequencer;
+% endif
+
+  // functions
+
+  // package sources
+  `include "${name}_reg_block.sv"
+  `include "${name}_env_cfg.sv"
+  `include "${name}_env_cov.sv"
+% if env_agents != []:
+  `include "${name}_virtual_sequencer.sv"
+% endif
+  `include "${name}_scoreboard.sv"
+  `include "${name}_env.sv"
+  `include "${name}_vseq_list.sv"
+
+endpackage
diff --git a/util/uvmdvgen/gen_agent.py b/util/uvmdvgen/gen_agent.py
new file mode 100644
index 0000000..afb848a
--- /dev/null
+++ b/util/uvmdvgen/gen_agent.py
@@ -0,0 +1,59 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Generate SystemVerilog UVM agent extended freom our DV lib
+"""
+
+import os
+
+from mako.template import Template
+from pkg_resources import resource_filename
+
+
+def gen_agent(name, has_separate_host_device_driver, root_dir):
+    # set sub name
+    agent_dir = root_dir + "/" + name + "_agent"
+
+    # yapf: disable
+    # 4-tuple - path, ip name, class name, file ext
+    agent_srcs = [(agent_dir,               name + '_', 'if',            '.sv'),
+                  (agent_dir,               name + '_', 'item',          '.sv'),
+                  (agent_dir,               name + '_', 'agent_cfg',     '.sv'),
+                  (agent_dir,               name + '_', 'agent_cov',     '.sv'),
+                  (agent_dir,               name + '_', 'monitor',       '.sv'),
+                  (agent_dir,               name + '_', 'driver',        '.sv'),
+                  (agent_dir,               name + '_', 'host_driver',   '.sv'),
+                  (agent_dir,               name + '_', 'device_driver', '.sv'),
+                  (agent_dir,               name + '_', 'agent_pkg',     '.sv'),
+                  (agent_dir,               name + '_', 'agent',         '.sv'),
+                  (agent_dir,               name + '_', 'agent',         '.core'),
+                  (agent_dir,               "",         'README',        '.md'),
+                  (agent_dir + "/seq_lib",  name + '_', 'seq_list',      '.sv'),
+                  (agent_dir + "/seq_lib",  name + '_', 'base_seq',      '.sv')]
+    # yapf: enable
+
+    for tup in agent_srcs:
+        path_dir = tup[0]
+        src_prefix = tup[1]
+        src = tup[2]
+        src_suffix = tup[3]
+
+        if has_separate_host_device_driver:
+            if src == "driver": continue
+        else:
+            if src == "host_driver": continue
+            if src == "device_driver": continue
+
+        ftpl = src + src_suffix + '.tpl'
+        fname = src_prefix + src + src_suffix
+
+        # read template
+        tpl = Template(filename=resource_filename('uvmdvgen', ftpl))
+
+        os.makedirs(path_dir, exist_ok=True)
+        with open(path_dir + "/" + fname, 'w') as fout:
+            fout.write(
+                tpl.render(
+                    name=name,
+                    has_separate_host_device_driver=
+                    has_separate_host_device_driver))
diff --git a/util/uvmdvgen/gen_env.py b/util/uvmdvgen/gen_env.py
new file mode 100644
index 0000000..a7b2437
--- /dev/null
+++ b/util/uvmdvgen/gen_env.py
@@ -0,0 +1,55 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""Generate SystemVerilog UVM agent extended freom our DV lib
+"""
+
+import os
+
+from mako.template import Template
+from pkg_resources import resource_filename
+
+
+def gen_env(name, is_cip, env_agents, root_dir):
+    # yapf: disable
+    # 4-tuple - sub-path, ip name, class name, file ext
+    env_srcs = [('env',         name + '_', 'env_cfg',            '.sv'),
+                ('env',         name + '_', 'env_cov',            '.sv'),
+                ('env',         name + '_', 'env_pkg',            '.sv'),
+                ('env',         name + '_', 'reg_block',          '.sv'),
+                ('env',         name + '_', 'scoreboard',         '.sv'),
+                ('env',         name + '_', 'virtual_sequencer',  '.sv'),
+                ('env',         name + '_', 'env',                '.sv'),
+                ('env',         name + '_', 'env',                '.core'),
+                ('env/seq_lib', name + '_', 'base_vseq',          '.sv'),
+                ('env/seq_lib', name + '_', 'sanity_vseq',        '.sv'),
+                ('env/seq_lib', name + '_', 'csr_vseq',           '.sv'),
+                ('env/seq_lib', name + '_', 'vseq_list',          '.sv'),
+                ('tb',          '',         'tb',                 '.sv'),
+                ('tb',          name + '_', 'bind',               '.sv'),
+                ('tests',       name + '_', 'base_test',          '.sv'),
+                ('tests',       name + '_', 'test_pkg',           '.sv'),
+                ('tests',       name + '_', 'test',               '.core'),
+                ('.',           '',         'Makefile',           ''),
+                ('.',           '',         'plan',               '.md'),
+                ('.',           name + '_', 'sim',                '.core')]
+    # yapf: enable
+
+    for tup in env_srcs:
+        path_dir = root_dir + '/' + tup[0]
+        src_prefix = tup[1]
+        src = tup[2]
+        src_suffix = tup[3]
+
+        if env_agents == [] and src == "virtual_sequencer": continue
+
+        ftpl = src + src_suffix + '.tpl'
+        fname = src_prefix + src + src_suffix
+
+        # read template
+        tpl = Template(filename=resource_filename('uvmdvgen', ftpl))
+
+        os.makedirs(path_dir, exist_ok=True)
+        with open(path_dir + "/" + fname, 'w') as fout:
+            fout.write(
+                tpl.render(name=name, is_cip=is_cip, env_agents=env_agents))
diff --git a/util/uvmdvgen/host_driver.sv.tpl b/util/uvmdvgen/host_driver.sv.tpl
new file mode 100644
index 0000000..08103e0
--- /dev/null
+++ b/util/uvmdvgen/host_driver.sv.tpl
@@ -0,0 +1,37 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_host_driver extends ${name}_driver;
+  `uvm_component_utils(${name}_host_driver)
+
+  // the base class provides the following handles for use:
+  // ${name}_agent_cfg: cfg
+
+  `uvm_component_new
+
+  virtual task run_phase(uvm_phase phase);
+    // base class forks off reset_signals() and get_and_drive() tasks
+    super.run_phase(phase);
+  endtask
+
+  // reset signals
+  virtual task reset_signals();
+  endtask
+
+  // drive trans received from sequencer
+  virtual task get_and_drive();
+    forever begin
+      seq_item_port.get_next_item(req);
+      $cast(rsp, req.clone());
+      rsp.set_id_info(req);
+      `uvm_info(`gfn, $sformatf("rcvd item:\n%0s", req.sprint()), UVM_HIGH)
+      // TODO: do the driving part
+      //
+      // send rsp back to seq
+      `uvm_info(`gfn, "item sent", UVM_HIGH)
+      seq_item_port.item_done(rsp);
+    end
+  endtask
+
+endclass
diff --git a/util/uvmdvgen/if.sv.tpl b/util/uvmdvgen/if.sv.tpl
new file mode 100644
index 0000000..5f15a32
--- /dev/null
+++ b/util/uvmdvgen/if.sv.tpl
@@ -0,0 +1,11 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+interface ${name}_if ();
+
+  // interface pins
+
+  // debug signals
+
+endinterface
diff --git a/util/uvmdvgen/item.sv.tpl b/util/uvmdvgen/item.sv.tpl
new file mode 100644
index 0000000..715c6d1
--- /dev/null
+++ b/util/uvmdvgen/item.sv.tpl
@@ -0,0 +1,14 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_item extends uvm_sequence_item;
+
+  // random variables
+
+  `uvm_object_utils_begin(${name}_item)
+  `uvm_object_utils_end
+
+  `uvm_object_new
+
+endclass
diff --git a/util/uvmdvgen/monitor.sv.tpl b/util/uvmdvgen/monitor.sv.tpl
new file mode 100644
index 0000000..7366fc1
--- /dev/null
+++ b/util/uvmdvgen/monitor.sv.tpl
@@ -0,0 +1,43 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_monitor extends dv_base_monitor #(
+    .ITEM_T (${name}_item),
+    .CFG_T  (${name}_agent_cfg),
+    .COV_T  (${name}_agent_cov)
+  );
+  `uvm_component_utils(${name}_monitor)
+
+  // the base class provides the following handles for use:
+  // ${name}_agent_cfg: cfg
+  // ${name}_agent_cov: cov
+  // uvm_analysis_port #(${name}_item): analysis_port
+
+  `uvm_component_new
+
+  function void build_phase(uvm_phase phase);
+    super.build_phase(phase);
+  endfunction
+
+  task run_phase(uvm_phase phase);
+    super.run_phase(phase);
+  endtask
+
+  // collect transactions forever - already forked in dv_base_monitor::run_phase
+  virtual protected task collect_trans(uvm_phase phase);
+    forever begin
+      // TODO: detect event
+
+      // TODO: sample the interface
+
+      // TODO: sample the covergroups
+
+      // TODO: write trans to analysis_port
+
+      // TODO: remove the line below: it is added to prevent zero delay loop in template code
+      #1us;
+    end
+  endtask
+
+endclass
diff --git a/util/uvmdvgen/plan.md.tpl b/util/uvmdvgen/plan.md.tpl
new file mode 100644
index 0000000..f7beb69
--- /dev/null
+++ b/util/uvmdvgen/plan.md.tpl
@@ -0,0 +1,116 @@
+<!-- Copy this file to hw/ip/${name}/dv/plan.md and make changes as needed. For
+convenience '${name}' in the document can be searched and replaced easily with the
+desired IP (with case sensitivity!). Also, use the testbench block diagram here:
+https://drive.google.com/open?id=1LfnTSutIW5E6zSCOCf4-scS8MQ8lXhPAPgSfFx2Aqh0
+as a starting point and modify it to reflect your ${name} testbench and save it
+to hw/ip/${name}/dv/tb.svg. It should get linked and rendered under the block
+diagram section below. Once done, remove this comment before making a PR. -->
+
+{{% lowrisc-doc-hdr Foo DV Plan }}
+
+{{% toc 3 }}
+
+## Current status
+* [${name.upper()} regression dashboard](../../../dv/regressions/weekly/${name}/dashboard.html)
+* Design milestone: D#
+* Verification milestone: [V#](v#_cl.md)
+
+## Design features
+For detailed information on ${name.upper()} design features, please see the
+[${name.upper()} design specification](../doc/${name}.md).
+
+## Testplan
+<!-- TODO add automation to get the testplan hjson to expand here -->
+{{% path to testplan hjson }}
+
+## Testbench architecture
+${name.upper()} testbench has been constructed based on the
+[CIP testbench architecture](../../../dv/sv/cip_lib/README.md).
+<!-- TODO if ${name.upper()} is not a CIP, then indicate that it is extended from DV
+library instead, if applicable. -->
+
+### Block diagram
+![Block diagram](tb.svg)
+
+### Testbench
+Top level testbench is located at `hw/ip/${name}/dv/tb/tb.sv`. It instantiates the
+${name.upper()} DUT module `hw/ip/${name}/rtl/${name}.sv`. In addition, it instantiates several
+interfaces for driving/sampling clock and reset, devmode, interrupts, alerts and
+tilelink host.
+
+### Common DV utility components
+* [common_ifs](../../../dv/sv/common_ifs/README.md)
+* [dv_utils_pkg](../../../dv/sv/dv_utils/README.md)
+* [csr_utils_pkg](../../../dv/sv/csr_utils/README.md)
+
+### Compile-time configurations
+[list compile time configurations, if any]
+
+### Local types & methods
+The following local types and methods defined in `foo_env_pkg` are in use:
+
+[list parameters, types & methods]
+
+### UVC/agent 1
+[Describe here or add link to its README]
+
+### UVC/agent 2
+[Describe here or add link to its README]
+
+### RAL
+The ${name.upper()} RAL model is constructed using the
+[regtool.py script](../../../../util/doc/rm/RegisterTool.md)
+and is placed at `env/foo_reg_block.sv`.
+
+### Reference models
+[Describe reference models in use if applicable, example: SHA256/HMAC]
+
+### Stimulus strategy
+#### Test sequences
+All test sequences reside in `hw/ip/${name}/dv/env/seq_lib`. The `foo_base_vseq`
+virtual sequence is extended from `cip_base_vseq` and serves as a starting point.
+All test sequences are extended from it. It provides commonly used handles,
+variables, functions and tasks that the test sequences can simply use / call.
+Some of the most commonly used tasks / functions are as
+follows:
+* task 1:
+* task 2:
+
+#### Functional coverage
+To ensure high quality constrained random stimulus, it is necessary to develop a
+functional coverage model. The following covergroups have been developed to prove
+that the test intent has been adequately met:
+* cg1:
+* cg2:
+
+### Self-checking strategy
+#### Scoreboard
+The `foo_scoreboard` is primarily used for end to end checking. It creates the
+following analysis ports to retrieve the data monitored by corresponding
+interface agents:
+* analysis port1:
+* analysis port2:
+<!-- explain inputs monitored, flow of data and outputs checked -->
+
+#### Assertions
+* TLUL assertions: The `tb/foo_bind.sv` binds the `tlul_assert` assertions
+  to ${name} to ensure TileLink interface protocol compliance.
+* assert prop 1:
+* assert prop 2:
+
+## Building and running tests
+We are using our in-house developed
+[regression tool](../../../dv/tools/README.md)
+for building and running our tests and regressions. Please take a look at the link
+for detailed information on the usage, capabilities, features and known
+issues. Here's how to run a basic sanity test:
+```
+  $ cd hw/ip/${name}/dv
+  $ make TEST_NAME=foo_sanity
+```
+
+### Test list
+Tests developed towards executing the testplan are specified in `hw/ip/${name}/dv/sim/tests`.
+
+### Regression list
+Regressions are specified in `hw/ip/${name}/dv/sim/regressions`.
diff --git a/util/uvmdvgen/reg_block.sv.tpl b/util/uvmdvgen/reg_block.sv.tpl
new file mode 100644
index 0000000..f06d9b3
--- /dev/null
+++ b/util/uvmdvgen/reg_block.sv.tpl
@@ -0,0 +1,16 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+class ${name}_reg_block extends dv_base_reg_block;
+  `uvm_object_utils(${name}_reg_block)
+
+  function new(string name = "${name}_reg_block", int has_coverage = UVM_NO_COVERAGE);
+    super.new(name, has_coverage);
+  endfunction : new
+
+  virtual function void build(uvm_reg_addr_t base_addr);
+    `uvm_fatal(`gfn, "this file does not seem to be auto-generated!")
+  endfunction : build
+
+endclass : ${name}_reg_block
diff --git a/util/uvmdvgen/sanity_vseq.sv.tpl b/util/uvmdvgen/sanity_vseq.sv.tpl
new file mode 100644
index 0000000..7fbc61b
--- /dev/null
+++ b/util/uvmdvgen/sanity_vseq.sv.tpl
@@ -0,0 +1,15 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+// basic sanity test vseq
+class ${name}_sanity_vseq extends ${name}_base_vseq;
+  `uvm_object_utils(${name}_sanity_vseq)
+
+  `uvm_object_new
+
+  task body();
+    `uvm_error(`gfn, "FIXME")
+  endtask : body
+
+endclass : ${name}_sanity_vseq
diff --git a/util/uvmdvgen/scoreboard.sv.tpl b/util/uvmdvgen/scoreboard.sv.tpl
new file mode 100644
index 0000000..3a2f9ab
--- /dev/null
+++ b/util/uvmdvgen/scoreboard.sv.tpl
@@ -0,0 +1,113 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+% if is_cip:
+class ${name}_scoreboard extends cip_base_scoreboard #(
+% else:
+class ${name}_scoreboard extends dv_base_scoreboard #(
+% endif
+    .CFG_T(${name}_env_cfg),
+    .RAL_T(${name}_reg_block),
+    .COV_T(${name}_env_cov)
+  );
+  `uvm_component_utils(${name}_scoreboard)
+
+  // local variables
+
+  // TLM agent fifos
+% for agent in env_agents:
+  uvm_tlm_analysis_fifo #(${agent}_item) ${agent}_fifo;
+% endfor
+
+  // local queues to hold incoming packets pending comparison
+% for agent in env_agents:
+  ${agent}_item ${agent}_q[$];
+% endfor
+
+  `uvm_component_new
+
+  function void build_phase(uvm_phase phase);
+    super.build_phase(phase);
+% for agent in env_agents:
+    ${agent}_fifo = new("${agent}_fifo", this);
+% endfor
+  endfunction
+
+  function void connect_phase(uvm_phase phase);
+    super.connect_phase(phase);
+  endfunction
+
+  task run_phase(uvm_phase phase);
+    super.run_phase(phase);
+    fork
+% for agent in env_agents:
+      process_${agent}_fifo();
+% endfor
+    join_none
+  endtask
+% for agent in env_agents:
+
+  virtual task process_${agent}_fifo();
+    ${agent}_item item;
+    forever begin
+      ${agent}_fifo.get(item);
+      `uvm_info(`gfn, $sformatf("received ${agent} item:\n%0s", item.sprint()), UVM_HIGH)
+    end
+  endtask
+% endfor
+% if is_cip:
+
+  virtual task process_tl_access(tl_seq_item item, tl_channels_e channel = DataChannel);
+    uvm_reg csr;
+    bit     do_read_check = 1'b1;
+    bit     write         = item.is_write();
+
+    // if access was to a valid csr, get the csr handle
+    if (item.a_addr inside {cfg.csr_addrs}) begin
+      csr = ral.default_map.get_reg_by_offset(item.a_addr);
+      `DV_CHECK_NE_FATAL(csr, null)
+    end
+    if (csr == null) begin
+      // we hit an oob addr - expect error response and return
+      `DV_CHECK_EQ(item.d_error, 1'b1)
+      return;
+    end
+
+    if (channel == AddrChannel) begin
+      // if incoming access is a write to a valid csr, then make updates right away
+      if (write) csr.predict(.value(item.a_data), .kind(UVM_PREDICT_WRITE), .be(item.a_mask));
+    end
+
+    // process the csr req
+    // for write, update local variable and fifo at address phase
+    // for read, update prediction at address phase and compare at data phase
+    case (csr.get_name())
+      // add individual case item for each csr
+      default: begin
+        `uvm_fatal(`gfn, $sformatf("invalid csr: %0s", csr.get_full_name()))
+      end
+    endcase
+
+    // On reads, if do_read_check is set, then check mirrored_value against item.d_data
+    if (!write && channel == DataChannel) begin
+      if (do_read_check) begin
+        `DV_CHECK_EQ(csr.get_mirrored_value(), item.d_data,
+                     $sformatf("reg name: %0s", csr.get_full_name()))
+      end
+      csr.predict(.value(item.d_data), .kind(UVM_PREDICT_READ));
+    end
+  endtask
+% endif
+
+  virtual function void reset(string kind = "HARD");
+    super.reset(kind);
+    // reset local fifos, queues and variables
+  endfunction
+
+  function void check_phase(uvm_phase phase);
+    super.check_phase(phase);
+    // post test checks - ensure that all local fifos and queues are empty
+  endfunction
+
+endclass
diff --git a/util/uvmdvgen/seq_list.sv.tpl b/util/uvmdvgen/seq_list.sv.tpl
new file mode 100644
index 0000000..9b51507
--- /dev/null
+++ b/util/uvmdvgen/seq_list.sv.tpl
@@ -0,0 +1,5 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+`include "${name}_base_seq.sv"
diff --git a/util/uvmdvgen/sim.core.tpl b/util/uvmdvgen/sim.core.tpl
new file mode 100644
index 0000000..740bf6a
--- /dev/null
+++ b/util/uvmdvgen/sim.core.tpl
@@ -0,0 +1,28 @@
+CAPI=2:
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+name: "lowrisc:dv:${name}_sim:0.1"
+description: "${name.upper()} DV sim target"
+filesets:
+  files_rtl:
+    depend:
+      - lowrisc:ip:${name}:0.1
+    files:
+      - tb/${name}_bind.sv
+    file_type: systemVerilogSource
+
+  files_dv:
+    depend:
+      - lowrisc:dv:${name}_test
+    files:
+      - tb/tb.sv
+    file_type: systemVerilogSource
+
+targets:
+  sim:
+    toplevel: tb
+    filesets:
+      - files_rtl
+      - files_dv
+    default_tool: vcs
diff --git a/util/uvmdvgen/tb.sv.tpl b/util/uvmdvgen/tb.sv.tpl
new file mode 100644
index 0000000..6a44017
--- /dev/null
+++ b/util/uvmdvgen/tb.sv.tpl
@@ -0,0 +1,67 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+//
+module tb;
+  // dep packages
+  import uvm_pkg::*;
+  import dv_utils_pkg::*;
+  import ${name}_env_pkg::*;
+  import ${name}_test_pkg::*;
+
+  // macro includes
+  `include "uvm_macros.svh"
+  `include "dv_macros.svh"
+
+  wire clk, rst_n;
+% if is_cip:
+  wire [NUM_MAX_INTERRUPTS-1:0] interrupts;
+  wire [NUM_MAX_ALERTS-1:0] alerts;
+% endif
+
+  // interfaces
+  clk_rst_if clk_rst_if(.clk(clk), .rst_n(rst_n));
+% if is_cip:
+  pins_if #(NUM_MAX_INTERRUPTS) intr_if(interrupts);
+  pins_if #(NUM_MAX_ALERTS) alerts_if(alerts);
+  pins_if #(1) devmode_if();
+  tl_if tl_if(.clk(clk), .rst_n(rst_n));
+% endif
+% for agent in env_agents:
+  ${agent}_if ${agent}_if();
+% endfor
+
+  // dut
+  ${name} dut (
+    .clk_i                (clk        ),
+% if is_cip:
+    .rst_ni               (rst_n      ),
+
+    .tl_i                 (tl_if.h2d  ),
+    .tl_o                 (tl_if.d2h  )
+
+% else:
+    .rst_ni               (rst_n      )
+
+% endif
+    // TODO: add remaining IOs and hook them
+  );
+
+  initial begin
+    // drive clk and rst_n from clk_rst_if
+    clk_rst_if.set_active();
+    uvm_config_db#(virtual clk_rst_if)::set(null, "*.env", "clk_rst_vif", clk_rst_if);
+% if is_cip:
+    uvm_config_db#(intr_vif)::set(null, "*.env", "intr_vif", intr_if);
+    uvm_config_db#(alerts_vif)::set(null, "*.env", "alerts_vif", alerts_if);
+    uvm_config_db#(devmode_vif)::set(null, "*.env", "devmode_vif", devmode_if);
+    uvm_config_db#(virtual tl_if)::set(null, "*.env.m_tl_agent*", "vif", tl_if);
+% endif
+% for agent in env_agents:
+    uvm_config_db#(virtual ${agent}_if)::set(null, "*.env.m_${agent}_agent*", "vif", ${agent}_if);
+% endfor
+    $timeformat(-12, 0, " ps", 12);
+    run_test();
+  end
+
+endmodule
diff --git a/util/uvmdvgen/test.core.tpl b/util/uvmdvgen/test.core.tpl
new file mode 100644
index 0000000..905850b
--- /dev/null
+++ b/util/uvmdvgen/test.core.tpl
@@ -0,0 +1,19 @@
+CAPI=2:
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+name: "lowrisc:dv:${name}_test:0.1"
+description: "${name.upper()} DV UVM test"
+filesets:
+  files_dv:
+    depend:
+      - lowrisc:dv:${name}_env
+    files:
+      - ${name}_test_pkg.sv
+      - ${name}_base_test.sv: {is_include_file: true}
+    file_type: systemVerilogSource
+
+targets:
+  default:
+    filesets:
+      - files_dv
diff --git a/util/uvmdvgen/test_pkg.sv.tpl b/util/uvmdvgen/test_pkg.sv.tpl
new file mode 100644
index 0000000..3bd6ea5
--- /dev/null
+++ b/util/uvmdvgen/test_pkg.sv.tpl
@@ -0,0 +1,26 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+package ${name}_test_pkg;
+  // dep packages
+  import uvm_pkg::*;
+% if is_cip:
+  import cip_base_pkg::*;
+% else:
+  import dv_lib_pkg::*;
+% endif
+  import ${name}_env_pkg::*;
+
+  // macro includes
+  `include "uvm_macros.svh"
+  `include "dv_macros.svh"
+
+  // local types
+
+  // functions
+
+  // package sources
+  `include "${name}_base_test.sv"
+
+endpackage
diff --git a/util/uvmdvgen/virtual_sequencer.sv.tpl b/util/uvmdvgen/virtual_sequencer.sv.tpl
new file mode 100644
index 0000000..e4cd3c4
--- /dev/null
+++ b/util/uvmdvgen/virtual_sequencer.sv.tpl
@@ -0,0 +1,21 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+% if is_cip:
+class ${name}_virtual_sequencer extends cip_base_virtual_sequencer #(
+% else:
+class ${name}_virtual_sequencer extends dv_base_virtual_sequencer #(
+% endif
+    .CFG_T(${name}_env_cfg),
+    .COV_T(${name}_env_cov)
+  );
+  `uvm_component_utils(${name}_virtual_sequencer)
+
+% for agent in env_agents:
+  ${agent}_sequencer ${agent}_sequencer_h;
+% endfor
+
+  `uvm_component_new
+
+endclass
diff --git a/util/uvmdvgen/vseq_list.sv.tpl b/util/uvmdvgen/vseq_list.sv.tpl
new file mode 100644
index 0000000..18a18f9
--- /dev/null
+++ b/util/uvmdvgen/vseq_list.sv.tpl
@@ -0,0 +1,7 @@
+// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+`include "${name}_base_vseq.sv"
+`include "${name}_sanity_vseq.sv"
+`include "${name}_csr_vseq.sv"
diff --git a/util/vendor_hw.py b/util/vendor_hw.py
new file mode 100755
index 0000000..32bcf36
--- /dev/null
+++ b/util/vendor_hw.py
@@ -0,0 +1,467 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+import argparse
+import fnmatch
+import logging as log
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+import textwrap
+from pathlib import Path
+
+import hjson
+
+DESC = """vendor_hw, copy hardware source code from upstream into this repository"""
+
+EXCLUDE_ALWAYS = ['.git']
+
+LOCK_FILE_HEADER = """// Copyright lowRISC contributors.
+// Licensed under the Apache License, Version 2.0, see LICENSE for details.
+// SPDX-License-Identifier: Apache-2.0
+
+// This file is generated by the vendor_hw script. Please do not modify it
+// manually.
+
+"""
+
+verbose = False
+
+
+def git_is_clean_workdir(git_workdir):
+    """Check if the git working directory is clean (no unstaged or staged changes)"""
+    cmd = ['git', 'status', '--untracked-files=no', '--porcelain']
+    modified_files = subprocess.run(cmd,
+                                    cwd=git_workdir,
+                                    check=True,
+                                    stdout=subprocess.PIPE,
+                                    stderr=subprocess.PIPE).stdout.strip()
+    return not modified_files
+
+
+def path_resolve(path, base_dir=Path.cwd()):
+    """Create an absolute path. Relative paths are resolved using base_dir as base."""
+
+    if isinstance(path, str):
+        path = Path(path)
+
+    if path.is_absolute():
+        return path
+
+    return (base_dir / path).resolve()
+
+
+def github_qualify_references(log, repo_userorg, repo_name):
+    """ Replace "unqualified" GitHub references with "fully qualified" one
+
+    GitHub automatically links issues and pull requests if they have a specific
+    format. Links can be qualified with the user/org name and the repository
+    name, or unqualified, if they only contain the issue or pull request number.
+
+    This function converts all unqualified references to qualified ones.
+
+    See https://help.github.com/en/articles/autolinked-references-and-urls#issues-and-pull-requests
+    for documentation of all supported formats.
+    """
+
+    r = re.compile(r"(^|[^\w])(?:#|[gG][hH]-)(\d+)\b")
+    repl_str = r'\1%s/%s#\2' % (repo_userorg, repo_name)
+    return [r.sub(repl_str, l) for l in log]
+
+
+def test_github_qualify_references():
+    repo_userorg = 'lowRISC'
+    repo_name = 'ibex'
+
+    # Unqualified references, should be replaced
+    items_unqualified = [
+        '#28',
+        'GH-27',
+        'klaus #27',
+        'Fixes #27',
+        'Fixes #27 and #28',
+        '(#27)',
+        'something (#27) done',
+        '#27 and (GH-38)',
+    ]
+    exp_items_unqualified = [
+        'lowRISC/ibex#28',
+        'lowRISC/ibex#27',
+        'klaus lowRISC/ibex#27',
+        'Fixes lowRISC/ibex#27',
+        'Fixes lowRISC/ibex#27 and lowRISC/ibex#28',
+        '(lowRISC/ibex#27)',
+        'something (lowRISC/ibex#27) done',
+        'lowRISC/ibex#27 and (lowRISC/ibex#38)',
+    ]
+    assert github_qualify_references(items_unqualified, repo_userorg,
+                                     repo_name) == exp_items_unqualified
+
+    # Qualified references, should stay as they are
+    items_qualified = [
+        'Fixes lowrisc/ibex#27',
+        'lowrisc/ibex#2',
+    ]
+    assert github_qualify_references(items_qualified, repo_userorg,
+                                     repo_name) == items_qualified
+
+    # Invalid references, should stay as they are
+    items_invalid = [
+        'something#27',
+        'lowrisc/ibex#',
+    ]
+    assert github_qualify_references(items_invalid, repo_userorg,
+                                     repo_name) == items_invalid
+
+
+def test_github_parse_url():
+    assert github_parse_url('https://example.com/something/asdf.git') is None
+    assert github_parse_url('https://github.com/lowRISC/ibex.git') == (
+        'lowRISC', 'ibex')
+    assert github_parse_url('https://github.com/lowRISC/ibex') == ('lowRISC',
+                                                                   'ibex')
+    assert github_parse_url('git@github.com:lowRISC/ibex.git') == ('lowRISC',
+                                                                   'ibex')
+
+
+def github_parse_url(github_repo_url):
+    """Parse a GitHub repository URL into its parts.
+
+    Return a tuple (userorg, name), or None if the parsing failed.
+    """
+
+    regex = r"(?:@github\.com\:|\/github\.com\/)([a-zA-Z\d-]+)\/([a-zA-Z\d-]+)(?:\.git)?$"
+    m = re.search(regex, github_repo_url)
+    if m is None:
+        return None
+    return (m.group(1), m.group(2))
+
+
+def produce_shortlog(clone_dir, old_rev, new_rev):
+    """ Produce a list of changes between two revisions, one revision per line
+
+    Merges are excluded"""
+    cmd = [
+        'git', '-C',
+        str(clone_dir), 'log', '--pretty=format:%s (%aN)', '--no-merges',
+        old_rev + '..' + new_rev
+    ]
+    try:
+        proc = subprocess.run(cmd,
+                              cwd=clone_dir,
+                              check=True,
+                              stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE,
+                              encoding="UTF-8")
+        return proc.stdout.splitlines()
+    except subprocess.CalledProcessError as e:
+        log.error("Unable to capture shortlog: %s", e.stderr)
+        return ""
+
+
+def format_list_to_str(list, width=70):
+    """ Create Markdown-style formatted string from a list of strings """
+    wrapper = textwrap.TextWrapper(initial_indent="* ",
+                                   subsequent_indent="  ",
+                                   width=width)
+    return '\n'.join([wrapper.fill(s) for s in list])
+
+
+def refresh_patches(desc):
+    if not 'patch_repo' in desc:
+        log.fatal('Unable to refresh patches, patch_repo not set in config.')
+        sys.exit(1)
+
+    patch_dir_abs = path_resolve(desc['patch_dir'], desc['_base_dir'])
+    log.info('Refreshing patches in %s' % (str(patch_dir_abs), ))
+
+    # remove existing patches
+    for patch in patch_dir_abs.glob('*.patch'):
+        os.unlink(str(patch))
+
+    # get current patches
+    _export_patches(desc['patch_repo']['url'], patch_dir_abs,
+                    desc['patch_repo']['rev_base'],
+                    desc['patch_repo']['rev_patched'])
+
+
+def _export_patches(patchrepo_clone_url, target_patch_dir, upstream_rev,
+                    patched_rev):
+    clone_dir = Path(tempfile.mkdtemp())
+    try:
+        clone_git_repo(patchrepo_clone_url, clone_dir, patched_rev)
+        rev_range = 'origin/' + upstream_rev + '..' + 'origin/' + patched_rev
+        cmd = ['git', 'format-patch', '-o', str(target_patch_dir), rev_range]
+        if not verbose:
+            cmd += ['-q']
+        subprocess.run(cmd, cwd=clone_dir, check=True)
+
+    finally:
+        shutil.rmtree(str(clone_dir), ignore_errors=True)
+
+
+def import_from_upstream(upstream_path, target_path, exclude_files=[]):
+    log.info('Copying upstream sources to %s', target_path)
+    # remove existing directories before importing them again
+    shutil.rmtree(str(target_path), ignore_errors=True)
+
+    # import new contents for rtl directory
+    _cp_from_upstream(upstream_path, target_path, exclude_files)
+
+
+def apply_patch(basedir, patchfile, strip_level=1):
+    cmd = ['git', 'apply', '-p' + str(strip_level), patchfile]
+    if verbose:
+        cmd += ['--verbose']
+    subprocess.run(cmd, cwd=basedir, check=True)
+
+
+def clone_git_repo(repo_url, clone_dir, rev='master'):
+    log.info('Cloning upstream repository %s @ %s', repo_url, rev)
+
+    cmd = [
+        'git', 'clone', '--no-single-branch', '-b', rev, repo_url, clone_dir
+    ]
+    if not verbose:
+        cmd += ['-q']
+    subprocess.run(cmd, check=True)
+
+    # Get revision information
+    cmd = ['git', '-C', str(clone_dir), 'rev-parse', 'HEAD']
+    rev = subprocess.run(cmd,
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.PIPE,
+                         check=True,
+                         encoding='UTF-8').stdout.strip()
+    log.info('Cloned at revision %s', rev)
+    return rev
+
+
+def git_get_short_rev(clone_dir, rev):
+    """ Get the shortened SHA-1 hash for a revision """
+    cmd = ['git', '-C', str(clone_dir), 'rev-parse', '--short', rev]
+    short_rev = subprocess.run(cmd,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE,
+                               check=True,
+                               encoding='UTF-8').stdout.strip()
+    return short_rev
+
+
+def git_add_commit(repo_base, paths, commit_msg):
+    """ Stage and commit all changes in paths"""
+
+    # Stage all changes
+    for p in paths:
+        cmd_add = ['git', '-C', str(repo_base), 'add', str(p)]
+        subprocess.run(cmd_add, check=True, encoding='UTF-8')
+
+    cmd_commit = ['git', '-C', str(repo_base), 'commit', '-F', '-']
+    try:
+        subprocess.run(cmd_commit,
+                       check=True,
+                       encoding='UTF-8',
+                       input=commit_msg)
+    except subprocess.CalledProcessError as e:
+        log.warning("Unable to create commit. Are there no changes?")
+
+
+def ignore_patterns(base_dir, *patterns):
+    """Similar to shutil.ignore_patterns, but with support for directory excludes."""
+    def _rel_to_base(path, name):
+        return os.path.relpath(os.path.join(path, name), base_dir)
+
+    def _ignore_patterns(path, names):
+        ignored_names = []
+        for pattern in patterns:
+            pattern_matches = [
+                n for n in names
+                if fnmatch.fnmatch(_rel_to_base(path, n), pattern)
+            ]
+            ignored_names.extend(pattern_matches)
+        return set(ignored_names)
+
+    return _ignore_patterns
+
+
+def _cp_from_upstream(src, dest, exclude=[]):
+    shutil.copytree(str(src),
+                    str(dest),
+                    ignore=ignore_patterns(str(src), *exclude))
+
+
+def main(argv):
+    parser = argparse.ArgumentParser(prog="vendor_hw", description=DESC)
+    parser.add_argument('--refresh-patches',
+                        action='store_true',
+                        help='Refresh the patches from the patch repository')
+    parser.add_argument('--commit',
+                        '-c',
+                        action='store_true',
+                        help='Commit the changes')
+    parser.add_argument('desc_file',
+                        metavar='file',
+                        type=argparse.FileType('r', encoding='UTF-8'),
+                        help='vendoring description file (*.vendor.hjson)')
+    parser.add_argument('--verbose', '-v', action='store_true', help='Verbose')
+    args = parser.parse_args()
+
+    global verbose
+    verbose = args.verbose
+    if (verbose):
+        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
+    else:
+        log.basicConfig(format="%(levelname)s: %(message)s")
+
+    desc_file_path = Path(args.desc_file.name).resolve()
+    vendor_file_base_dir = desc_file_path.parent
+
+    # Precondition: Ensure description file matches our naming rules
+    if not str(desc_file_path).endswith('.vendor.hjson'):
+        log.fatal("Description file names must have a .vendor.hjson suffix.")
+        raise SystemExit(1)
+
+    # Precondition: Check for a clean working directory when commit is requested
+    if args.commit:
+        if not git_is_clean_workdir(vendor_file_base_dir):
+            log.fatal("A clean git working directory is required for "
+                      "--commit/-c. git stash your changes and try again.")
+            raise SystemExit(1)
+
+    # Load description file
+    try:
+        desc = hjson.loads(args.desc_file.read(), use_decimal=True)
+    except ValueError:
+        raise SystemExit(sys.exc_info()[1])
+    desc['_base_dir'] = vendor_file_base_dir
+
+    # Load lock file contents (if possible)
+    desc_file_path_str = str(desc_file_path)
+    lock_file_path = Path(
+        desc_file_path_str[:desc_file_path_str.find('.vendor.hjson')] +
+        '.lock.hjson')
+    try:
+        with open(lock_file_path, 'r') as f:
+            lock = hjson.loads(f.read(), use_decimal=True)
+    except FileNotFoundError:
+        log.warning(
+            "Unable to read lock file %s. Assuming this is the first import.",
+            lock_file_path)
+        lock = None
+
+    if args.refresh_patches:
+        refresh_patches(desc)
+
+    clone_dir = Path(tempfile.mkdtemp())
+    try:
+        # clone upstream repository
+        upstream_new_rev = clone_git_repo(desc['upstream']['url'], clone_dir,
+                                          desc['upstream']['rev'])
+
+        # apply patches to upstream sources
+        if 'patch_dir' in desc:
+            patches = path_resolve(desc['patch_dir'],
+                                   vendor_file_base_dir).glob('*.patch')
+            for patch in sorted(patches):
+                log.info("Applying patch %s" % str(patch))
+                apply_patch(clone_dir, str(patch))
+
+        # import selected (patched) files from upstream repo
+        exclude_files = []
+        if 'exclude_from_upstream' in desc:
+            exclude_files += desc['exclude_from_upstream']
+        exclude_files += EXCLUDE_ALWAYS
+
+        import_from_upstream(
+            clone_dir, path_resolve(desc['target_dir'], vendor_file_base_dir),
+            exclude_files)
+
+        # get shortlog
+        get_shortlog = True
+        if not lock:
+            get_shortlog = False
+            log.warning(
+                "No lock file exists. Unable to get the log of changes.")
+        elif lock['upstream']['url'] != desc['upstream']['url']:
+            get_shortlog = False
+            log.warning(
+                "The repository URL changed since the last run. Unable to get log of changes."
+            )
+        elif upstream_new_rev == lock['upstream']['rev']:
+            get_shortlog = False
+            log.warning("Re-importing upstream revision %s", upstream_new_rev)
+
+        shortlog = None
+        if get_shortlog:
+            shortlog = produce_shortlog(clone_dir, lock['upstream']['rev'],
+                                        upstream_new_rev)
+
+            # Ensure fully-qualified issue/PR references for GitHub repos
+            gh_repo_info = github_parse_url(desc['upstream']['url'])
+            if gh_repo_info:
+                shortlog = github_qualify_references(shortlog, gh_repo_info[0],
+                                                     gh_repo_info[1])
+
+            log.info("Changes since the last import:\n" +
+                     format_list_to_str(shortlog))
+
+        # write lock file
+        lock = {}
+        lock['upstream'] = desc['upstream']
+        lock['upstream']['rev'] = upstream_new_rev
+        with open(lock_file_path, 'w', encoding='UTF-8') as f:
+            f.write(LOCK_FILE_HEADER)
+            hjson.dump(lock, f)
+            f.write("\n")
+            log.info("Wrote lock file %s", lock_file_path)
+
+        # Commit changes
+        if args.commit:
+            sha_short = git_get_short_rev(clone_dir, upstream_new_rev)
+
+            repo_info = github_parse_url(desc['upstream']['url'])
+            if repo_info is not None:
+                sha_short = "%s/%s@%s" % (repo_info[0], repo_info[1],
+                                          sha_short)
+
+            commit_msg_subject = 'Update %s to %s' % (desc['name'], sha_short)
+            intro = 'Update code from upstream repository %s to revision %s' % (
+                desc['upstream']['url'], upstream_new_rev)
+            commit_msg_body = textwrap.fill(intro, width=70)
+
+            if shortlog:
+                commit_msg_body += "\n\n"
+                commit_msg_body += format_list_to_str(shortlog, width=70)
+
+            commit_msg = commit_msg_subject + "\n\n" + commit_msg_body
+
+            commit_paths = []
+            commit_paths.append(
+                path_resolve(desc['target_dir'], vendor_file_base_dir))
+            if args.refresh_patches:
+                commit_paths.append(
+                    path_resolve(desc['patch_dir'], vendor_file_base_dir))
+            commit_paths.append(lock_file_path)
+
+            git_add_commit(vendor_file_base_dir, commit_paths, commit_msg)
+
+    finally:
+        shutil.rmtree(str(clone_dir), ignore_errors=True)
+
+    log.info('Import finished')
+
+
+if __name__ == '__main__':
+    try:
+        main(sys.argv)
+    except subprocess.CalledProcessError as e:
+        log.fatal("Called program '%s' returned with %d.\n"
+                  "STDOUT:\n%s\n"
+                  "STDERR:\n%s\n" %
+                  (" ".join(e.cmd), e.returncode, e.stdout, e.stderr))
+        raise
diff --git a/util/wavegen/LICENSE.wavedrom b/util/wavegen/LICENSE.wavedrom
new file mode 100644
index 0000000..cd49285
--- /dev/null
+++ b/util/wavegen/LICENSE.wavedrom
@@ -0,0 +1,24 @@
+This software is based on WaveDrom javascript. In particular the
+svg defs section and Bricks generation are substantial copies.
+
+The MIT License (MIT)
+
+Copyright (c) 2011-2018 Aliaksei Chapyzhenka
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/util/wavegen/README.md b/util/wavegen/README.md
new file mode 100644
index 0000000..dca02f1
--- /dev/null
+++ b/util/wavegen/README.md
@@ -0,0 +1,215 @@
+# Wavegen -- Waveform generator in Python
+
+Wavegen is a python3 tool that reads waveform descriptions in hjson and
+generates svg pictures of the waveforms.
+
+The source is a json representation of the waveform using the wavejson
+format defined for Wavedrom. The format is (partially) described at
+https://github.com/drom/wavedrom/wiki/WaveJSON
+
+Note that the same wavejson could be embedded in a webpage and
+wavedrom javascript used to render it directly in the browser. An
+online editor for wavejson can be found at https://wavedrom.com/
+
+The example commands assume $REPO_TOP is set to the toplevel directory
+of the repo.
+
+### Setup
+
+If packages have not previously been installed you will need to set a
+few things up. Use `pip3` to install the required package:
+```
+$ pip3 install --user hjson
+```
+
+### Examples using standalone wavetool
+
+Normally for documentation the docgen tool will automatically use
+wavegen when it encounters a code block labeled for wavejson. See the
+examples in the docgen module.
+
+The wavetool provides a standalone way to run wavegen.
+
+The basic test mode can be invoked with -T. In this case a builtin
+wavejson string is used as the source and is instantiated twice (the
+two waveforms should look the same, but the second svg does not repeat
+the svg defs and instead references them by id from the first).
+
+```
+$ cd $REPO_TOP/util
+$ ./wavetool.py -T > /tmp/out.html
+```
+
+The examples directory contains the example wavejson from the
+[Tutorial](https://wavedrom.com/tutorial.html). These can be formed
+into a webpage to allow comparison with the tutorial output.
+
+```
+$ cd $REPO_TOP/util
+$ ./wavetool.py -v wavegen/examples/* > /tmp/out.html
+```
+
+## WaveJSON format and extension
+
+The tool includes an extension to the wavejson format. In addition to
+data: being used to label fields of the value types ('=2345'), the
+alternative cdata can be used to label all active data cycles
+('01=2345ud'). For example, this allows the start and stop bits to be
+labeled for the uart in the -T test.
+
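+As a sketch of the extension (the signal name and bit labels here are
+invented for illustration), a uart-style frame can be labeled on every
+active cycle, including the 0/1 start and stop bits:
+
+```
+{ signal: [
+  { name: "txd", wave: "10========1",
+    cdata: "idle start d0 d1 d2 d3 d4 d5 d6 d7 stop" }
+]}
+```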
+
+### The signal object
+
+The main object in the WaveJSON is the 'signal'. This consists of a
+list of signal values or groups.
+
+```
+{ "signal" : [
+    value1,
+    value2,
+    ...
+]}
+```
+The value is either:
+
+* A group. This is a list with the group name as a string as the first
+  element, and values for the other elements. `[ "Group name", {
+  value1 }, { value2 }]` Groups may be nested; the code allows nesting
+  up to three deep without the name area being made larger.
+
+* A gap. An element with no waveform, `{}`, generates a gap. There may
+  be a `node:` element in a gap to add markers and allow labeling of
+  cycles (see examples).
+
+* A real signal! This is a comma separated list of key/value pairs:
+  * `name:` the name that the signal will be labeled with
+  * `wave:` a string describing the waveform using characters listed below.
+  * `data:` an array of comma separated labels (or a string of space
+    separated labels). The labels will be added to any of the value
+    types (see below) in the waveform; four or five characters is the
+    maximum that will fit for hscale=1, period=1.
+  * `cdata:` **Extension to wavedrom** an array of comma separated
+    labels (or a string of space separated labels). The labels will be
+    added to any of the cycle types (see below) in the waveform; four
+    or five characters is the maximum that will fit for hscale=1,
+    period=1.
+  * `period:` A positive integer or half-integer (e.g. 0.5) that
+    specifies the period for this signal
+  * `phase:` A half-integer (integer or integer.5) specifying the phase
+    shift of the signal. Positive values cause the start of the wave
+    field to be discarded. Typically 0.5 to shift things back a
+    half-cycle which causes the first half of the first character in
+    wave to be skipped in the drawing.
+  * `node:` A string specifying where timing markers should be
+    placed. The string mirrors the wave string. If the node string
+    contains a `.` then no marker is inserted. If it contains a
+    lower-case letter then a marker with that name will be inserted
+    and the letter shown on the waveform. If it contains an upper-case
+    letter then a marker will be inserted but no letter shown in the
+    waveform. The markers are used to add arrows in the edge
+    directive.
+
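+Putting the pieces above together, a minimal `signal:` list with a
+group, a gap and two plain signals might look like this (the names and
+data values are invented for illustration):
+
+```
+{ signal: [
+  { name: "clk", wave: "p....." },
+  [ "bus",
+    { name: "valid", wave: "01...0" },
+    { name: "data",  wave: "x=.=.x", data: "A0 A1" }
+  ],
+  {},
+  { name: "irq", wave: "0..1.0" }
+]}
+```
+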
+Other than signal the top-level keys are:
+
+* `config:` A list of key/value pairs; the only one used is `hscale`,
+  an integer that sets the horizontal scale (1 = normal, 2 = twice the
+  width, etc). `config: { hscale: 2 }` **Note: skins in the
+  config are not currently implemented.**
+
+* `head:` Details of the header. This contains:
+  * `text:` the string to put in the header. May also be a list with
+    first element 'tspan' to add attributes/formatting. The tspan list
+    can be a three element list where the second item (from `{` to `}`)
+    is a comma separated list of key:value pairs that will become
+    `key="value"` arguments of the svg tspan and the third item is
+    another string/tspan. Or the tspan list is a list of string/tspan.
+  * `tick:` integer, optional. Add cycle labels and lines on rising
+    edges. The first edge is numbered with the integer given and
+    subsequent edges with incrementing integers.
+  * `tock:` integer, optional. Add cycle labels and lines on falling
+    edges. The first edge is numbered with the integer given and
+    subsequent edges with incrementing integers.
+
+* `foot:` As head but the text and labels go below the waveforms
+
+* `edge:` An array of strings containing details of edge arrows to be
+  drawn on the waveforms.
+
+
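+The top-level keys can be combined; a small sketch (the title strings
+are invented for illustration) using config, head and foot:
+
+```
+{ signal: [
+  { name: "clk",  wave: "p...." },
+  { name: "data", wave: "x345x", data: "a b c" }
+  ],
+  config: { hscale: 2 },
+  head: { text: "Example waveform", tick: 0 },
+  foot: { text: "Figure 1", tock: 9 }
+}
+```
+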
+### wave characters
+
+Characters in the wave= element describe the waveform. There are more
+in the wavedrom tutorial than in the WaveJSON description. The
+supported ones are listed here.
+
+These generate a sharp square wave edge into the new state.
+
+- `p` - (usually first in string) creates a cycle of a positive edged clock
+- `n` - (usually first in string) creates a cycle of a negative edged clock
+- `P` - same as p but with arrow
+- `N` - same as n but with arrow
+- `l` - low level
+- `h` - high level
+- `L` - low level with an arrow
+- `H` - high level with an arrow
+
+These generate a soft edge into the state. The data: list can be used
+to label any of the ones marked "value". Extending WaveJSON, the cdata:
+list can be used to label any of these. Note that a label will be
+centered on the cycle and subsequent '.' extensions.
+
+- `0` - low level
+- `1` - high level
+- `=` - value (default color 2)
+- `2` - value with color 2
+- `3` - value with color 3
+- `4` - value with color 4
+- `5` - value with color 5
+- `x` - undefined value (crosshatch)
+- `z` - high-impedance state
+- `u` - pull-up (weak 1)
+- `d` - pull-down (weak 0)
+
+These generate extensions of the previous cycle:
+- `.` - extends previous cycle
+- `|` - extends previous cycle and draws a discontinuity gap on top of it
+
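+As a quick illustration (the signal names and labels are invented), the
+following combines a clock, a labeled bus with a discontinuity gap, and
+a line using the pull-up, pull-down and high-impedance states:
+
+```
+{ signal: [
+  { name: "clk", wave: "p......." },
+  { name: "bus", wave: "x.35.|=x", data: "req resp more" },
+  { name: "pad", wave: "z.u.d.z." }
+]}
+```
+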
+### edge arrows
+
+Each element in the `edge:` array is a single string that describes
+an arrow or line and the label that is associated with the arrow/line.
+
+```
+{ ...
+  "edge"   : ["a->b edge 1", "b-~>c My Second Edge"]
+}
+```
+
+The string may contain multiple words separated by spaces. The first
+word defines the from and to points and the arrow shape. The rest of
+the text (after first space gap) will be placed as the label on the
+line. The first and last characters in the arrow text select the
+markers (defined in a node string) used as the endpoints for a line
+(if these are upper case letters then no text is shown for the marker
+but the line will be drawn to the corresponding point). The central
+characters define the line shape. If the first shape character is a
+`<` an arrowhead is inserted pointing to the start marker. If the last
+character in the shape is a `>` an arrowhead is inserted pointing to
+the end marker. (Note the original wavedrom may not support only
+having an arrow to the start marker.) The remaining characters define
+the arrow to be used:
+
+* `-` a straight line between the markers, label centered on line
+* `~` a curved line between the markers, label centered on line
+* `-~` a curved line between the markers biased to end, label closer
+  to end marker
+* `~-` a curved line between the markers biased to start, label closer
+  to start marker
+* `-|` a straight line horizontally from the start then vertically to
+  end, label closer to the end
+* `|-` a straight line vertically from the start then horizontally to
+  end, label closer to the start
+* `-|-` a straight line horizontally from the start to the horizontal
+  mid-point, then vertically, then horizontal to the end, label
+  centered
+
+See examples for how the different sorts of arrows actually look.
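+
+For instance (a minimal sketch; the node letters and label are
+arbitrary), two markers joined by a curved arrow with a label could be
+written as:
+
+```
+{ signal: [
+  { name: "req", wave: "010.", node: ".a.." },
+  { name: "ack", wave: "0.10", node: "..b." }
+  ],
+  edge: [ "a~>b t_ack" ]
+}
+```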
diff --git a/util/wavegen/__init__.py b/util/wavegen/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/util/wavegen/__init__.py
diff --git a/util/wavegen/examples/step2 b/util/wavegen/examples/step2
new file mode 100644
index 0000000..ae67c7e
--- /dev/null
+++ b/util/wavegen/examples/step2
@@ -0,0 +1,12 @@
+{ signal: [
+  { name: "pclk", wave: 'p.......' },
+  { name: "Pclk", wave: 'P.......' },
+  { name: "nclk", wave: 'n.......' },
+  { name: "Nclk", wave: 'N.......' },
+  {},
+  { name: 'clk0', wave: 'phnlPHNL' },
+  { name: 'clk1', wave: 'xhlhLHl.' },
+  { name: 'clk2', wave: 'hpHplnLn' },
+  { name: 'clk3', wave: 'nhNhplPl' },
+  { name: 'clk4', wave: 'xlh.L.Hx' },
+]}
diff --git a/util/wavegen/examples/step3 b/util/wavegen/examples/step3
new file mode 100644
index 0000000..ac5b4ed
--- /dev/null
+++ b/util/wavegen/examples/step3
@@ -0,0 +1,5 @@
+{ signal: [
+  { name: "clk",  wave: "P......" },
+  { name: "bus",  wave: "x.==.=x", data: ["head", "body", "tail", "data"] },
+  { name: "wire", wave: "0.1..0." }
+]}
diff --git a/util/wavegen/examples/step4 b/util/wavegen/examples/step4
new file mode 100644
index 0000000..8bac595
--- /dev/null
+++ b/util/wavegen/examples/step4
@@ -0,0 +1,7 @@
+{ signal: [
+  { name: "clk",         wave: "p.....|..." },
+  { name: "Data",        wave: "x.345x|=.x", data: ["head", "body", "tail", "data"] },
+  { name: "Request",     wave: "0.1..0|1.0" },
+  {},
+  { name: "Acknowledge", wave: "1.....|01." }
+]}
diff --git a/util/wavegen/examples/step5 b/util/wavegen/examples/step5
new file mode 100644
index 0000000..8ea7d82
--- /dev/null
+++ b/util/wavegen/examples/step5
@@ -0,0 +1,18 @@
+{ signal: [
+  {    name: 'clk',   wave: 'p..Pp..P'},
+  ['Master',
+    ['ctrl',
+      {name: 'write', wave: '01.0....'},
+      {name: 'read',  wave: '0...1..0'}
+    ],
+    {  name: 'addr',  wave: 'x3.x4..x', data: 'A1 A2'},
+    {  name: 'wdata', wave: 'x3.x....', data: 'D1'   },
+  ],
+  {},
+  ['Slave',
+    ['ctrl',
+      {name: 'ack',   wave: 'x01x0.1x'},
+    ],
+    {  name: 'rdata', wave: 'x.....4x', data: 'Q2'},
+  ]
+]}
diff --git a/util/wavegen/examples/step6 b/util/wavegen/examples/step6
new file mode 100644
index 0000000..c2e8b5e
--- /dev/null
+++ b/util/wavegen/examples/step6
@@ -0,0 +1,7 @@
+{ signal: [
+  { name: "CK",   wave: "P.......",                                              period: 2  },
+  { name: "CMD",  wave: "x.3x=x4x=x=x=x=x", data: "RAS NOP CAS NOP NOP NOP NOP", phase: 0.5 },
+  { name: "ADDR", wave: "x.=x..=x........", data: "ROW COL",                     phase: 0.5 },
+  { name: "DQS",  wave: "z.......0.1010z." },
+  { name: "DQ",   wave: "z.........5555z.", data: "D0 D1 D2 D3" }
+]}
diff --git a/util/wavegen/examples/step7 b/util/wavegen/examples/step7
new file mode 100644
index 0000000..0b124b6
--- /dev/null
+++ b/util/wavegen/examples/step7
@@ -0,0 +1,7 @@
+{ signal: [
+  { name: "clk",     wave: "p...." },
+  { name: "Data",    wave: "x345x",  data: ["head", "body", "tail"] },
+  { name: "Request", wave: "01..0" }
+  ],
+  config: { hscale: 1 }
+}
diff --git a/util/wavegen/examples/step7a b/util/wavegen/examples/step7a
new file mode 100644
index 0000000..6cf7de9
--- /dev/null
+++ b/util/wavegen/examples/step7a
@@ -0,0 +1,7 @@
+{ signal: [
+  { name: "clk",     wave: "p...." },
+  { name: "Data",    wave: "x345x",  data: ["head", "body", "tail"] },
+  { name: "Request", wave: "01..0" }
+  ],
+  config: { hscale: 2 }
+}
diff --git a/util/wavegen/examples/step7b b/util/wavegen/examples/step7b
new file mode 100644
index 0000000..3404cce
--- /dev/null
+++ b/util/wavegen/examples/step7b
@@ -0,0 +1,7 @@
+{ signal: [
+  { name: "clk",     wave: "p...." },
+  { name: "Data",    wave: "x345x",  data: ["head", "body", "tail"] },
+  { name: "Request", wave: "01..0" }
+  ],
+  config: { hscale: 3 }
+}
diff --git a/util/wavegen/examples/step7c b/util/wavegen/examples/step7c
new file mode 100644
index 0000000..11e3a18
--- /dev/null
+++ b/util/wavegen/examples/step7c
@@ -0,0 +1,14 @@
+{signal: [
+  {name:'clk',         wave: 'p....' },
+  {name:'Data',        wave: 'x345x', data: 'a b c' },
+  {name:'Request',     wave: '01..0' }
+],
+ head:{
+   text:'WaveDrom example',
+   tick:0,
+ },
+ foot:{
+   text:'Figure 100',
+   tock:9
+ },
+}
diff --git a/util/wavegen/examples/step7d b/util/wavegen/examples/step7d
new file mode 100644
index 0000000..6816eaa
--- /dev/null
+++ b/util/wavegen/examples/step7d
@@ -0,0 +1,29 @@
+{signal: [
+  {name:'clk', wave: 'p.....PPPPp....' },
+  {name:'dat', wave: 'x....2345x.....', data: 'a b c d' },
+  {name:'req', wave: '0....1...0.....' }
+],
+head: {text:
+  ['tspan',
+    ['tspan', {class:'error h1'}, 'error '],
+    ['tspan', {class:'warning h2'}, 'warning '],
+    ['tspan', {class:'info h3'}, 'info '],
+    ['tspan', {class:'success h4'}, 'success '],
+    ['tspan', {class:'muted h5'}, 'muted '],
+    ['tspan', {class:'h6'}, 'h6 '],
+    'default ',
+    ['tspan', {fill:'pink', 'font-weight':'bold', 'font-style':'italic'}, 'pink-bold-italic']
+  ]
+},
+foot: {text:
+  ['tspan', 'E=mc',
+    ['tspan', {dy:'-5'}, '2'],
+    ['tspan', {dy: '5'}, '. '],
+    ['tspan', {'font-size':'25'}, 'B '],
+    ['tspan', {'text-decoration':'overline'},'over '],
+    ['tspan', {'text-decoration':'underline'},'under '],
+    ['tspan', {'baseline-shift':'sub'}, 'sub '],
+    ['tspan', {'baseline-shift':'super'}, 'super ']
+  ],tock:-5
+}
+}
diff --git a/util/wavegen/examples/step8 b/util/wavegen/examples/step8
new file mode 100644
index 0000000..176f8ca
--- /dev/null
+++ b/util/wavegen/examples/step8
@@ -0,0 +1,12 @@
+{ signal: [
+  { name: 'A', wave: '01........0....',  node: '.a........j' },
+  { name: 'B', wave: '0.1.......0.1..',  node: '..b.......i' },
+  { name: 'C', wave: '0..1....0...1..',  node: '...c....h..' },
+  { name: 'D', wave: '0...1..0.....1.',  node: '....d..g...' },
+  { name: 'E', wave: '0....10.......1',  node: '.....ef....' }
+  ],
+  edge: [
+    'a~b t1', 'c-~a t2', 'c-~>d time 3', 'd~-e',
+    'e~>f', 'f->g', 'g-~>h', 'h~>i some text', 'h~->j'
+  ]
+}
diff --git a/util/wavegen/examples/step8-Bforb b/util/wavegen/examples/step8-Bforb
new file mode 100644
index 0000000..a7ffcc8
--- /dev/null
+++ b/util/wavegen/examples/step8-Bforb
@@ -0,0 +1,12 @@
+{ signal: [
+  { name: 'A', wave: '01........0....',  node: '.a........j' },
+  { name: 'B', wave: '0.1.......0.1..',  node: '..B.......i' },
+  { name: 'C', wave: '0..1....0...1..',  node: '...c....h..' },
+  { name: 'D', wave: '0...1..0.....1.',  node: '....d..g...' },
+  { name: 'E', wave: '0....10.......1',  node: '.....ef....' }
+  ],
+  edge: [
+    'a~B t1', 'c-~a t2', 'c-~>d time 3', 'd~-e',
+    'e~>f', 'f->g', 'g-~>h', 'h~>i some text', 'h~->j'
+  ]
+}
diff --git a/util/wavegen/examples/step8a b/util/wavegen/examples/step8a
new file mode 100644
index 0000000..2d0972b
--- /dev/null
+++ b/util/wavegen/examples/step8a
@@ -0,0 +1,11 @@
+{ signal: [
+  { name: 'A', wave: '01..0..',  node: '.a..e..' },
+  { name: 'B', wave: '0.1..0.',  node: '..b..d.', phase:0.5 },
+  { name: 'C', wave: '0..1..0',  node: '...c..f' },
+  {                              node: '...g..h' }
+  ],
+  edge: [
+    'b-|a t1', 'a-|c t2', 'b-|-c t3', 'c-|->e t4', 'e-|>f more text',
+    'e|->d t6', 'c-g', 'f-h', 'g<->h 3 ms'
+  ]
+}
diff --git a/util/wavegen/examples/tickbug b/util/wavegen/examples/tickbug
new file mode 100644
index 0000000..7de3a68
--- /dev/null
+++ b/util/wavegen/examples/tickbug
@@ -0,0 +1,14 @@
+{ signal: [
+  { name: 'CSB',  wave: '10.........|....1.'},
+  { name: 'SCK',  wave: '0.p........|....l.'},
+  { name: 'MOSI', wave: 'z.=..=.=.=.=.=.=.=.=.=|=.=.=.=.z....',
+    data:['R07','R06','R05','R04','R03','R02','R01','R00','R17',
+          'R74','R73','R72','R71','R70'], period:0.5, },
+  { name: 'MISO', wave: 'z...=.=.=.=.=.=.=.=.=.=|=.=.=.=.z...',
+    data:['T07','T06','T05','T04','T03','T02','T01','T00','T17',
+          'T74','T73','T72','T71','T70'], period:0.5}],
+  head:{
+    text: 'Data Transfer',
+    tick: ['-2 -1 0 1 2 3 4 5 6 7 8 9 60 61 62 63     ']
+  }
+}
diff --git a/util/wavegen/examples/tickbug2 b/util/wavegen/examples/tickbug2
new file mode 100644
index 0000000..9240927
--- /dev/null
+++ b/util/wavegen/examples/tickbug2
@@ -0,0 +1,27 @@
+{ signal: [
+  { name: 'CSB',  wave: '10.........|....1.'},
+  { name: 'SCK',  wave: '0.p........|....l.'},
+  { name: 'MOSI', wave: 'z.=..=.=.=.=.=.=.=.=.=|=.=.=.=.z....',
+    data:['R07','R06','R05','R04','R03','R02','R01','R00','R17',
+          'R74','R73','R72','R71','R70'], period:0.5, },
+  { name: 'MISO', wave: 'z.=========|====z..',
+    data:['T07','T06','T05','T04','T03','T02','T01','T00','T17',
+          'T73','T72','T71','T70'] },
+  { name: 'MIS0.5', wave: 'z...=.=.=.=.=.=.=.=.=.=|=.=.=.=.z..',
+    data:['T07','T06','T05','T04','T03','T02','T01','T00','T17',
+          '','T73','T72','T71','T70'], period:0.5 },
+  { name: 'MIS1', wave: 'z...=.=.=.=.=.=.=.=.=.=|=.=.=.=.z...',
+    data:['T07','T06','T05','T04','T03','T02','T01','T00','T17',
+          'T74','T73','T72','T71','T70'] },
+  { name: 'MIS1.5', wave: 'z...=.=.=.=.=.=.=.=.=.=|=.=.=.=.z...',
+    data:['T07','T06','T05','T04','T03','T02','T01','T00','T17',
+          'T74','T73','T72','T71','T70'], period:1.5 }],
+  head:{
+    text: 'Data Transfer',
+    tick: ['-2', '-1', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '60', '61', '62', '63']
+  }
+  foot:{
+    text: "text here"
+    tock:  '-2 -1 0 1 2 3 4 5 6 7 8 9 60 61 62 63     '
+  }
+}
diff --git a/util/wavegen/wavesvg.py b/util/wavegen/wavesvg.py
new file mode 100644
index 0000000..12eadf2
--- /dev/null
+++ b/util/wavegen/wavesvg.py
@@ -0,0 +1,761 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# portions adapted from the javascript wavedrom.js
+# https://github.com/drom/wavedrom/blob/master/wavedrom.js
+# see LICENSE.wavedrom
+
+import io
+import logging as log
+
+from wavegen import wavesvg_data
+
+# Generate brick: follows wavedrom.js
+
+
+def gen_brick(texts, extra, times):
+    res = []
+
+    # length of four indicates 2 phases each with 2 bricks
+    if (len(texts) == 4):
+        if extra != int(extra):
+            log.error("Clock must have an integer period")
+        for j in range(times):
+            res.append(texts[0])
+            for i in range(int(extra)):
+                res.append(texts[1])
+            res.append(texts[2])
+            for i in range(int(extra)):
+                res.append(texts[3])
+        return res
+
+    if (len(texts) == 1):
+        t1 = texts[0]
+    else:
+        t1 = texts[1]
+
+    res.append(texts[0])
+    for i in range(times * int(2 * (extra + 1)) - 1):
+        res.append(t1)
+    return res
+
+
+# Generate first brick in a line: follows wavedrom.js
+def gen_first_wavebrick(text, extra, times):
+
+    gentext = {
+        'p': ['pclk', '111', 'nclk', '000'],
+        'n': ['nclk', '000', 'pclk', '111'],
+        'P': ['Pclk', '111', 'nclk', '000'],
+        'N': ['Nclk', '000', 'pclk', '111'],
+        'l': ['000'],
+        'L': ['000'],
+        '0': ['000'],
+        'h': ['111'],
+        'H': ['111'],
+        '1': ['111'],
+        '=': ['vvv-2'],
+        '2': ['vvv-2'],
+        '3': ['vvv-3'],
+        '4': ['vvv-4'],
+        '5': ['vvv-5'],
+        'd': ['ddd'],
+        'u': ['uuu'],
+        'z': ['zzz']
+    }
+    return gen_brick(gentext.get(text, ['xxx']), extra, times)
+
+
+# Generate subsequent bricks: text contains before and after states
+# Follows wavedrom
+def gen_wavebrick(text, extra, times):
+
+    # new states that have a hard edge going in to them
+    new_hardedges = {
+        'p': 'pclk',
+        'n': 'nclk',
+        'P': 'Pclk',
+        'N': 'Nclk',
+        'h': 'pclk',
+        'l': 'nclk',
+        'H': 'Pclk',
+        'L': 'Nclk'
+    }
+
+    # new state with a soft edge
+    new_softedges = {
+        '0': '0',
+        '1': '1',
+        'x': 'x',
+        'd': 'd',
+        'u': 'u',
+        'z': 'z',
+        '=': 'v',
+        '2': 'v',
+        '3': 'v',
+        '4': 'v',
+        '5': 'v'
+    }
+
+    # state we are coming from
+    old_edges = {
+        'p': '0',
+        'n': '1',
+        'P': '0',
+        'N': '1',
+        'h': '1',
+        'l': '0',
+        'H': '1',
+        'L': '0',
+        '0': '0',
+        '1': '1',
+        'x': 'x',
+        'd': 'd',
+        'u': 'u',
+        'z': 'z',
+        '=': 'v',
+        '2': 'v',
+        '3': 'v',
+        '4': 'v',
+        '5': 'v'
+    }
+
+    # tags (basically the colour) -- js had two arrays for this
+    tags = {
+        'p': '',
+        'n': '',
+        'P': '',
+        'N': '',
+        'h': '',
+        'l': '',
+        'H': '',
+        'L': '',
+        '0': '',
+        '1': '',
+        'x': '',
+        'd': '',
+        'u': '',
+        'z': '',
+        '=': '-2',
+        '2': '-2',
+        '3': '-3',
+        '4': '-4',
+        '5': '-5'
+    }
+
+    # drawing for the second half of the new state
+    new_secondbricks = {
+        'p': '111',
+        'n': '000',
+        'P': '111',
+        'N': '000',
+        'h': '111',
+        'l': '000',
+        'H': '111',
+        'L': '000',
+        '0': '000',
+        '1': '111',
+        'x': 'xxx',
+        'd': 'ddd',
+        'u': 'uuu',
+        'z': 'zzz',
+        '=': 'vvv-2',
+        '2': 'vvv-2',
+        '3': 'vvv-3',
+        '4': 'vvv-4',
+        '5': 'vvv-5'
+    }
+
+    phase2_firstbricks = {'p': 'nclk', 'n': 'pclk', 'P': 'nclk', 'N': 'pclk'}
+
+    phase2_secondbricks = {'p': '000', 'n': '111', 'P': '000', 'N': '111'}
+
+    xclude = {
+        'hp': '111',
+        'Hp': '111',
+        'ln': '000',
+        'Ln': '000',
+        'nh': '111',
+        'Nh': '111',
+        'pl': '000',
+        'Pl': '000'
+    }
+
+    secondbrick = new_secondbricks.get(text[1])
+    hardbrick = new_hardedges.get(text[1])
+    if hardbrick == None:
+        # a soft edge gets the brick type constructed from the
+        # old state, m, new state. Old and new states may have
+        # tags which basically represent the colour
+        newch = new_softedges.get(text[1])
+        oldch = old_edges.get(text[0])
+        if newch == None or oldch == None:
+            # unknown: can't find the characters to make an edge
+            return gen_brick(['xxx'], extra, times)
+        else:
+            # soft curves
+            return gen_brick([
+                oldch + 'm' + newch + tags[text[0]] + tags[text[1]],
+                secondbrick
+            ], extra, times)
+    else:
+        specialcase = xclude.get(text)
+        if specialcase != None:
+            hardbrick = specialcase
+
+        # sharp curves
+        twophase = phase2_firstbricks.get(text[1])
+        if twophase == None:
+            # hlHL
+            return gen_brick([hardbrick, secondbrick], extra, times)
+        else:
+            # pnPN
+            return gen_brick([
+                hardbrick, secondbrick, twophase,
+                phase2_secondbricks.get(text[1])
+            ], extra, times)
+
+
+# text is the wave member of the signal object
+# extra = hscale-1 ( padding )
+
+
+def parse_wavelane(text, extra):
+    res = []
+    pos = 1
+    tlen = len(text)
+    subCycle = False
+    if tlen == 0:
+        return res
+    next = text[0]
+
+    repeats = 1
+    while pos < tlen and (text[pos] == '.' or text[pos] == '|'):
+        pos += 1
+        repeats += 1
+
+    res = gen_first_wavebrick(next, extra, repeats)
+
+    while pos < tlen:
+        top = next
+        next = text[pos]
+        pos += 1
+        if next == '<':  # sub-cycles on
+            subCycle = True
+            next = text[pos]
+            pos += 1
+
+        if next == '>':  # sub-cycles off
+            subCycle = False
+            next = text[pos]
+            pos += 1
+
+        repeats = 1
+        while pos < tlen and (text[pos] == '.' or text[pos] == '|'):
+            pos += 1
+            repeats += 1
+        if subCycle:
+            res.extend(gen_wavebrick(top + next, 0, repeats - lane.period))
+        else:
+            res.extend(gen_wavebrick(top + next, extra, repeats))
+
+    # res is array of half brick types, each is item is string
+    return res
+
+
+def render_svghead(out, width, height, bricksused, svgn):
+    out.write('  <svg id="svgcontent_' + str(svgn) + '"\n')
+    out.write(wavesvg_data.head1)
+    out.write('height="' + str(height) + '" width="' + str(width) + '"\n')
+    out.write('viewBox="0 0 ' + str(width) + ' ' + str(height) + '">\n')
+    out.write(wavesvg_data.head2)
+    if len(bricksused) > 0:
+        out.write(wavesvg_data.defs_head)
+        for br in bricksused:
+            out.write(wavesvg_data.use_defs[br])
+        out.write(wavesvg_data.defs_tail)
+
+
+def render_svgtail(out):
+    out.write(wavesvg_data.tail)
+
+
+def render_lanes_head(out, xoffset, yoffset, svgn):
+    out.write('    <g id="lanes_' + str(svgn) + '" transform="translate(' +
+              str(xoffset) + ', ' + str(yoffset) + ')">\n')
+
+
+def render_events(out, svgn, events, edges):
+    out.write('      <g id="wavearcs_' + str(svgn) + '">\n')
+    for edge in edges:
+        sp_edge = edge.split(None, 1)
+        log.info("Edge " + str(edge) + " splits " + str(sp_edge))
+        ev_from = events.get(sp_edge[0][0])
+        ev_to = events.get(sp_edge[0][-1])
+        shape = sp_edge[0][1:-1]
+        if (len(sp_edge) > 1):
+            label = sp_edge[1]
+        else:
+            label = ''
+
+        if ev_from == None or ev_to == None or len(shape) < 1:
+            log.warn("Could not find events for " + sp_edge[0])
+            continue
+
+        dx = ev_to[0] - ev_from[0]
+        dy = ev_to[1] - ev_from[1]
+        # lx,ly is the center of the label, it may be adjusted
+        lx = (ev_to[0] + ev_from[0]) // 2
+        ly = (ev_to[1] + ev_from[1]) // 2
+
+        if shape[0] == '<' and shape[-1] == '>':
+            path_s = 'marker-end:url(#arrowhead);' \
+                     'marker-start:url(#arrowtail);stroke:#0041c4;' \
+                     'stroke-width:1;fill:none'
+            shape = shape[1:-1]
+        elif shape[-1] == '>':
+            path_s = 'marker-end:url(#arrowhead);stroke:#0041c4;' \
+                     'stroke-width:1;fill:none'
+            shape = shape[:-1]
+        elif shape[0] == '<':
+            path_s = 'marker-start:url(#arrowtail);stroke:#0041c4;' \
+                     'stroke-width:1;fill:none'
+            shape = shape[1:]
+        else:
+            path_s = 'fill:none;stroke:#00F;stroke-width:1'
+
+        # SVG uses the case to indicate abs or relative
+        path_type = 'M'
+        # always start at the from point
+        path_d = ' ' + str(ev_from)[1:-1]
+
+        if shape == '~':
+            path_d += (' c ' + str(0.7 * dx) + ', 0 ' + str(0.3 * dx) + ', ' +
+                       str(dy) + ' ' + str(dx) + ', ' + str(dy))
+        elif shape == '-~':
+            path_d += (' c ' + str(0.7 * dx) + ', 0 ' + str(dx) + ', ' +
+                       str(dy) + ' ' + str(dx) + ', ' + str(dy))
+            lx = ev_from[0] + dx * 0.75
+        elif shape == '~-':
+            path_d += (' c 0, 0 ' + str(0.3 * dx) + ', ' + str(dy) + ' ' +
+                       str(dx) + ', ' + str(dy))
+            lx = ev_from[0] + dx * 0.25
+        elif shape == '-|':
+            path_d += ' ' + str(dx) + ',0 0,' + str(dy)
+            path_type = 'm'
+            lx = ev_to[0]
+        elif shape == '|-':
+            path_d += ' 0,' + str(dy) + ' ' + str(dx) + ',0'
+            path_type = 'm'
+            lx = ev_from[0]
+        elif shape == '-|-':
+            path_d += (' ' + str(dx / 2) + ',0 0,' + str(dy) + ' ' + str(
+                dx / 2) + ',0')
+            path_type = 'm'
+        else:  # catch - here (and anything else)
+            path_d += ' ' + str(ev_to)[1:-1]
+
+        out.write('      <path id="gmark_' + sp_edge[0][0] + '_' +
+                  sp_edge[0][-1] + '" d="' + path_type + path_d + '" style="' +
+                  path_s + '"></path>\n')
+        if len(label) != 0:
+            out.write('        <rect height="9" '
+                      'style="fill: rgb(255, 255, 255);" '
+                      'width="' + str(5 * len(label)) + '" '
+                      'x="' + str(lx - 2.5 * len(label)) + '" '
+                      'y="' + str(ly - 8) + '"></rect>\n')
+            out.write('      <text style="font-size: 10px;" '
+                      'text-anchor="middle" xml:space="preserve" '
+                      'x="' + str(lx) + '" y="' + str(ly) + '">'
+                      '<tspan>' + label + '</tspan></text>\n')
+
+    # Do events last so they are on top
+    for e in events:
+        log.info("Event " + e)
+        if e.islower():
+            (evx, evy) = events[e]
+            # rectangles are taller than in js because it looks better to me
+            out.write('        <rect y="' + str(evy - 8) + '" height="12" '
+                      'x="' + str(evx - 3) + '" width="6" '
+                      'style="fill: rgb(255, 255, 255);"></rect>\n')
+            out.write('        <text style="font-size: 8px;" '
+                      'x="' + str(evx) + '" y="' + str(evy) + '" '
+                      'text-anchor="middle">' + e + '</text>\n')
+    out.write('      </g>\n')
+
+
+def render_wavelanes(out, xscale, yscale, lanes, svgn, events):
+    lnum = 0
+    x_edgeoff = 6
+    for lane in lanes:
+        phase = lane[5]
+
+        out.write('      <g id="wavelane_' + str(svgn) + '_' + str(lnum) + '"'
+                  ' transform="translate(0,' + str(lnum * yscale + 5) +
+                  ')">\n')
+        if len(lane[0]) != 0:
+            out.write('        <text x="-15" y="15" class="info" '
+                      'text-anchor="end" xml:space="preserve">'
+                      '<tspan>' + lane[0] + '</tspan></text>\n')
+        out.write('        <g id="wavelane_draw_' + str(svgn) + '_' +
+                  str(lnum) + '" '
+                  'transform="translate(0,0)">\n')
+        if phase < 0:
+            bnum = abs(phase)
+            lstart = 0
+        else:
+            bnum = 0
+            lstart = phase
+
+        for x in lane[1][lstart:]:
+            out.write('          <use xlink:href="#' + x + '" '
+                      'transform="translate(' + str(bnum * xscale) +
+                      ')"></use>\n')
+            bnum += 1
+        dpos = 0
+        dend = len(lane[3])
+        period = lane[4]
+        if phase < 0:
+            i = 0
+        else:
+            # start point ensures bnum below is never less than -1
+            i = phase // 2
+
+        if dend > 0 and lane[3][-1] == '!cdata!':
+            lane[3].pop()
+            dend -= 1
+            labelif = '01=2345ud'
+        else:
+            labelif = '=2345'
+
+        scan_max = max(len(lane[2]), len(lane[6]))
+        while (i < scan_max):
+            bnum = i * 2 - phase
+
+            if (i < len(lane[2])):
+                x = lane[2][i]
+                if dpos < dend and x in labelif:
+                    nslot = 1
+                    while (i + nslot) < len(
+                            lane[2]) and lane[2][i + nslot] == '.':
+                        nslot += 1
+                    xcenter = period * xscale * nslot + bnum * period * xscale
+                    # the center needs to be offset by the width of the
+                    # edge because that lives in the first brick
+                    xcenter += x_edgeoff
+                    out.write('        <text x="' + str(xcenter))
+                    out.write('" y="' + str(yscale / 2) + '" '
+                              'text-anchor="middle" xml:space="preserve">')
+                    tspan_or_text(out, lane[3][dpos], True)
+                    out.write('</text>')
+                    dpos = dpos + 1
+                if x == '|':
+                    # render a gap (this diverges from how the js does it
+                    # where it is in a different g)
+                    out.write('        <use xlink:href="#gap" transform="'
+                              'translate(' +
+                              str(period * (bnum * xscale + xscale)) + ')"'
+                              '></use>\n')
+
+            if i < len(lane[6]):
+                ev = lane[6][i]
+                if ev != '.':
+                    events[ev] = (period * bnum * xscale + x_edgeoff,
+                                  lnum * yscale + 2 + yscale // 2)
+            i += 1
+
+        out.write('        </g>\n      </g>\n')
+        lnum += 1
+
+
+def render_marks(out, nbricks, xscale, ylen, svgn):
+    out.write('      <g id="gmarks_' + str(svgn) + '">\n')
+    mnum = 0
+    for i in range(nbricks // 2):
+        out.write('        <path id="gmark_' + str(svgn) + '_' + str(mnum) +
+                  '" '
+                  'd="m ' + str(i * 2 * xscale) + ',0 0,' + str(ylen) + '" '
+                  'style="stroke: rgb(136, 136, 136); '
+                  'stroke-width:0.5; stroke-dasharray:1,3"'
+                  '></path>\n')
+        mnum += 1
+    out.write('      </g>\n')
+
+
+def tspan_or_text(out, text, outerspan):
+    if isinstance(text, str):
+        if outerspan: out.write('<tspan>')
+        out.write(text)
+        if (outerspan): out.write('</tspan>')
+    else:
+        if text[0] != 'tspan':
+            log.warn('Expecting tspan, got ' + str(text[0]))
+            return
+        if len(text) == 3 and isinstance(text[1], dict):
+            out.write('<tspan ')
+            for x in text[1]:
+                out.write(x + '="' + text[1][x] + '" ')
+            out.write('>')
+            tspan_or_text(out, text[2], False)
+        else:
+            out.write('<tspan>')
+            for x in text[1:]:
+                tspan_or_text(out, x, False)
+        out.write('</tspan>')
+
+
+def render_caption(out, text, nbricks, xscale, yoffset):
+    out.write('        <text x="' + str(nbricks * xscale // 2) + '" '
+              'y="' + str(yoffset) + '" '
+              ' text-anchor="middle" fill="#000" xml:space="preserve">')
+    tspan_or_text(out, text, True)
+    out.write('</text>\n')
+
+
+def render_ticktock(out, info, xoff, xsep, yoff, num):
+    # info could be a number/string representing the lowest tick number
+    # or a string containing a list of space separated labels
+    # or a list containing separate strings or a single string
+
+    if isinstance(info, int) or (isinstance(info, str) and info.isdecimal()):
+        base = int(info)
+        for i in range(num):
+            out.write('<text x="' + str(xoff + i * xsep) + '" y="' +
+                      str(yoff) + '" '
+                      'text-anchor="middle" class="muted" xml:space="preserve"'
+                      '>' + str(i + base) + '</text>\n')
+    else:
+        if isinstance(info, list):
+            if len(info) == 1:
+                labels = info[0].split()
+            else:
+                labels = info
+        else:
+            labels = info.split()
+        if num > len(labels):
+            num = len(labels)
+        for i in range(num):
+            out.write('<text x="' + str(xoff + i * xsep) + '" y="' +
+                      str(yoff) + '" '
+                      'text-anchor="middle" class="muted" xml:space="preserve"'
+                      '>' + labels[i] + '</text>\n')
+
+
+def render_lanes_tail(out):
+    out.write('    </g>\n')
+
+
+# group array is [0=Name, 1=startlane, 2=endlane+1, 3=depth]
+def render_groups(out, groups, yhead, yoff, snum):
+    gdepth = 0
+    if len(groups) == 0:
+        return
+
+    for gr in groups:
+        if gr[3] > gdepth:
+            gdepth = gr[3]
+    xgoff = 80 - gdepth * 25
+
+    out.write('    <g id="groups_' + str(snum) + '">\n')
+
+    gnum = 0
+    for gr in groups:
+        ymin = yhead + gr[1] * yoff
+        ylen = (gr[2] - gr[1]) * yoff
+
+        out.write('      <path id="group_' + str(snum) + '_' + str(gnum) +
+                  '" ')
+        out.write('d="m ' + str(xgoff + 25 * gr[3]) + ',' + str(ymin + 3.5) +
+                  ' c -3,0 -5,2 -5,5 l 0,' + str(ylen - 16) + ' c 0,3 2,5 5,5"'
+                  'style="stroke: rgb(0, 65, 196); stroke-width: 1; '
+                  'fill: none;"></path>\n')
+        if len(gr[0]) > 0:
+            out.write('      <g transform="translate(' +
+                      str(xgoff - 10 + 25 * gr[3]) + ',' +
+                      str(ymin + ylen // 2) + ')">\n')
+            out.write('        <g transform="rotate(270)">\n')
+            out.write('          <text text-anchor="middle" class="info" '
+                      'xml:space="preserve"><tspan>' + gr[0] +
+                      '</tspan></text>\n        </g>\n      </g>')
+        gnum += 1
+    out.write('    </g>\n')
+
+
+def parse_wave(x, hscale, lanes, groups, gdepth, bricksused):
+    sname = ""
+    wave = ''
+    node = ''
+    labels = []
+    bricks = []
+    extra = hscale - 1
+    phase = 0
+    xmax = 0
+    global prevdefs
+
+    if isinstance(x, list):
+        gname = x[0]
+        startlane = len(lanes)
+        for y in x[1:]:
+            ymax = parse_wave(y, hscale, lanes, groups, gdepth + 1, bricksused)
+            if ymax > xmax:
+                xmax = ymax
+        groups.append([gname, startlane, len(lanes), gdepth])
+        return xmax
+
+    if 'name' in x:
+        sname = x['name']
+
+    # period must be before wave because it changes extra
+    if 'period' in x:
+        fp = float(x['period'])
+        if fp < 0 or fp * 2 != int(fp * 2):
+            log.error("Period must be integer or 0.5")
+        extra = hscale * fp - 1
+    if 'phase' in x:
+        phase = int(x['phase'] * 2)
+    if 'wave' in x:
+        wave = x['wave']
+        bricks = parse_wavelane(wave, extra)
+        for br in bricks:
+            if not br in bricksused and not br in prevdefs:
+                bricksused.append(br)
+        if 'data' in x:
+            labels = x['data']
+            if isinstance(labels, str):
+                labels = labels.split()
+        if 'cdata' in x:
+            labels = x['cdata']
+            if isinstance(labels, str):
+                labels = labels.split()
+            labels.append('!cdata!')
+
+    if 'node' in x:
+        node = x['node']
+
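+    # lane array is [0=name, 1=bricks, 2=wave, 3=labels,
+    #                4=period*hscale, 5=phase, 6=node]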
+    lanes.append([sname, bricks, wave, labels, extra + 1, phase, node])
+    return len(bricks)
+
+
+def convert(obj, svg_num):
+    """Convert an hjson-parsed wavejson object into an SVG string.
+
+    svg_num is a number that makes this svg unique; the first one must be 0.
+    """
+    xs = 20  # x scale = width of cycle
+    ys = 20  # y scale = height of wave
+    yo = int(ys * 1.5)  # y offset between lines of waves
+    xg = 150  # xoffset of waves (space for names and groups)
+    yh0 = 0  # height allocated for header tick/tock labels
+    yh1 = 0  # height allocated for header string
+    headtext = ''  # header string
+    headticktock = 0  # does header have tick=1/tock=2
+    yf0 = 0  # height allocated for footer tick/tock labels
+    yf1 = 0  # height allocated for footer string
+    foottext = ''  # footer string
+    footticktock = 0  # does footer have tick=1/tock=2
+    global prevdefs  # holds bricks previously defined
+    events = {}
+
+    if svg_num == 0:
+        bricksused = ['gap']
+        prevdefs = []
+    else:
+        bricksused = []
+
+    # section was parseConfig in js
+    if 'config' in obj and 'hscale' in obj['config']:
+        hscale = int(obj['config']['hscale'])
+        log.info("Set hscale to " + str(hscale))
+    else:
+        hscale = 1
+
+    if 'head' in obj:
+        head = obj['head']
+        if 'tick' in head:
+            yh0 = 20
+            headtt = head['tick']
+            headticktock = 1
+        elif 'tock' in head:
+            yh0 = 20
+            headtt = head['tock']
+            headticktock = 2
+
+        if 'text' in head:
+            yh1 = 46
+            headtext = head['text']
+
+    if 'foot' in obj:
+        foot = obj['foot']
+        if 'tick' in foot:
+            yf0 = 20
+            foottt = foot['tick']
+            footticktock = 1
+        elif 'tock' in foot:
+            yf0 = 20
+            foottt = foot['tock']
+            footticktock = 2
+
+        if 'text' in foot:
+            yf1 = 46
+            foottext = foot['text']
+
+    if 'edge' in obj:
+        if 'arrows' not in prevdefs:
+            bricksused.append('arrows')
+        edges = obj['edge']
+        log.info("Got edge: " + str(edges))
+    else:
+        edges = []
+
+    # build the signal bricks array
+
+    lanes = []
+    groups = []
+    xmax = 0
+    if 'signal' in obj:
+        for x in obj['signal']:
+            xlen = parse_wave(x, hscale, lanes, groups, 0, bricksused)
+            if xlen > xmax:
+                xmax = xlen
+
+    log.info("Got " + str(len(lanes)) + " lanes. xmax is " + str(xmax))
+    log.info(str(lanes))
+
+    outbuf = io.StringIO()
+
+    height = len(lanes) * yo + yh0 + yh1 + yf0 + yf1
+    width = xg + xmax * xs + xs
+    wheight = len(lanes) * yo
+
+    render_svghead(outbuf, width, height, bricksused, svg_num)
+    render_lanes_head(outbuf, xg, yh0 + yh1, svg_num)
+    render_marks(outbuf, xmax, xs, wheight, svg_num)
+    if yh1 != 0:
+        render_caption(outbuf, headtext, xmax, xs, -33 if yh0 != 0 else -13)
+
+    if yf1 != 0:
+        render_caption(outbuf, foottext, xmax, xs,
+                       wheight + (45 if yf0 != 0 else 25))
+
+    if headticktock == 1:
+        render_ticktock(outbuf, headtt, 0, 2 * xs, -5, xmax // 2)
+    if headticktock == 2:
+        render_ticktock(outbuf, headtt, xs, 2 * xs, -5, xmax // 2)
+
+    if footticktock == 1:
+        render_ticktock(outbuf, foottt, 0, 2 * xs, wheight + 15, xmax // 2)
+    if footticktock == 2:
+        render_ticktock(outbuf, foottt, xs, 2 * xs, wheight + 15, xmax // 2)
+
+    render_wavelanes(outbuf, xs, yo, lanes, svg_num, events)
+    if len(events) > 0:
+        render_events(outbuf, svg_num, events, edges)
+    render_lanes_tail(outbuf)
+    render_groups(outbuf, groups, yh0 + yh1, yo, svg_num)
+    render_svgtail(outbuf)
+    prevdefs.extend(bricksused)
+
+    generated = outbuf.getvalue()
+    outbuf.close()
+    return generated
diff --git a/util/wavegen/wavesvg_data.py b/util/wavegen/wavesvg_data.py
new file mode 100644
index 0000000..6463b2b
--- /dev/null
+++ b/util/wavegen/wavesvg_data.py
@@ -0,0 +1,985 @@
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+# portions adapted from the javascript wavedrom.js
+# https://github.com/drom/wavedrom/blob/master/wavedrom.js
+# see LICENSE.wavedrom
+
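+# namespace and overflow attributes for the opening <svg> element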
+head1 = """
+   xmlns="http://www.w3.org/2000/svg"
+   xmlns:xlink="http://www.w3.org/1999/xlink"
+   overflow="hidden"
+"""
+
+# Styles are from wavedrom.js
+head2 = """
+    <style type="text/css">
+      text { font-size: 11pt; font-style: normal; font-variant:
+      normal; font-weight: normal; font-stretch: normal;
+      text-align: center; fill-opacity: 1; font-family:
+      Helvetica }
+
+      .muted {
+      fill: #aaa
+      }
+
+      .warning {
+      fill: #f6b900
+      }
+
+      .error {
+      fill: #f60000
+      }
+
+      .info {
+      fill: #0041c4
+      }
+
+      .success {
+      fill: #00ab00
+      }
+
+      .h1 {
+      font-size: 33pt;
+      font-weight: bold
+      }
+
+      .h2 {
+      font-size: 27pt;
+      font-weight: bold
+      }
+
+      .h3 {
+      font-size: 20pt;
+      font-weight: bold
+      }
+
+      .h4 {
+      font-size: 14pt;
+      font-weight: bold
+      }
+
+      .h5 {
+      font-size: 11pt;
+      font-weight: bold
+      }
+
+      .h6 {
+      font-size: 8pt;
+      font-weight: bold
+      }
+
+      .s1 {
+      fill: none;
+      stroke: #000;
+      stroke-width: 1;
+      stroke-linecap: round;
+      stroke-linejoin: miter;
+      stroke-miterlimit: 4;
+      stroke-opacity: 1;
+      stroke-dasharray: none
+      }
+
+      .s2 {
+      fill: none;
+      stroke: #000;
+      stroke-width: 0.5;
+      stroke-linecap: round;
+      stroke-linejoin: miter;
+      stroke-miterlimit: 4;
+      stroke-opacity: 1;
+      stroke-dasharray: none
+      }
+
+      .s3 {
+      color: #000;
+      fill: none;
+      stroke: #000;
+      stroke-width: 1;
+      stroke-linecap: round;
+      stroke-linejoin: miter;
+      stroke-miterlimit: 4;
+      stroke-opacity: 1;
+      stroke-dasharray: 1, 3;
+      stroke-dashoffset: 0;
+      marker: none;
+      visibility: visible;
+      display: inline;
+      overflow: visible;
+      enable-background: accumulate
+      }
+
+      .s4 {
+      color: #000;
+      fill: none;
+      stroke: #000;
+      stroke-width: 1;
+      stroke-linecap: round;
+      stroke-linejoin: miter;
+      stroke-miterlimit: 4;
+      stroke-opacity: 1;
+      stroke-dasharray: none;
+      stroke-dashoffset: 0;
+      marker: none;
+      visibility: visible;
+      display: inline;
+      overflow: visible
+      }
+
+      .s5 {
+      fill: #fff;
+      stroke: none
+      }
+
+      .s6 {
+      color: #000;
+      fill: #ffffb4;
+      fill-opacity: 1;
+      fill-rule: nonzero;
+      stroke: none;
+      stroke-width: 1px;
+      marker: none;
+      visibility: visible;
+      display: inline;
+      overflow: visible;
+      enable-background: accumulate
+      }
+
+      .s7 {
+      color: #000;
+      fill: #ffe0b9;
+      fill-opacity: 1;
+      fill-rule: nonzero;
+      stroke: none;
+      stroke-width: 1px;
+      marker: none;
+      visibility: visible;
+      display: inline;
+      overflow: visible;
+      enable-background: accumulate
+      }
+
+      .s8 {
+      color: #000;
+      fill: #b9e0ff;
+      fill-opacity: 1;
+      fill-rule: nonzero;
+      stroke: none;
+      stroke-width: 1px;
+      marker: none;
+      visibility: visible;
+      display: inline;
+      overflow: visible;
+      enable-background: accumulate
+      }
+
+      .s9 {
+      fill: #000;
+      fill-opacity: 1;
+      stroke: none
+      }
+
+      .s10 {
+      color: #000;
+      fill: #fff;
+      fill-opacity: 1;
+      fill-rule: nonzero;
+      stroke: none;
+      stroke-width: 1px;
+      marker: none;
+      visibility: visible;
+      display: inline;
+      overflow: visible;
+      enable-background: accumulate
+      }
+
+      .s11 {
+      fill: #0041c4;
+      fill-opacity: 1;
+      stroke: none
+      }
+
+      .s12 {
+      fill: none;
+      stroke: #0041c4;
+      stroke-width: 1;
+      stroke-linecap: round;
+      stroke-linejoin: miter;
+      stroke-miterlimit: 4;
+      stroke-opacity: 1;
+      stroke-dasharray: none
+      }
+    </style>
+"""
+
+defs_head = """
+    <defs>
+"""
+
+defs_tail = """
+    </defs>
+"""
+
+tail = """
+  </svg>
+"""
+
+# Brick definitions from wavedrom.js
+# Split out here so only the ones that are used are inserted in the svg
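+# Keys are brick names (e.g. 'pclk', '000', 'vmv-3-4'); values are the <g>
+# fragments written inside <defs> the first time a brick is used.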
+
+use_defs = {
+    'arrows':
+    '''      <marker id="arrowhead" style="fill: rgb(0, 65, 196);" markerHeight="7" markerWidth="10" markerUnits="strokeWidth" viewBox="0 -4 11 8" refX="15" refY="0" orient="auto">
+        <path d="M0 -4 11 0 0 4z"></path>
+      </marker>
+      <marker id="arrowtail" style="fill: rgb(0, 65, 196);" markerHeight="7" markerWidth="10" markerUnits="strokeWidth" viewBox="-11 -4 11 8" refX="-15" refY="0" orient="auto">
+        <path d="M0 -4 -11 0 0 4z"></path>
+      </marker>
+''',
+    'socket':
+    '''      <g id="socket">
+        <rect y="15" x="6" height="20" width="20"></rect>
+      </g>''',
+    'pclk':
+    '''      <g id="pclk">
+        <path d="M0,20 0,0 20,0" class="s1"></path>
+      </g>''',
+    'nclk':
+    '''      <g id="nclk">
+        <path d="m0,0 0,20 20,0" class="s1"></path>
+      </g>''',
+    '000':
+    '''      <g id="000">
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    '0m0':
+    '''      <g id="0m0">
+        <path d="m0,20 3,0 3,-10 3,10 11,0" class="s1"></path>
+      </g>''',
+    '0m1':
+    '''      <g id="0m1">
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    '0mx':
+    '''      <g id="0mx">
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 5,20" class="s2"></path>
+        <path d="M20,0 4,16" class="s2"></path>
+        <path d="M15,0 6,9" class="s2"></path>
+        <path d="M10,0 9,1" class="s2"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    '0md':
+    '''      <g id="0md">
+        <path d="m8,20 10,0" class="s3"></path>
+        <path d="m0,20 5,0" class="s1"></path>
+      </g>''',
+    '0mu':
+    '''      <g id="0mu">
+        <path d="m0,20 3,0 C 7,10 10.107603,0 20,0" class="s1"></path>
+      </g>''',
+    '0mz':
+    '''      <g id="0mz">
+        <path d="m0,20 3,0 C 10,10 15,10 20,10" class="s1"></path>
+      </g>''',
+    '111':
+    '''      <g id="111">
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    '1m0':
+    '''      <g id="1m0">
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+      </g>''',
+    '1m1':
+    '''      <g id="1m1">
+        <path d="M0,0 3,0 6,10 9,0 20,0" class="s1"></path>
+      </g>''',
+    '1mx':
+    '''      <g id="1mx">
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 8,17" class="s2"></path>
+        <path d="M20,0 7,13" class="s2"></path>
+        <path d="M15,0 6,9" class="s2"></path>
+        <path d="M10,0 5,5" class="s2"></path>
+        <path d="M3.5,1.5 5,0" class="s2"></path>
+      </g>''',
+    '1md':
+    '''      <g id="1md">
+        <path d="m0,0 3,0 c 4,10 7,20 17,20" class="s1"></path>
+      </g>''',
+    '1mu':
+    '''      <g id="1mu">
+        <path d="M0,0 5,0" class="s1"></path>
+        <path d="M8,0 18,0" class="s3"></path>
+      </g>''',
+    '1mz':
+    '''      <g id="1mz">
+        <path d="m0,0 3,0 c 7,10 12,10 17,10" class="s1"></path>
+      </g>''',
+    'xxx':
+    '''      <g id="xxx">
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="M0,5 5,0" class="s2"></path>
+        <path d="M0,10 10,0" class="s2"></path>
+        <path d="M0,15 15,0" class="s2"></path>
+        <path d="M0,20 20,0" class="s2"></path>
+        <path d="M5,20 20,5" class="s2"></path>
+        <path d="M10,20 20,10" class="s2"></path>
+        <path d="m15,20 5,-5" class="s2"></path>
+      </g>''',
+    'xm0':
+    '''      <g id="xm0">
+        <path d="M0,0 4,0 9,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,5 4,1" class="s2"></path>
+        <path d="M0,10 5,5" class="s2"></path>
+        <path d="M0,15 6,9" class="s2"></path>
+        <path d="M0,20 7,13" class="s2"></path>
+        <path d="M5,20 8,17" class="s2"></path>
+      </g>''',
+    'xm1':
+    '''      <g id="xm1">
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="M0,20 4,20 9,0" class="s1"></path>
+        <path d="M0,5 5,0" class="s2"></path>
+        <path d="M0,10 9,1" class="s2"></path>
+        <path d="M0,15 7,8" class="s2"></path>
+        <path d="M0,20 5,15" class="s2"></path>
+      </g>''',
+    'xmx':
+    '''      <g id="xmx">
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="M0,5 5,0" class="s2"></path>
+        <path d="M0,10 10,0" class="s2"></path>
+        <path d="M0,15 15,0" class="s2"></path>
+        <path d="M0,20 20,0" class="s2"></path>
+        <path d="M5,20 20,5" class="s2"></path>
+        <path d="M10,20 20,10" class="s2"></path>
+        <path d="m15,20 5,-5" class="s2"></path>
+      </g>''',
+    'xmd':
+    '''      <g id="xmd">
+        <path d="m0,0 4,0 c 3,10 6,20 16,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,5 4,1" class="s2"></path>
+        <path d="M0,10 5.5,4.5" class="s2"></path>
+        <path d="M0,15 6.5,8.5" class="s2"></path>
+        <path d="M0,20 8,12" class="s2"></path>
+        <path d="m5,20 5,-5" class="s2"></path>
+        <path d="m10,20 2.5,-2.5" class="s2"></path>
+      </g>''',
+    'xmu':
+    '''      <g id="xmu">
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="m0,20 4,0 C 7,10 10,0 20,0" class="s1"></path>
+        <path d="M0,5 5,0" class="s2"></path>
+        <path d="M0,10 10,0" class="s2"></path>
+        <path d="M0,15 10,5" class="s2"></path>
+        <path d="M0,20 6,14" class="s2"></path>
+      </g>''',
+    'xmz':
+    '''      <g id="xmz">
+        <path d="m0,0 4,0 c 6,10 11,10 16,10" class="s1"></path>
+        <path d="m0,20 4,0 C 10,10 15,10 20,10" class="s1"></path>
+        <path d="M0,5 4.5,0.5" class="s2"></path>
+        <path d="M0,10 6.5,3.5" class="s2"></path>
+        <path d="M0,15 8.5,6.5" class="s2"></path>
+        <path d="M0,20 11.5,8.5" class="s2"></path>
+      </g>''',
+    'ddd':
+    '''      <g id="ddd">
+        <path d="m0,20 20,0" class="s3"></path>
+      </g>''',
+    'dm0':
+    '''      <g id="dm0">
+        <path d="m0,20 10,0" class="s3"></path>
+        <path d="m12,20 8,0" class="s1"></path>
+      </g>''',
+    'dm1':
+    '''      <g id="dm1">
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'dmx':
+    '''      <g id="dmx">
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 5,20" class="s2"></path>
+        <path d="M20,0 4,16" class="s2"></path>
+        <path d="M15,0 6,9" class="s2"></path>
+        <path d="M10,0 9,1" class="s2"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'dmd':
+    '''      <g id="dmd">
+        <path d="m0,20 20,0" class="s3"></path>
+      </g>''',
+    'dmu':
+    '''      <g id="dmu">
+        <path d="m0,20 3,0 C 7,10 10.107603,0 20,0" class="s1"></path>
+      </g>''',
+    'dmz':
+    '''      <g id="dmz">
+        <path d="m0,20 3,0 C 10,10 15,10 20,10" class="s1"></path>
+      </g>''',
+    'uuu':
+    '''      <g id="uuu">
+        <path d="M0,0 20,0" class="s3"></path>
+      </g>''',
+    'um0':
+    '''      <g id="um0">
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+      </g>''',
+    'um1':
+    '''      <g id="um1">
+        <path d="M0,0 10,0" class="s3"></path>
+        <path d="m12,0 8,0" class="s1"></path>
+      </g>''',
+    'umx':
+    '''      <g id="umx">
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 8,17" class="s2"></path>
+        <path d="M20,0 7,13" class="s2"></path>
+        <path d="M15,0 6,9" class="s2"></path>
+        <path d="M10,0 5,5" class="s2"></path>
+        <path d="M3.5,1.5 5,0" class="s2"></path>
+      </g>''',
+    'umd':
+    '''      <g id="umd">
+        <path d="m0,0 3,0 c 4,10 7,20 17,20" class="s1"></path>
+      </g>''',
+    'umu':
+    '''      <g id="umu">
+        <path d="M0,0 20,0" class="s3"></path>
+      </g>''',
+    'umz':
+    '''      <g id="umz">
+        <path d="m0,0 3,0 c 7,10 12,10 17,10" class="s4"></path>
+      </g>''',
+    'zzz':
+    '''      <g id="zzz">
+        <path d="m0,10 20,0" class="s1"></path>
+      </g>''',
+    'zm0':
+    '''      <g id="zm0">
+        <path d="m0,10 6,0 3,10 11,0" class="s1"></path>
+      </g>''',
+    'zm1':
+    '''      <g id="zm1">
+        <path d="M0,10 6,10 9,0 20,0" class="s1"></path>
+      </g>''',
+    'zmx':
+    '''      <g id="zmx">
+        <path d="m6,10 3,10 11,0" class="s1"></path>
+        <path d="M0,10 6,10 9,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 8,17" class="s2"></path>
+        <path d="M20,0 7,13" class="s2"></path>
+        <path d="M15,0 6.5,8.5" class="s2"></path>
+        <path d="M10,0 9,1" class="s2"></path>
+      </g>''',
+    'zmd':
+    '''      <g id="zmd">
+        <path d="m0,10 7,0 c 3,5 8,10 13,10" class="s1"></path>
+      </g>''',
+    'zmu':
+    '''      <g id="zmu">
+        <path d="m0,10 7,0 C 10,5 15,0 20,0" class="s1"></path>
+      </g>''',
+    'zmz':
+    '''      <g id="zmz">
+        <path d="m0,10 20,0" class="s1"></path>
+      </g>''',
+    'gap':
+    '''      <g id="gap">
+        <path d="m7,-2 -4,0 c -5,0 -5,24 -10,24 l 4,0 C 2,22 2,-2 7,-2 z" class="s5"></path>
+        <path d="M-7,22 C -2,22 -2,-2 3,-2" class="s1"></path>
+        <path d="M-3,22 C 2,22 2,-2 7,-2" class="s1"></path>
+      </g>''',
+    '0mv-3':
+    '''      <g id="0mv-3">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s6"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    '1mv-3':
+    '''      <g id="1mv-3">
+        <path d="M2.875,0 20,0 20,20 9,20 z" class="s6"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'xmv-3':
+    '''      <g id="xmv-3">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s6"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,5 3.5,1.5" class="s2"></path>
+        <path d="M0,10 4.5,5.5" class="s2"></path>
+        <path d="M0,15 6,9" class="s2"></path>
+        <path d="M0,20 4,16" class="s2"></path>
+      </g>''',
+    'dmv-3':
+    '''      <g id="dmv-3">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s6"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'umv-3':
+    '''      <g id="umv-3">
+        <path d="M3,0 20,0 20,20 9,20 z" class="s6"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'zmv-3':
+    '''      <g id="zmv-3">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s6"></path>
+        <path d="m6,10 3,10 11,0" class="s1"></path>
+        <path d="M0,10 6,10 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vvv-3':
+    '''      <g id="vvv-3">
+        <path d="M20,20 0,20 0,0 20,0" class="s6"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vm0-3':
+    '''      <g id="vm0-3">
+        <path d="M0,20 0,0 3,0 9,20" class="s6"></path>
+        <path d="M0,0 3,0 9,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vm1-3':
+    '''      <g id="vm1-3">
+        <path d="M0,0 0,20 3,20 9,0" class="s6"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0" class="s1"></path>
+      </g>''',
+    'vmx-3':
+    '''      <g id="vmx-3">
+        <path d="M0,0 0,20 3,20 6,10 3,0" class="s6"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 8,17" class="s2"></path>
+        <path d="M20,0 7,13" class="s2"></path>
+        <path d="M15,0 7,8" class="s2"></path>
+        <path d="M10,0 9,1" class="s2"></path>
+      </g>''',
+    'vmd-3':
+    '''      <g id="vmd-3">
+        <path d="m0,0 0,20 20,0 C 10,20 7,10 3,0" class="s6"></path>
+        <path d="m0,0 3,0 c 4,10 7,20 17,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vmu-3':
+    '''      <g id="vmu-3">
+        <path d="m0,0 0,20 3,0 C 7,10 10,0 20,0" class="s6"></path>
+        <path d="m0,20 3,0 C 7,10 10,0 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vmz-3':
+    '''      <g id="vmz-3">
+        <path d="M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20" class="s6"></path>
+        <path d="m0,0 3,0 c 7,10 12,10 17,10" class="s1"></path>
+        <path d="m0,20 3,0 C 10,10 15,10 20,10" class="s1"></path>
+      </g>''',
+    'vmv-3-3':
+    '''      <g id="vmv-3-3">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s6"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s6"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-3-4':
+    '''      <g id="vmv-3-4">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s7"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s6"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-3-5':
+    '''      <g id="vmv-3-5">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s8"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s6"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-4-3':
+    '''      <g id="vmv-4-3">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s6"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s7"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-4-4':
+    '''      <g id="vmv-4-4">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s7"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s7"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-4-5':
+    '''      <g id="vmv-4-5">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s8"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s7"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-5-3':
+    '''      <g id="vmv-5-3">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s6"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s8"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-5-4':
+    '''      <g id="vmv-5-4">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s7"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s8"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-5-5':
+    '''      <g id="vmv-5-5">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s8"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s8"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    '0mv-4':
+    '''      <g id="0mv-4">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s7"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    '1mv-4':
+    '''      <g id="1mv-4">
+        <path d="M2.875,0 20,0 20,20 9,20 z" class="s7"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'xmv-4':
+    '''      <g id="xmv-4">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s7"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,5 3.5,1.5" class="s2"></path>
+        <path d="M0,10 4.5,5.5" class="s2"></path>
+        <path d="M0,15 6,9" class="s2"></path>
+        <path d="M0,20 4,16" class="s2"></path>
+      </g>''',
+    'dmv-4':
+    '''      <g id="dmv-4">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s7"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'umv-4':
+    '''      <g id="umv-4">
+        <path d="M3,0 20,0 20,20 9,20 z" class="s7"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'zmv-4':
+    '''      <g id="zmv-4">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s7"></path>
+        <path d="m6,10 3,10 11,0" class="s1"></path>
+        <path d="M0,10 6,10 9,0 20,0" class="s1"></path>
+      </g>''',
+    '0mv-5':
+    '''      <g id="0mv-5">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s8"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    '1mv-5':
+    '''      <g id="1mv-5">
+        <path d="M2.875,0 20,0 20,20 9,20 z" class="s8"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'xmv-5':
+    '''      <g id="xmv-5">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s8"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,5 3.5,1.5" class="s2"></path>
+        <path d="M0,10 4.5,5.5" class="s2"></path>
+        <path d="M0,15 6,9" class="s2"></path>
+        <path d="M0,20 4,16" class="s2"></path>
+      </g>''',
+    'dmv-5':
+    '''      <g id="dmv-5">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s8"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'umv-5':
+    '''      <g id="umv-5">
+        <path d="M3,0 20,0 20,20 9,20 z" class="s8"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'zmv-5':
+    '''      <g id="zmv-5">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s8"></path>
+        <path d="m6,10 3,10 11,0" class="s1"></path>
+        <path d="M0,10 6,10 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vvv-4':
+    '''      <g id="vvv-4">
+        <path d="M20,20 0,20 0,0 20,0" class="s7"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vm0-4':
+    '''      <g id="vm0-4">
+        <path d="M0,20 0,0 3,0 9,20" class="s7"></path>
+        <path d="M0,0 3,0 9,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vm1-4':
+    '''      <g id="vm1-4">
+        <path d="M0,0 0,20 3,20 9,0" class="s7"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0" class="s1"></path>
+      </g>''',
+    'vmx-4':
+    '''      <g id="vmx-4">
+        <path d="M0,0 0,20 3,20 6,10 3,0" class="s7"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 8,17" class="s2"></path>
+        <path d="M20,0 7,13" class="s2"></path>
+        <path d="M15,0 7,8" class="s2"></path>
+        <path d="M10,0 9,1" class="s2"></path>
+      </g>''',
+    'vmd-4':
+    '''      <g id="vmd-4">
+        <path d="m0,0 0,20 20,0 C 10,20 7,10 3,0" class="s7"></path>
+        <path d="m0,0 3,0 c 4,10 7,20 17,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vmu-4':
+    '''      <g id="vmu-4">
+        <path d="m0,0 0,20 3,0 C 7,10 10,0 20,0" class="s7"></path>
+        <path d="m0,20 3,0 C 7,10 10,0 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vmz-4':
+    '''      <g id="vmz-4">
+        <path d="M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20" class="s7"></path>
+        <path d="m0,0 3,0 c 7,10 12,10 17,10" class="s1"></path>
+        <path d="m0,20 3,0 C 10,10 15,10 20,10" class="s1"></path>
+      </g>''',
+    'vvv-5':
+    '''      <g id="vvv-5">
+        <path d="M20,20 0,20 0,0 20,0" class="s8"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vm0-5':
+    '''      <g id="vm0-5">
+        <path d="M0,20 0,0 3,0 9,20" class="s8"></path>
+        <path d="M0,0 3,0 9,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vm1-5':
+    '''      <g id="vm1-5">
+        <path d="M0,0 0,20 3,20 9,0" class="s8"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0" class="s1"></path>
+      </g>''',
+    'vmx-5':
+    '''      <g id="vmx-5">
+        <path d="M0,0 0,20 3,20 6,10 3,0" class="s8"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 8,17" class="s2"></path>
+        <path d="M20,0 7,13" class="s2"></path>
+        <path d="M15,0 7,8" class="s2"></path>
+        <path d="M10,0 9,1" class="s2"></path>
+      </g>''',
+    'vmd-5':
+    '''      <g id="vmd-5">
+        <path d="m0,0 0,20 20,0 C 10,20 7,10 3,0" class="s8"></path>
+        <path d="m0,0 3,0 c 4,10 7,20 17,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vmu-5':
+    '''      <g id="vmu-5">
+        <path d="m0,0 0,20 3,0 C 7,10 10,0 20,0" class="s8"></path>
+        <path d="m0,20 3,0 C 7,10 10,0 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vmz-5':
+    '''      <g id="vmz-5">
+        <path d="M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20" class="s8"></path>
+        <path d="m0,0 3,0 c 7,10 12,10 17,10" class="s1"></path>
+        <path d="m0,20 3,0 C 10,10 15,10 20,10" class="s1"></path>
+      </g>''',
+    'Pclk':
+    '''      <g id="Pclk">
+        <path d="M-3,12 0,3 3,12 C 1,11 -1,11 -3,12 z" class="s9"></path>
+        <path d="M0,20 0,0 20,0" class="s1"></path>
+      </g>''',
+    'Nclk':
+    '''      <g id="Nclk">
+        <path d="M-3,8 0,17 3,8 C 1,9 -1,9 -3,8 z" class="s9"></path>
+        <path d="m0,0 0,20 20,0" class="s1"></path>
+      </g>''',
+    'vvv-2':
+    '''      <g id="vvv-2">
+        <path d="M20,20 0,20 0,0 20,0" class="s10"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vm0-2':
+    '''      <g id="vm0-2">
+        <path d="M0,20 0,0 3,0 9,20" class="s10"></path>
+        <path d="M0,0 3,0 9,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vm1-2':
+    '''      <g id="vm1-2">
+        <path d="M0,0 0,20 3,20 9,0" class="s10"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0" class="s1"></path>
+      </g>''',
+    'vmx-2':
+    '''      <g id="vmx-2">
+        <path d="M0,0 0,20 3,20 6,10 3,0" class="s10"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m20,15 -5,5" class="s2"></path>
+        <path d="M20,10 10,20" class="s2"></path>
+        <path d="M20,5 8,17" class="s2"></path>
+        <path d="M20,0 7,13" class="s2"></path>
+        <path d="M15,0 7,8" class="s2"></path>
+        <path d="M10,0 9,1" class="s2"></path>
+      </g>''',
+    'vmd-2':
+    '''      <g id="vmd-2">
+        <path d="m0,0 0,20 20,0 C 10,20 7,10 3,0" class="s10"></path>
+        <path d="m0,0 3,0 c 4,10 7,20 17,20" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'vmu-2':
+    '''      <g id="vmu-2">
+        <path d="m0,0 0,20 3,0 C 7,10 10,0 20,0" class="s10"></path>
+        <path d="m0,20 3,0 C 7,10 10,0 20,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'vmz-2':
+    '''      <g id="vmz-2">
+        <path d="M0,0 3,0 C 10,10 15,10 20,10 15,10 10,10 3,20 L 0,20" class="s10"></path>
+        <path d="m0,0 3,0 c 7,10 12,10 17,10" class="s1"></path>
+        <path d="m0,20 3,0 C 10,10 15,10 20,10" class="s1"></path>
+      </g>''',
+    '0mv-2':
+    '''      <g id="0mv-2">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s10"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    '1mv-2':
+    '''      <g id="1mv-2">
+        <path d="M2.875,0 20,0 20,20 9,20 z" class="s10"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'xmv-2':
+    '''      <g id="xmv-2">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s10"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,5 3.5,1.5" class="s2"></path>
+        <path d="M0,10 4.5,5.5" class="s2"></path>
+        <path d="M0,15 6,9" class="s2"></path>
+        <path d="M0,20 4,16" class="s2"></path>
+      </g>''',
+    'dmv-2':
+    '''      <g id="dmv-2">
+        <path d="M9,0 20,0 20,20 3,20 z" class="s10"></path>
+        <path d="M3,20 9,0 20,0" class="s1"></path>
+        <path d="m0,20 20,0" class="s1"></path>
+      </g>''',
+    'umv-2':
+    '''      <g id="umv-2">
+        <path d="M3,0 20,0 20,20 9,20 z" class="s10"></path>
+        <path d="m3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,0 20,0" class="s1"></path>
+      </g>''',
+    'zmv-2':
+    '''      <g id="zmv-2">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s10"></path>
+        <path d="m6,10 3,10 11,0" class="s1"></path>
+        <path d="M0,10 6,10 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-3-2':
+    '''      <g id="vmv-3-2">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s10"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s6"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-4-2':
+    '''      <g id="vmv-4-2">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s10"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s7"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-5-2':
+    '''      <g id="vmv-5-2">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s10"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s8"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-2-3':
+    '''      <g id="vmv-2-3">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s6"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s10"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-2-4':
+    '''      <g id="vmv-2-4">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s7"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s10"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-2-5':
+    '''      <g id="vmv-2-5">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s8"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s10"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+    'vmv-2-2':
+    '''      <g id="vmv-2-2">
+        <path d="M9,0 20,0 20,20 9,20 6,10 z" class="s10"></path>
+        <path d="M3,0 0,0 0,20 3,20 6,10 z" class="s10"></path>
+        <path d="m0,0 3,0 6,20 11,0" class="s1"></path>
+        <path d="M0,20 3,20 9,0 20,0" class="s1"></path>
+      </g>''',
+}
diff --git a/util/wavetool.py b/util/wavetool.py
new file mode 100755
index 0000000..cfe8c12
--- /dev/null
+++ b/util/wavetool.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""Command-line tool to convert wavejson to svg
+"""
+
+import argparse
+import logging as log
+import sys
+
+import hjson
+import pkg_resources  # part of setuptools
+
+from reggen import version
+from wavegen import wavesvg
+
+USAGE = """
+  wavetool [options]
+  wavetool [options] <input> [<input>]...
+"""
+
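+# Built-in wavejson source used by the -T/--testmode option.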
+wavejson = """
+{signal: [
+  {name:'Baud Clock',  wave: 'p...........' },
+  {name:'Data 8 bit',        wave: '10========1=',
+   data: [ "lsb", "", "", "", "", "", "", "msb", "next" ] },
+  {name:'Data 7 bit',        wave: '10=======1=.',
+   data: [ "lsb", "", "", "", "", "", "msb", "next" ] },
+  {name:'Data 6 bit',        wave: '10======1=..',
+   data: [ "lsb", "", "", "", "", "msb", "next" ] },
+  {name:'5 bit,halfbaud',        wave: '10=====1=.|.', period:2,
+   data: [ "lsb", "", "", "", "msb", "next" ] },
+  {},
+  {name:'8 with Parity', wave: '10=========1',
+   data: [ "lsb", "", "", "", "", "", "", "msb", "par" ] },
+  {name:'10udz1xHL', wave: '10udz1xHL' },
+  {name:'5 bit,cdata',        wave: '10=====1=...',
+   cdata: [ "idle", "start", "lsb", "", "", "", "msb", "stop", "next" ] },
+ ],
+ head:{
+   text:'Serial Line format (one stop bit)',
+   tock:-1,
+ }
+}
+
+"""
+
+
+def main():
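+    """Parse the command line and convert each wavejson input to SVG."""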
+    done_stdin = False
+    parser = argparse.ArgumentParser(
+        prog="wavetool",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+        usage=USAGE,
+        description=__doc__,
+        epilog="by default, or with the filename '-', stdin and stdout are used")
+    parser.add_argument(
+        '--version', action='store_true', help='Show version and exit')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='store_true',
+        help='Verbose output during processing')
+    parser.add_argument(
+        '-T',
+        '--testmode',
+        action='store_true',
+        help='Run test with built-in source')
+    parser.add_argument(
+        '-o',
+        '--output',
+        type=argparse.FileType('w'),
+        default=sys.stdout,
+        metavar='file',
+        help='Output file (default stdout)')
+    parser.add_argument(
+        'srcfile',
+        nargs='*',
+        metavar='input',
+        default=['-'],
+        help='source wavejson file (default stdin)')
+    args = parser.parse_args()
+
+    if args.version:
+        version.show_and_exit(__file__, ["Hjson"])
+
+    if args.verbose:
+        log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
+    else:
+        log.basicConfig(format="%(levelname)s: %(message)s")
+
+    outfile = args.output
+
+    with outfile:
+        if args.testmode:
+            obj = hjson.loads(wavejson)
+
+            svg0 = wavesvg.convert(obj, 0)
+            svg1 = wavesvg.convert(obj, 1)
+            outfile.write(svg0)
+            outfile.write('<h2>Generate again, should not repeat defs</h2>\n')
+            outfile.write(svg1)
+        else:
+            num = 0
+            for filename in args.srcfile:
+                if filename == '-':
+                    if done_stdin:
+                        log.warning("Ignoring stdin after first use")
+                        continue
+                    done_stdin = True
+                    infile = sys.stdin
+                else:
+                    infile = open(filename, 'r', encoding='UTF-8')
+                with infile:
+                    obj = hjson.load(infile)
+                    log.info("\nFile now " + filename)
+                    outfile.write("<H2>" + filename + "</H2>")
+                    outfile.write(wavesvg.convert(obj, num))
+                    num += 1
+
+
+if __name__ == '__main__':
+    main()