Run lintpy.py on reggen, tlgen and topgen

This is a mechanical reformat of the Python sources under util/reggen,
util/tlgen and util/topgen: line wrapping, continuation-line indentation
and import ordering are adjusted to satisfy the project's Python style
checks. No functional changes are intended.
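
To reproduce, re-run the lint script over these directories. The exact
flags below are an assumption (see the script's help output for its
actual interface):

    ./util/lintpy.py --fix util/reggen util/tlgen util/topgen  # hypothetical invocation
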
Signed-off-by: Weicai Yang <weicai@google.com>
diff --git a/util/reggen/data.py b/util/reggen/data.py
index 479d5ea..db16782 100644
--- a/util/reggen/data.py
+++ b/util/reggen/data.py
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: Apache-2.0
 
 from collections import OrderedDict
+
 from .field_enums import HwAccess, SwAccess, SwRdAccess, SwWrAccess
 
 
diff --git a/util/reggen/field_enums.py b/util/reggen/field_enums.py
index 8ca5af1..dd572b7 100644
--- a/util/reggen/field_enums.py
+++ b/util/reggen/field_enums.py
@@ -1,7 +1,6 @@
 # Copyright lowRISC contributors.
 # Licensed under the Apache License, Version 2.0, see LICENSE for details.
 # SPDX-License-Identifier: Apache-2.0
-
 """Enumerated types for fields
 Generated by validation, used by backends
 """
diff --git a/util/reggen/gen_cheader.py b/util/reggen/gen_cheader.py
index 08f88ce..f59b28b 100644
--- a/util/reggen/gen_cheader.py
+++ b/util/reggen/gen_cheader.py
@@ -104,8 +104,10 @@
 
         if field['bitinfo'][1] == 1:
             # single bit
-            genout(outstr,
-                   gen_define(dname + '_BIT', [], str(fieldlsb), existing_defines))
+            genout(
+                outstr,
+                gen_define(dname + '_BIT', [], str(fieldlsb),
+                           existing_defines))
         else:
             # multiple bits (unless it is the whole register)
             if field['bitinfo'][1] != width:
@@ -253,8 +255,9 @@
 
     if interrupt['bitinfo'][1] == 1:
         # single bit
-        genout(outstr, gen_define(defname + '_BIT', [], str(fieldlsb),
-                                  existing_defines))
+        genout(
+            outstr,
+            gen_define(defname + '_BIT', [], str(fieldlsb), existing_defines))
     else:
         # multiple bits (unless it is the whole register)
         if interrupt['bitinfo'][1] != regwidth:
diff --git a/util/reggen/gen_dv.py b/util/reggen/gen_dv.py
index 75cb8d1..3fcc646 100644
--- a/util/reggen/gen_dv.py
+++ b/util/reggen/gen_dv.py
@@ -1,7 +1,6 @@
 # Copyright lowRISC contributors.
 # Licensed under the Apache License, Version 2.0, see LICENSE for details.
 # SPDX-License-Identifier: Apache-2.0
-
 '''Generate DV code for an IP block'''
 
 import logging as log
diff --git a/util/reggen/gen_fpv.py b/util/reggen/gen_fpv.py
index bd49dd1..0d412fb 100644
--- a/util/reggen/gen_fpv.py
+++ b/util/reggen/gen_fpv.py
@@ -14,23 +14,27 @@
 from mako.template import Template
 from pkg_resources import resource_filename
 
+from .data import *
 from .field_enums import HwAccess, SwAccess, SwRdAccess, SwWrAccess
 from .gen_rtl import json_to_reg
-from .data import *
+
 
 # function get write property name
 def wpname(r):
     return r.name + "_wr_p"
 
+
 # function get read property name
 def rpname(r):
     return r.name + "_rd_p"
 
+
 def gen_fpv(obj, outdir):
     # obj: OrderedDict
     block = json_to_reg(obj)
     gen_assertion(block, outdir)
 
+
 def gen_assertion(block, outdir):
     # Read Register templates
     fpv_csr_tpl = Template(
diff --git a/util/reggen/gen_json.py b/util/reggen/gen_json.py
index ff7db14..86eb217 100644
--- a/util/reggen/gen_json.py
+++ b/util/reggen/gen_json.py
@@ -9,23 +9,24 @@
 
 def gen_json(obj, outfile, format):
     if format == 'json':
-        hjson.dumpJSON(
-            obj,
-            outfile,
-            ensure_ascii=False,
-            use_decimal=True,
-            indent='  ',
-            for_json=True)
+        hjson.dumpJSON(obj,
+                       outfile,
+                       ensure_ascii=False,
+                       use_decimal=True,
+                       indent='  ',
+                       for_json=True)
     elif format == 'compact':
-        hjson.dumpJSON(
-            obj,
-            outfile,
-            ensure_ascii=False,
-            for_json=True,
-            use_decimal=True,
-            separators=(',', ':'))
+        hjson.dumpJSON(obj,
+                       outfile,
+                       ensure_ascii=False,
+                       for_json=True,
+                       use_decimal=True,
+                       separators=(',', ':'))
     elif format == 'hjson':
-        hjson.dump(
-            obj, outfile, ensure_ascii=False, for_json=True, use_decimal=True)
+        hjson.dump(obj,
+                   outfile,
+                   ensure_ascii=False,
+                   for_json=True,
+                   use_decimal=True)
     else:
         raise ValueError('Invalid JSON format ' + format)
diff --git a/util/reggen/gen_rtl.py b/util/reggen/gen_rtl.py
index 1c0036f..1454987 100644
--- a/util/reggen/gen_rtl.py
+++ b/util/reggen/gen_rtl.py
@@ -6,11 +6,11 @@
 
 import logging as log
 
-from mako.template import Template
 from mako import exceptions
+from mako.template import Template
 from pkg_resources import resource_filename
 
-from .data import Field, Reg, MultiReg, Window, Block
+from .data import Block, Field, MultiReg, Reg, Window
 from .field_enums import HwAccess, SwRdAccess, SwWrAccess
 
 
diff --git a/util/reggen/gen_selfdoc.py b/util/reggen/gen_selfdoc.py
index 153ee51..898f6cf 100644
--- a/util/reggen/gen_selfdoc.py
+++ b/util/reggen/gen_selfdoc.py
@@ -217,9 +217,9 @@
         desc_txt = desc
 
     if val_type is not None:
-        genout(outfile,
-               '{} | {} | {} | {}\n'
-               .format(key, validate.key_use[use], val_type, desc_txt))
+        genout(
+            outfile, '{} | {} | {} | {}\n'.format(key, validate.key_use[use],
+                                                  val_type, desc_txt))
     else:
         genout(outfile, key + " | " + desc_txt + "\n")
 
diff --git a/util/reggen/validate.py b/util/reggen/validate.py
index 63dcb95..4d658e6 100644
--- a/util/reggen/validate.py
+++ b/util/reggen/validate.py
@@ -6,12 +6,12 @@
 """
 
 import logging as log
-import re
 import operator
+import re
 from collections import OrderedDict
 from copy import deepcopy
 
-from reggen.field_enums import SwWrAccess, SwRdAccess, SwAccess, HwAccess
+from reggen.field_enums import HwAccess, SwAccess, SwRdAccess, SwWrAccess
 
 
 # Routine that can be used for Hjson object_pairs_hook
@@ -151,33 +151,40 @@
         if randtype != "none":
 
             if randcount <= 0:
-                log.error(err_prefix +
-                          ' randwith for parameter ' + y['name'] + ' must be greater > 0.')
+                log.error(err_prefix + ' randwith for parameter ' + y['name'] +
+                          ' must be greater > 0.')
                 return error + 1
 
             if randtype not in ['perm', 'data']:
-                log.error(err_prefix +
-                          ' parameter ' + y['name'] + ' has unknown randtype ' + randtype)
+                log.error(err_prefix + ' parameter ' + y['name'] +
+                          ' has unknown randtype ' + randtype)
                 return error + 1
 
             if y.get('type') is None:
-                log.error(err_prefix + ' parameter ' + y['name'] + ' has undefined type. '
-                          'It is required to define the type in the IP package.')
+                log.error(
+                    err_prefix + ' parameter ' + y['name'] +
+                    ' has undefined type. '
+                    'It is required to define the type in the IP package.')
                 return error + 1
 
             if not y.get('name').lower().startswith('rndcnst'):
-                log.error(err_prefix + ' parameter ' + y['name'] + ' is defined as a compile-time '
-                          'random netlist constant. The name must therefore start with RndCnst.')
+                log.error(
+                    err_prefix + ' parameter ' + y['name'] +
+                    ' is defined as a compile-time '
+                    'random netlist constant. The name must therefore start with RndCnst.'
+                )
                 return error + 1
 
-            overrides = [('local', 'false'),
-                         ('default', ''),
+            overrides = [('local', 'false'), ('default', ''),
                          ('expose', 'false')]
 
             for key, value in overrides:
                 if y.setdefault(key, value) != value:
-                    log.error(err_prefix + ' ' + key + ' for parameter ' + y['name'] +
-                              ' must not be set since it will be defined automatically.')
+                    log.error(
+                        err_prefix + ' ' + key + ' for parameter ' +
+                        y['name'] +
+                        ' must not be set since it will be defined automatically.'
+                    )
                     return error + 1
 
         # TODO: Check if PascalCase or ALL_CAPS
@@ -196,9 +203,8 @@
             y["expose"] = "false"
 
         if y["local"] == "true" and y["expose"] == "true":
-            log.error(
-                err_prefix + ' element ' + x + '["' + y["name"] +
-                '"]' + ' cannot be local and exposed to top level')
+            log.error(err_prefix + ' element ' + x + '["' + y["name"] + '"]' +
+                      ' cannot be local and exposed to top level')
             return error + 1
 
         if "default" in y:
@@ -212,17 +218,15 @@
             # Don't make assumptions for exposed parameters. These must have
             # a default.
             if y["expose"] == "true":
-                log.error(
-                    err_prefix + ' element ' + x + '["' + y["name"] + '"]' +
-                    ' has no defined default value')
+                log.error(err_prefix + ' element ' + x + '["' + y["name"] +
+                          '"]' + ' has no defined default value')
             elif y["type"][:3] == "int":
                 y["default"] = "1"
             elif y["type"] == "string":
                 y["default"] = ""
             else:
-                log.error(
-                    err_prefix + ' element ' + x + '["' + y["name"] + '"]' +
-                    ' type is not supported')
+                log.error(err_prefix + ' element ' + x + '["' + y["name"] +
+                          '"]' + ' type is not supported')
                 return error + 1
 
     return error
@@ -392,8 +396,8 @@
         "information in a comment at the top of the "
         "file."
     ],
-    'hier_path': [None,
-                  'additional hierarchy path before the reg block instance']
+    'hier_path':
+    [None, 'additional hierarchy path before the reg block instance']
 }
 top_added = {
     'genrnames': ['pl', "list of register names"],
@@ -426,7 +430,8 @@
     'default': ['s', "item default value"],
     'local': ['pb', "to be localparam"],
     'expose': ['pb', "to be exposed to top"],
-    'randcount': ['s', "number of bits to randomize in the parameter. 0 by default."],
+    'randcount':
+    ['s', "number of bits to randomize in the parameter. 0 by default."],
     'randtype': ['s', "type of randomization to perform. none by default"],
 }
 
@@ -566,10 +571,9 @@
         'pb', "If true, regwen term increments"
         " along with current multireg count."
     ],
-    'compact': [
-        'pb', "If true, allow multireg compacting."
-        "If false, do not compact."
-    ],
+    'compact':
+    ['pb', "If true, allow multireg compacting."
+     "If false, do not compact."],
 })
 
 multireg_added = {
@@ -917,8 +921,7 @@
                     resval &= max_in_field
 
                 if (full_resval is not None and
-                    (resval !=
-                     ((full_resval >> field_lsb) & max_in_field))):
+                    (resval != ((full_resval >> field_lsb) & max_in_field))):
                     error += 1
                     log.error(fname + ": Field resval " + field['resval'] +
                               " differs from value in main register resval " +
@@ -971,8 +974,9 @@
 
                 old_name = enum_val_names.get(val)
                 if old_name is not None:
-                    log.warning('{}: both {!r} and {!r} have enum value {}.'
-                                .format(fname, old_name, ename, val))
+                    log.warning(
+                        '{}: both {!r} and {!r} have enum value {}.'.format(
+                            fname, old_name, ename, val))
                 enum_val_names[val] = ename
 
             # Check whether every possible bit pattern has a named enum value,
@@ -981,9 +985,8 @@
             if len(enum_val_names) < max_in_field + 1:
                 field['genrsvdenum'] = True
                 log.debug('{}: Enum values not complete '
-                          '({} of {} values named).'
-                          .format(fname,
-                                  len(enum_val_names), max_in_field + 1))
+                          '({} of {} values named).'.format(
+                              fname, len(enum_val_names), max_in_field + 1))
 
     return error, gen_resval, gen_resmask, bits_used
 
@@ -1258,7 +1261,8 @@
     num_fields = len(mreg['fields'])
 
     # Maximum number of fields per reg
-    max_fields_per_reg = 1 if num_fields > 1 else min(mcount, int(width / bits_used))
+    max_fields_per_reg = 1 if num_fields > 1 else min(mcount,
+                                                      int(width / bits_used))
 
     # list of created registers
     rlist = []
@@ -1278,8 +1282,8 @@
     # will there be multiple registers?
     is_mult = (mcount > max_fields_per_reg) or (not is_compact and mcount > 1)
 
-    log.debug("Multireg attributes 0x{:x} {} {} {}"
-              .format(field_bitmask, bits_used, num_fields, max_fields_per_reg))
+    log.debug("Multireg attributes 0x{:x} {} {} {}".format(
+        field_bitmask, bits_used, num_fields, max_fields_per_reg))
     while idx < mcount:
 
         genreg = deepcopy(template_reg)
@@ -1297,7 +1301,8 @@
             for fnum in range(0, min(mcount - idx, max_fields_per_reg)):
                 new_field = deepcopy(mreg['fields'][0])
                 new_field['name'] += "_" + str(idx)
-                new_field['bits'] = bitfield_add(new_field['bits'], fnum * bits_used)
+                new_field['bits'] = bitfield_add(new_field['bits'],
+                                                 fnum * bits_used)
                 _multi_simplify(new_field, cname, idx)
                 genreg['fields'].append(new_field)
                 idx += 1
@@ -1424,11 +1429,11 @@
     new_reg = make_intr_alert_reg(regs, 'INTR_STATE', offset, 'rw1c', 'hrw',
                                   'Interrupt State Register')
     iregs.append(new_reg)
-    new_reg = make_intr_alert_reg(regs, 'INTR_ENABLE', offset + addrsep, 'rw', 'hro',
-                                  'Interrupt Enable Register')
+    new_reg = make_intr_alert_reg(regs, 'INTR_ENABLE', offset + addrsep, 'rw',
+                                  'hro', 'Interrupt Enable Register')
     iregs.append(new_reg)
-    new_reg = make_intr_alert_reg(regs, 'INTR_TEST', offset + 2 * addrsep, 'wo', 'hro',
-                                  'Interrupt Test Register')
+    new_reg = make_intr_alert_reg(regs, 'INTR_TEST', offset + 2 * addrsep,
+                                  'wo', 'hro', 'Interrupt Test Register')
     iregs.append(new_reg)
     return iregs, 0
 
@@ -1574,16 +1579,14 @@
     for reg in mreg_reg_list:
         # There is only one field
         if len(reg['fields']) == 1:
-            genreg_list.append((reg['name'].lower(),
-                                reg['fields'][0]['genresval'],
-                                reg['fields'][0]['swaccess'],
-                                reg['fields'][0]['hwaccess']))
+            genreg_list.append(
+                (reg['name'].lower(), reg['fields'][0]['genresval'],
+                 reg['fields'][0]['swaccess'], reg['fields'][0]['hwaccess']))
         else:
             for f in reg['fields']:
-                genreg_list.append(((reg['name'] + "_" + f['name']).lower(),
-                                    f['genresval'],
-                                    f['swaccess'],
-                                    f['hwaccess']))
+                genreg_list.append(
+                    ((reg['name'] + "_" + f['name']).lower(), f['genresval'],
+                     f['swaccess'], f['hwaccess']))
 
     # Need to check in register names and field list in case of multireg
     reg_list.extend(genreg_list)
@@ -1709,8 +1712,10 @@
             alert_width = int(alert.get('width', '1'), 0)
             num_alerts += alert_width
             if alert_width > 1:
-                log.warning("{}: Consider naming each alert individually instead of "
-                            "declaring an alert signal with width > 1.".format(alert['name']))
+                log.warning(
+                    "{}: Consider naming each alert individually instead of "
+                    "declaring an alert signal with width > 1.".format(
+                        alert['name']))
 
         if num_alerts != 0:
             param = ''
@@ -1722,7 +1727,8 @@
                 if (param['type'] != 'int' or
                         param['default'] != str(num_alerts) or
                         param['local'] != 'true'):
-                    log.error('Conflicting definition of NumAlerts parameter found.')
+                    log.error(
+                        'Conflicting definition of NumAlerts parameter found.')
                     error += 1
             else:
                 # Generate the NumAlerts parameter.
diff --git a/util/regtool.py b/util/regtool.py
index 764bbdc..8a81ec1 100755
--- a/util/regtool.py
+++ b/util/regtool.py
@@ -13,8 +13,8 @@
 
 import hjson
 
-from reggen import (gen_cheader, gen_ctheader, gen_dv, gen_html, gen_json,
-                    gen_rtl, gen_fpv, gen_selfdoc, validate, version)
+from reggen import (gen_cheader, gen_ctheader, gen_dv, gen_fpv, gen_html,
+                    gen_json, gen_rtl, gen_selfdoc, validate, version)
 
 DESC = """regtool, generate register info from Hjson source"""
 
@@ -115,25 +115,18 @@
     # name of the format. dirspec is None if the output is a single file; if
     # the output needs a directory, it is a default path relative to the source
     # file (used when --outdir is not given).
-    arg_to_format = [
-        ('j', ('json', None)),
-        ('c', ('compact', None)),
-        ('d', ('html', None)),
-        ('doc', ('doc', None)),
-        ('r', ('rtl', 'rtl')),
-        ('s', ('dv', 'dv')),
-        ('f', ('fpv', 'fpv/vip')),
-        ('cdefines', ('cdh', None)),
-        ('ctdefines', ('cth', None))
-    ]
+    arg_to_format = [('j', ('json', None)), ('c', ('compact', None)),
+                     ('d', ('html', None)), ('doc', ('doc', None)),
+                     ('r', ('rtl', 'rtl')), ('s', ('dv', 'dv')),
+                     ('f', ('fpv', 'fpv/vip')), ('cdefines', ('cdh', None)),
+                     ('ctdefines', ('cth', None))]
     format = None
     dirspec = None
     for arg_name, spec in arg_to_format:
         if getattr(args, arg_name):
             if format is not None:
                 log.error('Multiple output formats specified on '
-                          'command line ({} and {}).'
-                          .format(format, spec[0]))
+                          'command line ({} and {}).'.format(format, spec[0]))
                 sys.exit(1)
             format, dirspec = spec
     if format is None:
@@ -149,16 +142,14 @@
     if dirspec is None:
         if args.outdir is not None:
             log.error('The {} format expects an output file, '
-                      'not an output directory.'
-                      .format(format))
+                      'not an output directory.'.format(format))
             sys.exit(1)
 
         outfile = args.outfile
     else:
         if args.outfile is not sys.stdout:
             log.error('The {} format expects an output directory, '
-                      'not an output file.'
-                      .format(format))
+                      'not an output file.'.format(format))
             sys.exit(1)
 
         if args.outdir is not None:
@@ -167,10 +158,11 @@
             outdir = str(PurePath(infile.name).parents[1].joinpath(dirspec))
         else:
             # We're using sys.stdin, so can't infer an output directory name
-            log.error('The {} format writes to an output directory, which '
-                      'cannot be inferred automatically if the input comes '
-                      'from stdin. Use --outdir to specify it manually.'
-                      .format(format))
+            log.error(
+                'The {} format writes to an output directory, which '
+                'cannot be inferred automatically if the input comes '
+                'from stdin. Use --outdir to specify it manually.'.format(
+                    format))
             sys.exit(1)
 
     if format == 'doc':
diff --git a/util/tlgen/doc.py b/util/tlgen/doc.py
index a2c1f6f..5c0037d 100644
--- a/util/tlgen/doc.py
+++ b/util/tlgen/doc.py
@@ -6,6 +6,7 @@
 import logging as log
 
 from reggen.validate import val_types
+
 from .validate import root
 
 doc_intro = """
diff --git a/util/tlgen/elaborate.py b/util/tlgen/elaborate.py
index e65b6b6..6b2b935 100644
--- a/util/tlgen/elaborate.py
+++ b/util/tlgen/elaborate.py
@@ -161,8 +161,9 @@
                 dnode.hpass = 1
                 dnode.hdepth = 0
 
-            log.info("Finished processing socket1n {}, pass={}, depth={}"
-                     .format(dnode.name, dnode.hpass, dnode.hdepth))
+            log.info(
+                "Finished processing socket1n {}, pass={}, depth={}".format(
+                    dnode.name, dnode.hpass, dnode.hdepth))
 
         elif dnode.node_type == NodeType.SOCKET_M1:
             idx = dnode.us.index(host.ds[0])
@@ -179,8 +180,9 @@
                 dnode.hpass = dnode.hpass | (1 << idx)
                 dnode.hdepth = dnode.hdepth & ~(0xF << idx * 4)
 
-            log.info("Finished processing socketm1 {}, pass={}, depth={}"
-                     .format(dnode.name, dnode.hpass, dnode.hdepth))
+            log.info(
+                "Finished processing socketm1 {}, pass={}, depth={}".format(
+                    dnode.name, dnode.hpass, dnode.hdepth))
 
     for device in xbar.devices:
         # go upstream and set DReq/RspPass at the first instance.
@@ -227,8 +229,8 @@
                 unode.dpass = unode.dpass | (1 << idx)
                 unode.ddepth = unode.ddepth & ~(0xF << idx * 4)
 
-            log.info("Finished processing socket1n {}, pass={:x}, depth={:x}"
-                     .format(unode.name, unode.dpass, unode.ddepth))
+            log.info("Finished processing socket1n {}, pass={:x}, depth={:x}".
+                     format(unode.name, unode.dpass, unode.ddepth))
 
         elif unode.node_type == NodeType.SOCKET_M1:
             if full_fifo:
@@ -244,7 +246,7 @@
                 unode.dpass = 1
                 unode.ddepth = 0
 
-            log.info("Finished processing socketm1 {}, pass={:x}, depth={:x}"
-                     .format(unode.name, unode.dpass, unode.ddepth))
+            log.info("Finished processing socketm1 {}, pass={:x}, depth={:x}".
+                     format(unode.name, unode.dpass, unode.ddepth))
 
     return xbar
diff --git a/util/tlgen/validate.py b/util/tlgen/validate.py
index 84f2708..95f6a95 100644
--- a/util/tlgen/validate.py
+++ b/util/tlgen/validate.py
@@ -8,9 +8,8 @@
 from reggen.validate import check_bool, check_int, val_types
 
 from .item import Node, NodeType
-from .xbar import Xbar
-
 from .lib import simplify_addr
+from .xbar import Xbar
 
 # val_types = {
 #     'd': ["int", "integer (binary 0b, octal 0o, decimal, hex 0x)"],
diff --git a/util/topgen/__init__.py b/util/topgen/__init__.py
index 8661abf..d469f1e 100644
--- a/util/topgen/__init__.py
+++ b/util/topgen/__init__.py
@@ -2,7 +2,7 @@
 # Licensed under the Apache License, Version 2.0, see LICENSE for details.
 # SPDX-License-Identifier: Apache-2.0
 
+from .lib import get_hjsonobj_xbars, search_ips  # noqa: F401
 # noqa: F401 These functions are used in topgen.py
-from .merge import merge_top, amend_clocks  # noqa: F401
+from .merge import amend_clocks, merge_top  # noqa: F401
 from .validate import validate_top  # noqa: F401
-from .lib import search_ips, get_hjsonobj_xbars  # noqa: F401
diff --git a/util/topgen/c.py b/util/topgen/c.py
index d062825..de116cc 100644
--- a/util/topgen/c.py
+++ b/util/topgen/c.py
@@ -5,11 +5,10 @@
 `top_{name}.h`.
 """
 from collections import OrderedDict
+from math import ceil
 
 from mako.template import Template
 
-from math import ceil
-
 
 class Name(object):
     """We often need to format names in specific ways; this class does so."""
@@ -222,8 +221,7 @@
             # adding a bit-index suffix
             if "width" in intr and int(intr["width"]) != 1:
                 for i in range(int(intr["width"])):
-                    name = Name.from_snake_case(
-                        intr["name"]) + Name([str(i)])
+                    name = Name.from_snake_case(intr["name"]) + Name([str(i)])
                     irq_id = interrupts.add_constant(name,
                                                      docstring="{} {}".format(
                                                          intr["name"], i))
@@ -276,10 +274,10 @@
         for alert in self.top["alert"]:
             if "width" in alert and int(alert["width"]) != 1:
                 for i in range(int(alert["width"])):
-                    name = Name.from_snake_case(
-                        alert["name"]) + Name([str(i)])
-                    irq_id = alerts.add_constant(name, docstring="{} {}".format(
-                        alert["name"], i))
+                    name = Name.from_snake_case(alert["name"]) + Name([str(i)])
+                    irq_id = alerts.add_constant(name,
+                                                 docstring="{} {}".format(
+                                                     alert["name"], i))
                     source_name = source_name_map[alert["module_name"]]
                     alert_mapping.add_entry(irq_id, source_name)
             else:
@@ -318,15 +316,15 @@
         for signal in pinmux_info["inouts"] + pinmux_info["inputs"]:
             if "width" in signal and int(signal["width"]) != 1:
                 for i in range(int(signal["width"])):
-                    name = Name.from_snake_case(
-                        signal["name"]) + Name([str(i)])
+                    name = Name.from_snake_case(signal["name"]) + Name(
+                        [str(i)])
                     peripheral_in.add_constant(name,
                                                docstring="{} {}".format(
                                                    signal["name"], i))
             else:
                 peripheral_in.add_constant(Name.from_snake_case(
                     signal["name"]),
-                    docstring=signal["name"])
+                                           docstring=signal["name"])
         peripheral_in.add_last_constant("Last valid peripheral input")
 
         # Pinmux Input Selects
@@ -358,8 +356,8 @@
         for signal in pinmux_info["inouts"] + pinmux_info["outputs"]:
             if "width" in signal and int(signal["width"]) != 1:
                 for i in range(int(signal["width"])):
-                    name = Name.from_snake_case(
-                        signal["name"]) + Name([str(i)])
+                    name = Name.from_snake_case(signal["name"]) + Name(
+                        [str(i)])
                     outsel.add_constant(name,
                                         docstring="{} {}".format(
                                             signal["name"], i))
@@ -374,11 +372,13 @@
         self.pinmux_outsel = outsel
 
     def _init_pwrmgr_wakeups(self):
-        enum = CEnum(self._top_name + Name(["power", "manager", "wake", "ups"]))
+        enum = CEnum(self._top_name +
+                     Name(["power", "manager", "wake", "ups"]))
 
         for signal in self.top["wakeups"]:
-            enum.add_constant(Name.from_snake_case(signal["module"]) +
-                              Name.from_snake_case(signal["name"]))
+            enum.add_constant(
+                Name.from_snake_case(signal["module"]) +
+                Name.from_snake_case(signal["name"]))
 
         enum.add_last_constant("Last valid pwrmgr wakeup signal")
 
@@ -386,10 +386,13 @@
 
     # Enumerates the positions of all software controllable resets
     def _init_rstmgr_sw_rsts(self):
-        sw_rsts = [rst for rst in self.top["resets"]["nodes"] if 'sw' in rst
-                   and rst['sw'] == 1]
+        sw_rsts = [
+            rst for rst in self.top["resets"]["nodes"]
+            if 'sw' in rst and rst['sw'] == 1
+        ]
 
-        enum = CEnum(self._top_name + Name(["reset", "manager", "sw", "resets"]))
+        enum = CEnum(self._top_name +
+                     Name(["reset", "manager", "sw", "resets"]))
 
         for rst in sw_rsts:
             enum.add_constant(Name.from_snake_case(rst["name"]))
@@ -399,11 +402,13 @@
         self.rstmgr_sw_rsts = enum
 
     def _init_pwrmgr_reset_requests(self):
-        enum = CEnum(self._top_name + Name(["power", "manager", "reset", "requests"]))
+        enum = CEnum(self._top_name +
+                     Name(["power", "manager", "reset", "requests"]))
 
         for signal in self.top["reset_requests"]:
-            enum.add_constant(Name.from_snake_case(signal["module"]) +
-                              Name.from_snake_case(signal["name"]))
+            enum.add_constant(
+                Name.from_snake_case(signal["module"]) +
+                Name.from_snake_case(signal["name"]))
 
         enum.add_last_constant("Last valid pwrmgr reset_request signal")
 
@@ -421,9 +426,9 @@
         clock manager has separate register interfaces for each group.
         """
 
-
         aon_clocks = set()
-        for src in self.top['clocks']['srcs'] + self.top['clocks']['derived_srcs']:
+        for src in self.top['clocks']['srcs'] + self.top['clocks'][
+                'derived_srcs']:
             if src['aon'] == 'yes':
                 aon_clocks.add(src['name'])
 
@@ -437,7 +442,8 @@
                 if source not in aon_clocks:
                     # All these clocks start with `clk_` which is redundant.
                     clock_name = Name.from_snake_case(name).remove_part("clk")
-                    docstring = "Clock {} in group {}".format(name, group['name'])
+                    docstring = "Clock {} in group {}".format(
+                        name, group['name'])
                     if group["sw_cg"] == "yes":
                         gateable_clocks.add_constant(clock_name, docstring)
                     elif group["sw_cg"] == "hint":
diff --git a/util/topgen/lib.py b/util/topgen/lib.py
index 099b61c..1e550e1 100644
--- a/util/topgen/lib.py
+++ b/util/topgen/lib.py
@@ -264,10 +264,12 @@
     """Return dict of unused resets and associated domain
     """
     unused_resets = OrderedDict()
-    unused_resets = {reset['name']: domain
-                     for reset in top['resets']['nodes']
-                     for domain in top['power']['domains']
-                     if reset['type'] == 'top' and domain not in reset['domains']}
+    unused_resets = {
+        reset['name']: domain
+        for reset in top['resets']['nodes']
+        for domain in top['power']['domains']
+        if reset['type'] == 'top' and domain not in reset['domains']
+    }
 
     log.debug("Unused resets are {}".format(unused_resets))
     return unused_resets
diff --git a/util/topgen/merge.py b/util/topgen/merge.py
index 35e5a32..9d6688f 100644
--- a/util/topgen/merge.py
+++ b/util/topgen/merge.py
@@ -4,12 +4,12 @@
 
 import logging as log
 import random
+from collections import OrderedDict
 from copy import deepcopy
 from functools import partial
-from collections import OrderedDict
 from math import ceil, log2
 
-from topgen import lib, c
+from topgen import c, lib
 
 
 def _get_random_data_hex_literal(width):
@@ -130,13 +130,15 @@
                 par_name = i["name"]
                 if par_name.lower().startswith("sec") and not i["expose"]:
                     log.warning("{} has security-critical parameter {} "
-                                "not exposed to top".format(mod_name, par_name))
+                                "not exposed to top".format(
+                                    mod_name, par_name))
                 # Move special prefixes to the beginnining of the parameter name.
                 param_prefixes = ["Sec", "RndCnst"]
                 cc_mod_name = c.Name.from_snake_case(mod_name).as_camel_case()
                 for prefix in param_prefixes:
                     if par_name.lower().startswith(prefix.lower()):
-                        i["name_top"] = prefix + cc_mod_name + par_name[len(prefix):]
+                        i["name_top"] = prefix + cc_mod_name + par_name[
+                            len(prefix):]
                         break
                 else:
                     i["name_top"] = cc_mod_name + par_name
@@ -149,7 +151,8 @@
                 elif i["randtype"] == "perm":
                     i["default"] = _get_random_perm_hex_literal(i["randcount"])
                     # Effective width of the random vector
-                    i["randwidth"] = int(i["randcount"]) * int(ceil(log2(float(i["randcount"]))))
+                    i["randwidth"] = int(i["randcount"]) * int(
+                        ceil(log2(float(i["randcount"]))))
         else:
             ip_module["param_list"] = []
 
@@ -189,7 +192,8 @@
 
         # reset request
         if "reset_request_list" in ip:
-            ip_module["reset_request_list"] = deepcopy(ip["reset_request_list"])
+            ip_module["reset_request_list"] = deepcopy(
+                ip["reset_request_list"])
             for i in ip_module["reset_request_list"]:
                 i.pop('desc', None)
         else:
@@ -313,9 +317,8 @@
     xbar_list = [x["name"] for x in top["xbar"] if x["name"] != xbar["name"]]
 
     # case 1: another xbar --> check in xbar list
-    log.info("Handling xbar device {}, devlen {}, nodelen {}".format(device,
-                                                                     len(deviceobj),
-                                                                     len(nodeobj)))
+    log.info("Handling xbar device {}, devlen {}, nodelen {}".format(
+        device, len(deviceobj), len(nodeobj)))
     if device in xbar_list and len(nodeobj) == 0:
         log.error(
             "Another crossbar %s needs to be specified in the 'nodes' list" %
@@ -375,27 +378,24 @@
         else:
             # Crossbar check
             if len(nodeobj) == 0:
-                log.error(
-                    """
+                log.error("""
                     Device %s doesn't exist in 'module', 'memory', predefined,
                     or as a node object
-                    """
-                    % device)
+                    """ % device)
             else:
                 node = nodeobj[0]
                 node["xbar"] = False
                 required_keys = ["addr_range"]
                 if "stub" in node and node["stub"]:
-                    log.info(
-                        """
+                    log.info("""
                         Device %s definition is a stub and does not exist in
                         'module', 'memory' or predefined
-                        """
-                        % device)
+                        """ % device)
 
                     if all(key in required_keys for key in node.keys()):
-                        log.error("{}, The xbar only node is missing fields, see {}".format(
-                            node['name'], required_keys))
+                        log.error(
+                            "{}, The xbar only node is missing fields, see {}".
+                            format(node['name'], required_keys))
                     process_pipeline_var(node)
                 else:
                     log.error("Device {} definition is not a stub!")
@@ -663,7 +663,8 @@
 
     # add entry to inter_module automatically
     for intf in top['exported_clks']:
-        top['inter_module']['external']['clkmgr.clocks_{}'.format(intf)] = "clks_{}".format(intf)
+        top['inter_module']['external']['clkmgr.clocks_{}'.format(
+            intf)] = "clks_{}".format(intf)
 
     # add to intermodule connections
     for ep in trans_eps:
@@ -672,7 +673,6 @@
 
 
 def amend_resets(top):
-
     """Generate exported reset structure and automatically connect to
        intermodule.
     """
@@ -699,14 +699,15 @@
 
     # add entry to inter_module automatically
     for intf in top['exported_rsts']:
-        top['inter_module']['external']['rstmgr.resets_{}'.format(intf)] = "rsts_{}".format(intf)
-
+        top['inter_module']['external']['rstmgr.resets_{}'.format(
+            intf)] = "rsts_{}".format(intf)
     """Discover the full path and selection to each reset connection.
        This is done by modifying the reset connection of each end point.
     """
     for end_point in top['module'] + top['memory'] + top['xbar']:
         for port, net in end_point['reset_connections'].items():
-            reset_path = lib.get_reset_path(net, end_point['domain'], top['resets'])
+            reset_path = lib.get_reset_path(net, end_point['domain'],
+                                            top['resets'])
             end_point['reset_connections'][port] = reset_path
 
     # reset paths are still needed temporarily until host only modules are properly automated
@@ -793,8 +794,10 @@
             topcfg["wakeups"].append(signal)
 
     # add wakeup signals to pwrmgr connections
-    signal_names = ["{}.{}".format(s["module"].lower(), s["name"].lower())
-                    for s in topcfg["wakeups"]]
+    signal_names = [
+        "{}.{}".format(s["module"].lower(), s["name"].lower())
+        for s in topcfg["wakeups"]
+    ]
     # TBD: What's the best way to not hardcode this signal below?
     #      We could make this a top.hjson variable and validate it against pwrmgr hjson
     topcfg["inter_module"]["connect"]["pwrmgr.wakeups"] = signal_names
@@ -818,8 +821,10 @@
             topcfg["reset_requests"].append(signal)
 
     # add reset requests to pwrmgr connections
-    signal_names = ["{}.{}".format(s["module"].lower(), s["name"].lower())
-                    for s in topcfg["reset_requests"]]
+    signal_names = [
+        "{}.{}".format(s["module"].lower(), s["name"].lower())
+        for s in topcfg["reset_requests"]
+    ]
     # TBD: What's the best way to not hardcode this signal below?
     #      We could make this a top.hjson variable and validate it against pwrmgr hjson
     topcfg["inter_module"]["connect"]["pwrmgr.rstreqs"] = signal_names
diff --git a/util/topgen/validate.py b/util/topgen/validate.py
index 145d01e..6c5b6c6 100644
--- a/util/topgen/validate.py
+++ b/util/topgen/validate.py
@@ -2,8 +2,8 @@
 # Licensed under the Apache License, Version 2.0, see LICENSE for details.
 # SPDX-License-Identifier: Apache-2.0
 import logging as log
-from enum import Enum
 from collections import OrderedDict
+from enum import Enum
 
 from reggen.validate import check_keys
 
@@ -123,7 +123,8 @@
 eflash_required = {
     'banks': ['d', 'number of flash banks'],
     'pages_per_bank': ['d', 'number of data pages per flash bank'],
-    'program_resolution': ['d', 'maximum number of flash words allowed to program'],
+    'program_resolution':
+    ['d', 'maximum number of flash words allowed to program'],
     'clock_srcs': ['g', 'clock connections'],
     'clock_group': ['s', 'associated clock attribute group'],
     'reset_connections': ['g', 'reset connections'],
@@ -208,7 +209,6 @@
         mem['pgm_resolution_bytes'] = int(self.program_resolution * word_bytes)
 
 
-
 # Check to see if each module/xbar defined in top.hjson exists as ip/xbar.hjson
 # Also check to make sure there are not multiple definitions of ip/xbar.hjson for each
 # top level definition
@@ -293,7 +293,8 @@
     # check clock fields are all there
     ext_srcs = []
     for src in top['clocks']['srcs']:
-        check_keys(src, clock_srcs_required, clock_srcs_optional, {}, "Clock source")
+        check_keys(src, clock_srcs_required, clock_srcs_optional, {},
+                   "Clock source")
         ext_srcs.append(src['name'])
 
     # check derived clock sources
@@ -304,12 +305,15 @@
             ext_srcs.index(src['src'])
         except Exception:
             error += 1
-            log.error("{} is not a valid src for {}".format(src['src'], src['name']))
+            log.error("{} is not a valid src for {}".format(
+                src['src'], src['name']))
 
     # all defined clock/reset nets
     reset_nets = [reset['name'] for reset in top['resets']['nodes']]
-    clock_srcs = [clock['name'] for clock in top['clocks']['srcs'] +
-                  top['clocks']['derived_srcs']]
+    clock_srcs = [
+        clock['name']
+        for clock in top['clocks']['srcs'] + top['clocks']['derived_srcs']
+    ]
 
     # Check clock/reset port connection for all IPs
     for ipcfg in top['module']:
@@ -486,7 +490,8 @@
             else:
                 for domain in reset['domains']:
                     if domain not in top['power']['domains']:
-                        log.error("{} defined invalid domain {}".format(reset['name'], domain))
+                        log.error("{} defined invalid domain {}".format(
+                            reset['name'], domain))
                         error += 1
                         return error
 
@@ -500,7 +505,8 @@
 
             end_point['domain'] = top['power']['default']
         elif end_point['domain'] not in top['power']['domains']:
-            log.error("{} defined invalid domain {}".format(end_point['name'], end_point['domain']))
+            log.error("{} defined invalid domain {}".format(
+                end_point['name'], end_point['domain']))
             error += 1
             return error