[dv regr tool] Support for multi IP sim runs

- Added support for running sims for multiple IPs in parallel
  Command line:
  ```console
  $ util/dvsim.py hw/dv/data/master_sim_cfgs_list.hjson -i sanity --reseed 1 [--purge]
  ```
  - The master_sim_cfgs_list.hjson only adds UART and HMAC hjsons - need to
  add the cfg hjsons for other IPs once they are available

- Added 'FlowCfg.py' which serves as base class implementation for hjson
based infrastructure specification - SimCfg now extends from it. This is
done in preparation to add support for other ASIC flows such as FPV,
lint, etc in future.

- Several optimizations to reduce run time (12x speedup from before!)

Signed-off-by: Srikrishna Iyer <sriyer@google.com>
diff --git a/util/dvsim.py b/util/dvsim.py
index d3572c0..7c54453 100755
--- a/util/dvsim.py
+++ b/util/dvsim.py
@@ -13,15 +13,13 @@
 """
 
 import argparse
-import glob
+import datetime
 import logging as log
 import os
 import subprocess
 import sys
 from pathlib import Path
 
-import hjson
-
 from dvsim import Deploy, SimCfg, utils
 
 # TODO: add dvsim_cfg.hjson to retrieve this info
@@ -35,24 +33,27 @@
 # Try to create the directory if it does not already exist.
 def resolve_scratch_root(arg_scratch_root):
     scratch_root = os.environ.get('SCRATCH_ROOT')
-    if arg_scratch_root == "scratch":
+    if arg_scratch_root == None or arg_scratch_root == "":
         if scratch_root == None:
-            arg_scratch_root = os.getcwd() + '/' + arg_scratch_root
+            arg_scratch_root = os.getcwd() + "/scratch"
         else:
             # Scratch space could be mounted in a filesystem (such as NFS) on a network drive.
             # If the network is down, it could cause the access access check to hang. So run a
             # simple ls command with a timeout to prevent the hang.
             (out,
              status) = utils.run_cmd_with_timeout(cmd="ls -d " + scratch_root,
-                                                  timeout=5,
+                                                  timeout=1,
                                                   exit_on_failure=0)
             if status == 0 and out != "":
                 arg_scratch_root = scratch_root
             else:
-                arg_scratch_root = os.getcwd() + '/' + arg_scratch_root
+                arg_scratch_root = os.getcwd() + "/scratch"
                 log.warning(
                     "Env variable $SCRATCH_ROOT=\"%s\" is not accessible.\n" +
                     "Using \"%s\" instead.", scratch_root, arg_scratch_root)
+    else:
+        arg_scratch_root = os.path.realpath(arg_scratch_root)
+
     try:
         os.system("mkdir -p " + arg_scratch_root)
     except:
@@ -118,7 +119,6 @@
     parser.add_argument(
         "-sr",
         "--scratch-root",
-        default="scratch",
         metavar="path",
         help="""root scratch directory path where all build / run drectories go;
                       by default, the tool will create the {scratch_path} = {scratch_root}/{dut}
@@ -271,7 +271,7 @@
     parser.add_argument("--purge",
                         default=False,
                         action='store_true',
-                        help="Clean the scratch directory.")
+                        help="Clean the scratch directory before running.")
 
     parser.add_argument(
         "-mo",
@@ -379,25 +379,49 @@
     args.branch = resolve_branch(args.branch)
     args.cfg = os.path.abspath(args.cfg)
 
+    # Add timestamp to args that all downstream objects can use.
+    # Static variables - indicate timestamp.
+    ts_format_long = "%A %B %d %Y %I:%M:%S%p %Z"
+    ts_format = "%a.%m.%d.%y__%I.%M.%S%p"
+    curr_ts = datetime.datetime.now()
+    timestamp_long = curr_ts.strftime(ts_format_long)
+    timestamp = curr_ts.strftime(ts_format)
+    setattr(args, "ts_format_long", ts_format_long)
+    setattr(args, "ts_format", ts_format)
+    setattr(args, "timestamp_long", timestamp_long)
+    setattr(args, "timestamp", timestamp)
+
+    # Register the seeds from command line with RunTest class.
+    Deploy.RunTest.seeds = args.seeds
+
+    # Register the common deploy settings.
+    Deploy.Deploy.print_interval = args.print_interval
+    Deploy.Deploy.max_parallel = args.max_parallel
+    Deploy.Deploy.max_odirs = args.max_odirs
+
     # Build infrastructure from hjson file and create the list of items to
     # be deployed.
-    cfg = SimCfg.SimCfg(proj_root=get_proj_root(), args=args)
+    # TODO: SimCfg item below implies DV - need to solve this once we add FPV
+    # and other ASIC flow targets.
+    cfg = SimCfg.SimCfg(args.cfg, get_proj_root(), args)
 
     # Purge the scratch path if --purge option is set.
     if args.purge:
-        cfg.do_purge()
-        sys.exit(0)
+        cfg.purge()
 
     # List items available for run if --list switch is passed, and exit.
     if args.list != []:
         cfg.print_list()
         sys.exit(0)
 
+    # Create deploy objects.
+    cfg.create_deploy_objects()
+
     # Deploy the builds and runs
     Deploy.Deploy.deploy(cfg.deploy)
 
     # Generate results.
-    print(cfg.gen_results())
+    results = cfg.gen_results()
 
 
 if __name__ == '__main__':
diff --git a/util/dvsim/Deploy.py b/util/dvsim/Deploy.py
index 4b48ca7..e6b0291 100644
--- a/util/dvsim/Deploy.py
+++ b/util/dvsim/Deploy.py
@@ -58,22 +58,6 @@
         self.odir = ""
         self.log = ""
 
-        # Create directories with links for ease of debug / triage.
-        self.links = {
-            "D": self.sim_cfg.scratch_path + "/" + "dispatched",
-            "P": self.sim_cfg.scratch_path + "/" + "passed",
-            "F": self.sim_cfg.scratch_path + "/" + "failed",
-            "K": self.sim_cfg.scratch_path + "/" + "killed"
-        }
-
-        for link in self.links.keys():
-            try:
-                os.system("/bin/rm -rf " + self.links[link])
-                os.system("mkdir -p " + self.links[link])
-            except IOError:
-                log.error("Unable to create dir %s", self.links[link])
-                sys.exit(1)
-
         # Flag to indicate whether to 'overwrite' if odir already exists,
         # or to backup the existing one and create a new one.
         # For builds, we want to overwrite existing to leverage the tools'
@@ -176,8 +160,8 @@
         try:
             self.odir_limiter()
             os.system("mkdir -p " + self.odir)
-            os.system("ln -s " + self.odir + " " + self.links['D'] + '/' +
-                      self.odir_ln)
+            os.system("ln -s " + self.odir + " " + self.sim_cfg.links['D'] +
+                      '/' + self.odir_ln)
             f = open(self.log, "w")
             self.process = subprocess.Popen(args,
                                             stdout=f,
@@ -252,8 +236,8 @@
         if self.status == '.':
             log.error("Method unexpectedly called!")
         else:
-            cmd = "mv " + self.links['D'] + "/" + self.odir_ln + " " + \
-                  self.links[self.status] + "/."
+            cmd = "mv " + self.sim_cfg.links['D'] + "/" + self.odir_ln + " " + \
+                  self.sim_cfg.links[self.status] + "/."
             os.system(cmd)
 
     def get_status(self):
@@ -306,7 +290,7 @@
         num_secs = 0
         status = {}
         status_str = {}
-        targets_done = {}
+        status_str_prev = {}
 
         while all_done == 0:
             time.sleep(1)
@@ -315,8 +299,6 @@
             for item in Deploy.items:
                 if item.target not in status.keys():
                     status[item.target] = {}
-                if item.target not in targets_done.keys():
-                    targets_done[item.target] = False
                 if item not in status[item.target].keys():
                     status[item.target][item] = ""
 
@@ -336,7 +318,6 @@
             else:
                 num_slots = Deploy.max_parallel - Deploy.dispatch_counter
                 if num_slots > 0:
-                    trig_print = True
                     if len(dispatch_items_queue) > num_slots:
                         dispatch_items(dispatch_items_queue[0:num_slots])
                         dispatch_items_queue = dispatch_items_queue[num_slots:]
@@ -344,6 +325,7 @@
                         dispatch_items(dispatch_items_queue)
                         dispatch_items_queue = []
 
+            status_str_prev = status_str.copy()
             status_str = {}
             for target in status.keys():
                 if target not in status_str.keys(): status_str[target] = "["
@@ -357,11 +339,12 @@
             # Print the status string periodically
             if trig_print:
                 for target in status_str.keys():
-                    if targets_done[target] is True: continue
+                    if (target in status_str_prev.keys()) and \
+                       (status_str[target] == status_str_prev[target]) and \
+                       (status_str[target].find(".") == -1):
+                        continue
                     log.info("[dvsim]: [%06ds] [%s]: %s", num_secs, target,
                              status_str[target])
-                    if status_str[target].find(".") == -1:
-                        targets_done[target] = True
 
 
 class CompileSim(Deploy):
@@ -452,7 +435,6 @@
         self.test = self.name
         self.renew_odir = True
         self.build_mode = test.build_mode.name
-        self.scratch_path = sim_cfg.scratch_path
         self.__post_init__()
         # Construct custom odir link name for RunTest items by combining name
         # and index
diff --git a/util/dvsim/FlowCfg.py b/util/dvsim/FlowCfg.py
new file mode 100644
index 0000000..4a10e62
--- /dev/null
+++ b/util/dvsim/FlowCfg.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+r"""
+Class describing a flow configuration object
+"""
+
+import logging as log
+import pprint
+
+from .utils import *
+
+
+# Interface class for extensions.
+class FlowCfg():
+    def __str__(self):
+        return pprint.pformat(self.__dict__)
+
+    def __repr__(self):
+        return pprint.pformat(self.__dict__)
+
+    def __init__(self, flow_cfg_file, proj_root, args):
+        # Options set from command line
+        self.flow_cfg_file = flow_cfg_file
+        self.proj_root = proj_root
+        self.args = args
+        self.scratch_root = args.scratch_root
+
+        # Imported cfg files using 'import_cfgs' keyword
+        self.imported_cfg_files = []
+        self.imported_cfg_files.append(flow_cfg_file)
+
+        # List of cfgs if the parsed cfg is a master cfg list
+        self.cfgs = []
+
+        # Add a notion of "master" cfg - this is indicated using
+        # a special key 'use_cfgs' within the hjson cfg.
+        self.is_master_cfg = False
+
+        # Timestamp
+        self.ts_format_long = args.ts_format_long
+        self.timestamp_long = args.timestamp_long
+        self.ts_format = args.ts_format
+        self.timestamp = args.timestamp
+
+    def __post_init__(self):
+        # Run some post init checks
+        if not self.is_master_cfg:
+            # Check if self.cfgs is a list of exactly 1 item (self)
+            if not (len(self.cfgs) == 1 and self.cfgs[0].name == self.name):
+                log.error("Parse error!\n%s", self.cfgs)
+                sys.exit(1)
+
+    @staticmethod
+    def create_instance(flow_cfg_file, proj_root, args):
+        '''Create a new instance of this class with the given parameters.
+        '''
+        return FlowCfg(flow_cfg_file, proj_root, args)
+
+    def parse_flow_cfg(self, flow_cfg_file, is_entry_point=True):
+        '''
+        Parse the flow cfg hjson file. This is a private API used within the
+        extended class' __init__ function. This parses the hjson cfg (and
+        imports / use cfgs) and builds an initial dictionary.
+
+        This method takes 2 args.
+        flow_cfg_file: This is the flow cfg file to be parsed.
+        is_entry_point: the cfg file that is passed on the command line is
+            the entry point cfg. If the cfg file is a part of an import_cfgs
+            or use_cfgs key, then it is not an entry point.
+        '''
+        hjson_dict = parse_hjson(flow_cfg_file)
+
+        # Check if this is the master cfg, if this is the entry point cfg file
+        if is_entry_point:
+            self.is_master_cfg = self.check_if_master_cfg(hjson_dict)
+
+            # If not a master cfg, then register self with self.cfgs
+            if self.is_master_cfg is False:
+                self.cfgs.append(self)
+
+        # Resolve the raw hjson dict to build this object
+        self.resolve_hjson_raw(hjson_dict)
+
+    def check_if_master_cfg(self, hjson_dict):
+        # This is a master cfg only if it has a single key called "use_cfgs"
+        # which contains a list of actual flow cfgs.
+        hjson_cfg_dict_keys = hjson_dict.keys()
+        return (len(hjson_cfg_dict_keys) == 1 and \
+                "use_cfgs" in hjson_cfg_dict_keys and \
+                type(hjson_dict["use_cfgs"]) is list)
+
+    def resolve_hjson_raw(self, hjson_dict):
+        attrs = self.__dict__.keys()
+        rm_hjson_dict_keys = []
+        import_cfgs = []
+        use_cfgs = []
+        for key in hjson_dict.keys():
+            if key in attrs:
+                hjson_dict_val = hjson_dict[key]
+                self_val = getattr(self, key)
+                scalar_types = {str: [""], int: [0, -1], bool: [False]}
+
+                # Case 1: key value in class and hjson_dict differ - error!
+                if type(hjson_dict_val) != type(self_val):
+                    log.error("Coflicting key types: \"%s\" {\"%s, \"%s\"}",
+                              key,
+                              type(hjson_dict_val).__name__,
+                              type(self_val).__name__)
+                    sys.exit(1)
+
+                # Case 2: key value in class and hjson_dict are strs - set if
+                # not already set, else error!
+                elif type(hjson_dict_val) in scalar_types.keys():
+                    defaults = scalar_types[type(hjson_dict_val)]
+                    if self_val == hjson_dict_val:
+                        rm_hjson_dict_keys.append(key)
+                    elif self_val in defaults and not hjson_dict_val in defaults:
+                        setattr(self, key, hjson_dict_val)
+                        rm_hjson_dict_keys.append(key)
+                    elif not self_val in defaults and not hjson_dict_val in defaults:
+                        log.error(
+                            "Coflicting values {\"%s\", \"%s\"} encountered for key \"%s\"",
+                            str(self_val), str(hjson_dict_val), key)
+                        sys.exit(1)
+
+                # Case 3: key value in class and hjson_dict are lists - merge'em
+                elif type(hjson_dict_val) is list and type(self_val) is list:
+                    self_val.extend(hjson_dict_val)
+                    setattr(self, key, self_val)
+                    rm_hjson_dict_keys.append(key)
+
+                # Case 4: unknown issue
+                else:
+                    log.error(
+                        "Type of \"%s\" (%s) in %s appears to be invalid (should be %s)",
+                        key,
+                        type(hjson_dict_val).__name__, hjson_dict,
+                        type(self_val).__name__)
+                    sys.exit(1)
+
+            # If key is 'import_cfgs' then add to the list of cfgs to
+            # process
+            elif key == 'import_cfgs':
+                import_cfgs.extend(hjson_dict[key])
+                rm_hjson_dict_keys.append(key)
+
+            # If this is a master cfg list and the key is 'use_cfgs'
+            elif self.is_master_cfg and key == "use_cfgs":
+                use_cfgs.extend(hjson_dict[key])
+
+            # If this is a not master cfg list and the key is 'use_cfgs'
+            elif not self.is_master_cfg and key == "use_cfgs":
+                # Throw an error and exit
+                log.error(
+                    "Key \"use_cfgs\" encountered in a non-master cfg file list \"%s\"",
+                    self.flow_cfg_file)
+                sys.exit(1)
+
+            else:
+                # add key-value to class
+                setattr(self, key, hjson_dict[key])
+                rm_hjson_dict_keys.append(key)
+
+        # Parse imported cfgs
+        for cfg_file in import_cfgs:
+            if not cfg_file in self.imported_cfg_files:
+                self.imported_cfg_files.append(cfg_file)
+                # Substitute wildcards in cfg_file files since we need to process
+                # them right away.
+                cfg_file = subst_wildcards(cfg_file, self.__dict__)
+                self.parse_flow_cfg(cfg_file, False)
+            else:
+                log.error("Cfg file \"%s\" has already been parsed", cfg_file)
+
+        # Parse master cfg files
+        if self.is_master_cfg:
+            for cfg_file in use_cfgs:
+                # Substitute wildcards in cfg_file files since we need to process
+                # them right away.
+                cfg_file = subst_wildcards(cfg_file, self.__dict__)
+                self.cfgs.append(
+                    self.create_instance(cfg_file, self.proj_root, self.args))
+
+    def _process_exports(self):
+        # Convert 'exports' to dict
+        exports_dict = {}
+        if self.exports != []:
+            for item in self.exports:
+                if type(item) is dict:
+                    exports_dict.update(item)
+                elif type(item) is str:
+                    [key, value] = item.split(':', 1)
+                    if type(key) is not str: key = str(key)
+                    if type(value) is not str: value = str(value)
+                    exports_dict.update({key.strip(): value.strip()})
+                else:
+                    log.error("Type error in \"exports\": %s", str(item))
+                    sys.exit(1)
+        self.exports = exports_dict
+
+    def _purge(self):
+        '''Purge the existing scratch areas in preparation for the new run.'''
+        return
+
+    def purge(self):
+        '''Public facing API for _purge().
+        '''
+        for item in self.cfgs:
+            item._purge()
+
+    def _print_list(self):
+        '''Print the list of available items that can be kicked off.
+        '''
+        return
+
+    def print_list(self):
+        '''Public facing API for _print_list().
+        '''
+        for item in self.cfgs:
+            item._print_list()
+
+    def _create_deploy_objects(self):
+        '''Create deploy objects from items that were passed on for being run.
+        The deploy objects for build and run are created from the objects
+        returned by the create_objects() method.
+        '''
+        return
+
+    def create_deploy_objects(self):
+        '''Public facing API for _create_deploy_objects().
+        '''
+        self.deploy = []
+        for item in self.cfgs:
+            item._create_deploy_objects()
+            self.deploy.extend(item.deploy)
+
+    def _gen_results(self, fmt="md"):
+        '''
+        The function is called after the flow has executed. It collates the status of
+        all run targets and generates a dict. It parses the testplan and maps the generated
+        result to the testplan entries to generate a final table (list). It uses the fmt arg
+        to dump the final result as a markdown or html.
+        '''
+        return
+
+    def gen_results(self, fmt="md"):
+        '''Public facing API for _gen_results().
+        '''
+        results = []
+        for item in self.cfgs:
+            result = item._gen_results(fmt)
+            print(result)
+            results.append(result)
+        return results
diff --git a/util/dvsim/SimCfg.py b/util/dvsim/SimCfg.py
index 15ea425..87db098 100644
--- a/util/dvsim/SimCfg.py
+++ b/util/dvsim/SimCfg.py
@@ -6,55 +6,40 @@
 Class describing simulation configuration object
 """
 
-import datetime
 import logging as log
-import pprint
-import re
 import sys
 
-import hjson
-
 from testplanner import class_defs, testplan_utils
 
 from .Deploy import *
+from .FlowCfg import FlowCfg
 from .Modes import *
 from .utils import *
 
 
-class SimCfg():
+class SimCfg(FlowCfg):
     """Simulation configuration object
 
     A simulation configuration class holds key information required for building a DV
     regression framework.
     """
-
-    # Maintain a list of registered top level cfgs
-    cfgs = []
-
-    # Static variables - indicate timestamp.
-    ts_format_long = "%A %B %d %Y %I:%M:%S%p %Z"
-    ts_format = "%a.%m.%d.%y__%I.%M.%S%p"
-
-    def __str__(self):
-        return pprint.pformat(self.__dict__)
-
-    def __repr__(self):
-        return pprint.pformat(self.__dict__)
-
-    def __init__(self, proj_root, args):
+    def __init__(self, flow_cfg_file, proj_root, args):
+        super().__init__(flow_cfg_file, proj_root, args)
         # Options set from command line
-        self.cfg_files = []
-        self.cfg_files.append(args.cfg)
-        self.items = args.items
-        self.list_items = args.list
+        self.items = []
+        self.items.extend(args.items)
+        self.list_items = []
+        self.list_items.extend(args.list)
         self.simulator = args.simulator
-        self.proj_root = proj_root
-        self.scratch_root = args.scratch_root
         self.branch = args.branch
-        self.build_opts = args.build_opts
-        self.en_build_modes = args.build_modes
-        self.run_opts = args.run_opts
-        self.en_run_modes = args.run_modes
+        self.build_opts = []
+        self.build_opts.extend(args.build_opts)
+        self.en_build_modes = []
+        self.en_build_modes.extend(args.build_modes)
+        self.run_opts = []
+        self.run_opts.extend(args.run_opts)
+        self.en_run_modes = []
+        self.en_run_modes.extend(args.run_modes)
         self.build_unique = args.build_unique
         self.build_only = args.build_only
         self.run_only = args.run_only
@@ -111,21 +96,17 @@
         self.dump_file = ""
         self.exports = []
 
-        # Register the seeds from command line with RunTest class.
-        RunTest.seeds = args.seeds
-
-        # Register the common deploy settings.
-        Deploy.print_interval = args.print_interval
-        Deploy.max_parallel = args.max_parallel
-        Deploy.max_odirs = args.max_odirs
-
-        # Current timestamp
-        curr_ts = datetime.datetime.now()
-        self.timestamp_long = curr_ts.strftime(SimCfg.ts_format_long)
-        self.timestamp = curr_ts.strftime(SimCfg.ts_format)
+        # Generated data structures
+        self.links = {}
+        self.build_list = []
+        self.run_list = []
+        self.deploy = []
 
         # Parse the cfg_file file tree
-        self.parse_sim_cfg(args.cfg)
+        self.parse_flow_cfg(flow_cfg_file)
+
+        # Stop here if this is a master cfg list
+        if self.is_master_cfg: return
 
         # If build_unique is set, then add current timestamp to uniquify it
         if self.build_unique:
@@ -140,38 +121,42 @@
                                                       self.__dict__,
                                                       ignored_wildcards)
 
-        # Check if there are items to run
-        if self.items == []:
-            log.error(
-                "No items provided for running this simulation / regression")
-            sys.exit(1)
+        # Print info
+        log.info("Scratch path for %s: %s", self.name, self.scratch_path)
+
+        # Set directories with links for ease of debug / triage.
+        self.links = {
+            "D": self.scratch_path + "/" + "dispatched",
+            "P": self.scratch_path + "/" + "passed",
+            "F": self.scratch_path + "/" + "failed",
+            "K": self.scratch_path + "/" + "killed"
+        }
 
         # Use the default build mode for tests that do not specify it
         if not hasattr(self, "build_mode"):
             setattr(self, "build_mode", "default")
 
-        self.process_exports()
+        self._process_exports()
 
         # Create objects from raw dicts - build_modes, sim_modes, run_modes,
         # tests and regressions
-        self.create_objects()
+        self._create_objects()
 
-        # Look at list of items and build the list of tests to run
-        self.deploy = []
-        self.build_list = []
-        self.run_list = []
-        self.create_build_and_run_list()
+        # Post init checks
+        self.__post_init__()
 
-        # Create deploy objects
-        self.create_deploy_objects()
+    def __post_init__(self):
+        # Run some post init checks
+        super().__post_init__()
 
-        # Print info
-        log.info("Scratch path: %s", self.scratch_path)
+    @staticmethod
+    def create_instance(flow_cfg_file, proj_root, args):
+        '''Create a new instance of this class with the given parameters.
+        '''
+        return SimCfg(flow_cfg_file, proj_root, args)
 
-        # Register self
-        SimCfg.cfgs.append(self)
-
-    def do_purge(self):
+    # Purge the output directories. This operates on self.
+    def _purge(self):
         if self.scratch_path is not "":
             try:
                 log.info("Purging scratch path %s", self.scratch_path)
@@ -180,108 +165,7 @@
                 log.error('Failed to purge scratch directory %s',
                           self.scratch_path)
 
-            # TODO: can't exit here!
-            sys.exit(0)
-
-    def process_exports(self):
-        # Convert 'exports' to dict
-        exports_dict = {}
-        if self.exports != []:
-            for item in self.exports:
-                if type(item) is dict:
-                    exports_dict.update(item)
-                elif type(item) is str:
-                    [key, value] = item.split(':', 1)
-                    if type(key) is not str: key = str(key)
-                    if type(value) is not str: value = str(value)
-                    exports_dict.update({key.strip(): value.strip()})
-                else:
-                    log.error("Type error in \"exports\": %s", str(item))
-                    sys.exit(1)
-        self.exports = exports_dict
-
-    def parse_sim_cfg(self, sim_cfg_file):
-        try:
-            log.debug("Parsing %s", sim_cfg_file)
-            f = open(sim_cfg_file, 'rU')
-            text = f.read()
-            f.close()
-        except:
-            log.fatal("Failed to parse \"%s\"", sim_cfg_file)
-            sys.exit(1)
-        self.resolve_hjson_raw(hjson.loads(text, use_decimal=True))
-
-    def resolve_hjson_raw(self, hjson_dict):
-        attrs = self.__dict__.keys()
-        rm_hjson_dict_keys = []
-        import_cfgs = []
-        for key in hjson_dict.keys():
-            if key in attrs:
-                hjson_dict_val = hjson_dict[key]
-                self_val = getattr(self, key)
-                scalar_types = {str: [""], int: [0, -1], bool: [False]}
-
-                # Case 1: key value in class and hjson_dict differ - error!
-                if type(hjson_dict_val) != type(self_val):
-                    log.error("Coflicting key types: \"%s\" {\"%s, \"%s\"}",
-                              key,
-                              type(hjson_dict_val).__name__,
-                              type(self_val).__name__)
-                    sys.exit(1)
-
-                # Case 2: key value in class and hjson_dict are strs - set if
-                # not already set, else error!
-                elif type(hjson_dict_val) in scalar_types.keys():
-                    defaults = scalar_types[type(hjson_dict_val)]
-                    if self_val == hjson_dict_val:
-                        rm_hjson_dict_keys.append(key)
-                    elif self_val in defaults and not hjson_dict_val in defaults:
-                        setattr(self, key, hjson_dict_val)
-                        rm_hjson_dict_keys.append(key)
-                    elif not self_val in defaults and not hjson_dict_val in defaults:
-                        log.error(
-                            "Coflicting values {\"%s\", \"%s\"} encountered for key \"%s\"",
-                            str(self_val), str(hjson_dict_val), key)
-                        sys.exit(1)
-
-                # Case 3: key value in class and hjson_dict are lists - merge'em
-                elif type(hjson_dict_val) is list and type(self_val) is list:
-                    self_val.extend(hjson_dict_val)
-                    setattr(self, key, self_val)
-                    rm_hjson_dict_keys.append(key)
-
-                # Case 4: unknown issue
-                else:
-                    log.error(
-                        "Type of \"%s\" (%s) in %s appears to be invalid (should be %s)",
-                        key,
-                        type(hjson_dict_val).__name__, hjson_dict,
-                        type(self_val).__name__)
-                    sys.exit(1)
-            # If key is 'import_cfgs' then add to the list of sim_cfgs to
-            # process
-            elif key == 'import_cfgs':
-                import_cfgs.extend(hjson_dict[key])
-                rm_hjson_dict_keys.append(key)
-
-            else:
-                # add key-value to class
-                setattr(self, key, hjson_dict[key])
-                rm_hjson_dict_keys.append(key)
-
-        # Parse imported sim_cfgs
-        for cfg_file in import_cfgs:
-            if not cfg_file in self.cfg_files:
-                self.cfg_files.append(cfg_file)
-                # Substitute wildcards in cfg_file files since we need to process
-                # them right away.
-                cfg_file = subst_wildcards(cfg_file, self.__dict__)
-                self.parse_sim_cfg(cfg_file)
-            else:
-                log.error("Sim cfg file \"%s\" has already been parsed",
-                          cfg_file)
-
-    def create_objects(self):
+    def _create_objects(self):
         # Create build and run modes objects
         build_modes = Modes.create_modes(BuildModes,
                                          getattr(self, "build_modes"))
@@ -322,8 +206,9 @@
             getattr(self, "regressions"), self, tests)
         setattr(self, "regressions", regressions)
 
-    def print_list(self):
+    def _print_list(self):
         for list_item in self.list_items:
+            log.info("---- List of %s in %s ----", list_item, self.name)
             if hasattr(self, list_item):
                 items = getattr(self, list_item)
                 for item in items:
@@ -331,7 +216,7 @@
             else:
                 log.error("Item %s does not exist!", list_item)
 
-    def create_build_and_run_list(self):
+    def _create_build_and_run_list(self):
         # Walk through the list of items to run and create the build and run
         # objects.
         # Allow multiple regressions to run as long as the do not enable
@@ -349,6 +234,12 @@
                 if item not in marked_items: pruned_items.append(item)
             return pruned_items
 
+        # Check if there are items to run
+        if self.items == []:
+            log.error(
+                "No items provided for running this simulation / regression")
+            sys.exit(1)
+
         items_list = self.items
         run_list_names = []
         marked_items = []
@@ -388,8 +279,9 @@
 
         # Check if all items has been processed
         if items_list != []:
-            log.error("The items %s added for run were not found! Use the --list switch " + \
-                      "to see a list of available tests / regressions for run", items_list)
+            log.error("The items %s added for run were not found in \n%s!" + \
+                      "\nUse the --list switch to see a list of available tests / regressions.", \
+                      items_list, self.flow_cfg_file)
             sys.exit(1)
 
         # Process reseed override and create the build_list
@@ -407,7 +299,32 @@
                 self.build_list.append(test.build_mode)
                 build_list_names.append(test.build_mode.name)
 
-    def create_deploy_objects(self):
+    def _create_dirs(self):
+        '''Create initial set of directories
+        '''
+        # Invoking system calls has a performance penalty.
+        # Construct a single command line chained with '&&' to invoke
+        # the system call only once, rather than multiple times.
+        create_link_dirs_cmd = ""
+        for link in self.links.keys():
+            create_link_dirs_cmd += "/bin/rm -rf " + self.links[link] + " && "
+            create_link_dirs_cmd += "mkdir -p " + self.links[link] + " && "
+        create_link_dirs_cmd += " true"
+
+        try:
+            os.system(create_link_dirs_cmd)
+        except IOError:
+            log.error("Error running when running the cmd \"%s\"",
+                      create_link_dirs_cmd)
+            sys.exit(1)
+
+    def _create_deploy_objects(self):
+        '''Create deploy objects from the build and run lists.
+        '''
+
+        # Create the build and run list first
+        self._create_build_and_run_list()
+
         builds = []
         build_map = {}
         for build in self.build_list:
@@ -428,12 +345,15 @@
         else:
             self.deploy = builds
 
-    def gen_results(self, fmt="md"):
+        # Create initial set of directories before kicking off the regression.
+        self._create_dirs()
+
+    def _gen_results(self, fmt="md"):
         '''
         The function is called after the regression has completed. It collates the status of
         all run targets and generates a dict. It parses the testplan and maps the generated
         result to the testplan entries to generate a final table (list). It uses the fmt arg
-        to dump the final result as a markdown of html.
+        to dump the final result as a markdown or html.
         '''
 
         # TODO: add support for html
diff --git a/util/dvsim/utils.py b/util/dvsim/utils.py
index cc2c80f..29ead4d 100644
--- a/util/dvsim/utils.py
+++ b/util/dvsim/utils.py
@@ -51,7 +51,7 @@
         while time.time() - start < timeout:
             if p.poll() is not None:
                 break
-            time.sleep(0.2)
+            time.sleep(.01)
 
     # Capture output and status if cmd exited, else kill it
     if p.poll() is not None:
@@ -68,6 +68,22 @@
     return (result, status)
 
 
+# Parse hjson and return a dict
+def parse_hjson(hjson_file):
+    hjson_cfg_dict = None
+    try:
+        log.debug("Parsing %s", hjson_file)
+        f = open(hjson_file, 'rU')
+        text = f.read()
+        hjson_cfg_dict = hjson.loads(text, use_decimal=True)
+        f.close()
+    except:
+        log.fatal("Failed to parse \"%s\" possibly due to bad path",
+                  hjson_file)
+        sys.exit(1)
+    return hjson_cfg_dict
+
+
 def subst_wildcards(var, mdict, ignored_wildcards=[]):
     '''
     If var has wildcards specified within {..}, find and substitute them.