Create project
All checks were successful
build / test (3.10) (push) Successful in 6s
build / test (3.11) (push) Successful in 6s
build / test (3.12) (push) Successful in 6s
build / test (3.13) (push) Successful in 6s
build / test (3.9) (push) Successful in 6s
build / lint (push) Successful in 7s
build / mypy (push) Successful in 8s
build / test (3.10) (release) Successful in 6s
build / test (3.11) (release) Successful in 6s
build / test (3.12) (release) Successful in 6s
build / test (3.13) (release) Successful in 6s
build / test (3.9) (release) Successful in 6s
build / lint (release) Successful in 8s
build / mypy (release) Successful in 8s
build / Build distributions (push) Successful in 7s
build / Build distributions (release) Successful in 8s
build / deploy (push) Has been skipped
build / deploy (release) Successful in 6s

Byron Lathi
2025-11-23 13:20:23 -08:00
commit b43de9206b
34 changed files with 1788 additions and 0 deletions

156
.github/workflows/build.yml vendored Normal file

@@ -0,0 +1,156 @@
name: build
on:
push:
branches:
- master
- 'dev/**'
pull_request:
branches: [ master ]
release:
types:
- published
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
test:
strategy:
matrix:
python-version:
# - "3.7"
# - "3.8"
- "3.9"
- "3.10"
- "3.11"
- "3.12"
- "3.13"
# include:
# - os: ubuntu-latest
# older versions need older OS
# - python-version: "3.7"
# os: ubuntu-22.04
# - python-version: "3.8"
# os: ubuntu-22.04
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --break-system-packages -r tests/requirements.txt
- name: Install
run: |
python -m pip install --break-system-packages ".[cli]"
- name: Test
run: |
cd tests
pytest --cov=peakrdl_python_regmap
#-------------------------------------------------------------------------------
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install -r tests/requirements.txt
- name: Install
run: |
python -m pip install ".[cli]"
- name: Run Lint
run: |
pylint --rcfile tests/pylint.rc peakrdl_python_regmap
#-------------------------------------------------------------------------------
mypy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install -r tests/requirements.txt
- name: Install
run: |
python -m pip install ".[cli]"
- name: Type Check
run: |
mypy --config-file tests/mypy.ini src/peakrdl_python_regmap
#-------------------------------------------------------------------------------
build:
needs:
- test
- lint
- mypy
name: Build distributions
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
name: Install Python
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install build
- name: Build sdist
run: python -m build
- uses: actions/upload-artifact@v3
with:
name: dist
path: |
dist/*.tar.gz
dist/*.whl
#-------------------------------------------------------------------------------
deploy:
needs:
- build
runs-on: ubuntu-latest
environment: release
permissions:
id-token: write
# Only publish when a Gitea Release is created.
if: gitea.event_name == 'release'
steps:
- uses: actions/download-artifact@v3
with:
name: dist
path: dist
- run: python3 -m pip install twine --user --break-system-packages
- run: python3 -m pip install -U packaging --user --break-system-packages
- run: TWINE_PASSWORD=${{ secrets.PYPI_PAT }} TWINE_USERNAME=bslathi19 python -m twine upload --repository-url ${{ vars.CI_API_URL }} dist/*

16
.gitignore vendored Normal file

@@ -0,0 +1,16 @@
**/__pycache__
**/.vscode
**/.venv
**/.coverage
**/*.rpt
**/.pytest_cache
**/_build
build/
dist/
*.egg-info/
.coverage
build/
dist/
*.egg-info/
.eggs/
*.out

17
.readthedocs.yaml Normal file

@@ -0,0 +1,17 @@
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
version: 2
build:
os: ubuntu-22.04
tools:
python: "3.11"
sphinx:
configuration: docs/conf.py
python:
install:
- requirements: docs/requirements.txt
- method: pip
path: .

165
LICENSE Normal file

@@ -0,0 +1,165 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates
the terms and conditions of version 3 of the GNU General Public
License, supplemented by the additional permissions listed below.
0. Additional Definitions.
As used herein, "this License" refers to version 3 of the GNU Lesser
General Public License, and the "GNU GPL" refers to version 3 of the GNU
General Public License.
"The Library" refers to a covered work governed by this License,
other than an Application or a Combined Work as defined below.
An "Application" is any work that makes use of an interface provided
by the Library, but which is not otherwise based on the Library.
Defining a subclass of a class defined by the Library is deemed a mode
of using an interface provided by the Library.
A "Combined Work" is a work produced by combining or linking an
Application with the Library. The particular version of the Library
with which the Combined Work was made is also called the "Linked
Version".
The "Minimal Corresponding Source" for a Combined Work means the
Corresponding Source for the Combined Work, excluding any source code
for portions of the Combined Work that, considered in isolation, are
based on the Application, and not on the Linked Version.
The "Corresponding Application Code" for a Combined Work means the
object code and/or source code for the Application, including any data
and utility programs needed for reproducing the Combined Work from the
Application, but excluding the System Libraries of the Combined Work.
1. Exception to Section 3 of the GNU GPL.
You may convey a covered work under sections 3 and 4 of this License
without being bound by section 3 of the GNU GPL.
2. Conveying Modified Versions.
If you modify a copy of the Library, and, in your modifications, a
facility refers to a function or data to be supplied by an Application
that uses the facility (other than as an argument passed when the
facility is invoked), then you may convey a copy of the modified
version:
a) under this License, provided that you make a good faith effort to
ensure that, in the event an Application does not supply the
function or data, the facility still operates, and performs
whatever part of its purpose remains meaningful, or
b) under the GNU GPL, with none of the additional permissions of
this License applicable to that copy.
3. Object Code Incorporating Material from Library Header Files.
The object code form of an Application may incorporate material from
a header file that is part of the Library. You may convey such object
code under terms of your choice, provided that, if the incorporated
material is not limited to numerical parameters, data structure
layouts and accessors, or small macros, inline functions and templates
(ten or fewer lines in length), you do both of the following:
a) Give prominent notice with each copy of the object code that the
Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the object code with a copy of the GNU GPL and this license
document.
4. Combined Works.
You may convey a Combined Work under terms of your choice that,
taken together, effectively do not restrict modification of the
portions of the Library contained in the Combined Work and reverse
engineering for debugging such modifications, if you also do each of
the following:
a) Give prominent notice with each copy of the Combined Work that
the Library is used in it and that the Library and its use are
covered by this License.
b) Accompany the Combined Work with a copy of the GNU GPL and this license
document.
c) For a Combined Work that displays copyright notices during
execution, include the copyright notice for the Library among
these notices, as well as a reference directing the user to the
copies of the GNU GPL and this license document.
d) Do one of the following:
0) Convey the Minimal Corresponding Source under the terms of this
License, and the Corresponding Application Code in a form
suitable for, and under terms that permit, the user to
recombine or relink the Application with a modified version of
the Linked Version to produce a modified Combined Work, in the
manner specified by section 6 of the GNU GPL for conveying
Corresponding Source.
1) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (a) uses at run time
a copy of the Library already present on the user's computer
system, and (b) will operate properly with a modified version
of the Library that is interface-compatible with the Linked
Version.
e) Provide Installation Information, but only if you would otherwise
be required to provide such information under section 6 of the
GNU GPL, and only to the extent that such information is
necessary to install and execute a modified version of the
Combined Work produced by recombining or relinking the
Application with a modified version of the Linked Version. (If
you use option 4d0, the Installation Information must accompany
the Minimal Corresponding Source and Corresponding Application
Code. If you use option 4d1, you must provide the Installation
Information in the manner specified by section 6 of the GNU GPL
for conveying Corresponding Source.)
5. Combined Libraries.
You may place library facilities that are a work based on the
Library side by side in a single library together with other library
facilities that are not Applications and are not covered by this
License, and convey such a combined library under terms of your
choice, if you do both of the following:
a) Accompany the combined library with a copy of the same work based
on the Library, uncombined with any other library facilities,
conveyed under the terms of this License.
b) Give prominent notice with the combined library that part of it
is a work based on the Library, and explaining where to find the
accompanying uncombined form of the same work.
6. Revised Versions of the GNU Lesser General Public License.
The Free Software Foundation may publish revised and/or new versions
of the GNU Lesser General Public License from time to time. Such new
versions will be similar in spirit to the present version, but may
differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the
Library as you received it specifies that a certain numbered version
of the GNU Lesser General Public License "or any later version"
applies to it, you have the option of following the terms and
conditions either of that published version or of any later version
published by the Free Software Foundation. If the Library as you
received it does not specify a version number of the GNU Lesser
General Public License, you may choose any version of the GNU Lesser
General Public License ever published by the Free Software Foundation.
If the Library as you received it specifies that a proxy can decide
whether future versions of the GNU Lesser General Public License shall
apply, that proxy's public statement of acceptance of any version is
permanent authorization for you to choose that version for the
Library.

11
README.md Normal file

@@ -0,0 +1,11 @@
[![Documentation Status](https://readthedocs.org/projects/peakrdl-cheader/badge/?version=latest)](https://peakrdl-cheader.readthedocs.io)
[![build](https://git.byronlathi.com/bslathi19/PeakRDL-python-regmap/actions/workflows/build.yml/badge.svg)](https://git.byronlathi.com/bslathi19/PeakRDL-python-regmap/actions/workflows/build.yml/badge.svg?branch%3Amaster)
[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/peakrdl-cheader.svg)](https://pypi.org/project/peakrdl-cheader)
# PeakRDL Python
Generate Python addressmap files from a SystemRDL register model.
For the command line tool, see the [PeakRDL project](https://peakrdl.readthedocs.io).
## Documentation
See the [PeakRDL Python Documentation](https://peakrdl-cheader.readthedocs.io) for more details.
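
A minimal usage sketch of the exporter API added in this commit (the RDL file name and output path are illustrative):

from systemrdl import RDLCompiler
from peakrdl_python_regmap.exporter import PythonRegmapExporter

# Compile and elaborate an example SystemRDL source file
rdlc = RDLCompiler()
rdlc.compile_file("my_design.rdl")
top_node = rdlc.elaborate()

# Generate the Python register abstraction layer module
PythonRegmapExporter().export(top_node, path="regmap.py")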

20
docs/Makefile Normal file

@@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR = .
BUILDDIR = _build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

64
docs/conf.py Normal file

@@ -0,0 +1,64 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../src/'))
import datetime
# -- Project information -----------------------------------------------------
project = 'PeakRDL-Python'
copyright = '%d, Alex Mykyta' % datetime.datetime.now().year
author = 'Alex Mykyta'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc' ,
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_book_theme"
html_theme_options = {
"repository_url": "https://git.byronlath.com/bslathi19/PeakRDL-python",
"path_to_docs": "docs",
"use_download_button": False,
"use_source_button": True,
"use_repository_button": True,
"use_issues_button": True,
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []

13
docs/index.rst Normal file

@@ -0,0 +1,13 @@
Introduction
============
PeakRDL Python is a Python package which can be used to generate a register
abstraction layer Python class from a SystemRDL definition.
Features:
none
.. toctree::
:hidden:
self

50
pyproject.toml Normal file

@@ -0,0 +1,50 @@
[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"
[project]
name = "peakrdl-python-regmap"
dynamic = ["version"]
requires-python = ">=3.7"
dependencies = [
"systemrdl-compiler ~= 1.31",
"jinja2",
]
authors = [
{name="Byron Lathi"},
]
description = "Generate Python address map from a SystemRDL register model"
readme = "README.md"
license = {file = "LICENSE"}
keywords = [
"SystemRDL", "PeakRDL", "CSR", "compiler", "tool", "registers", "generator",
"Python", "software",
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)",
]
[project.optional-dependencies]
cli = [
"peakrdl-cli >= 1.2.3",
]
[project.urls]
Source = "https://git.byronlathi.com/bslathi19/PeakRDL-python"
Tracker = "https://git.byronlathi.com/bslathi19/PeakRDL-python/issues"
Changelog = "https://git.byronlathi.com/bslathi19/PeakRDL-python/releases"
#Documentation = "https://peakrdl-python.readthedocs.io/"
[tool.setuptools.dynamic]
version = {attr = "peakrdl_python_regmap.__about__.__version__"}
[project.entry-points."peakrdl.exporters"]
python = "peakrdl_python_regmap.__peakrdl__:Exporter"

View File

@@ -0,0 +1,2 @@
version_info = (0, 0, 1)
__version__ = ".".join([str(n) for n in version_info])

View File

View File

@@ -0,0 +1,25 @@
from typing import TYPE_CHECKING
from peakrdl.plugins.exporter import ExporterSubcommandPlugin
from .exporter import PythonRegmapExporter
if TYPE_CHECKING:
import argparse
from systemrdl.node import AddrmapNode
class Exporter(ExporterSubcommandPlugin):
short_desc = "Generate a Python regmap definition of an address space"
cfg_schema = {
}
def do_export(self, top_node: 'AddrmapNode', options: 'argparse.Namespace') -> None:
x = PythonRegmapExporter()
x.export(
top_node,
path=options.output
)

View File

@@ -0,0 +1,96 @@
from typing import Optional, List
from systemrdl.walker import RDLListener, RDLWalker, WalkerAction
from systemrdl.node import AddrmapNode, RegNode, AddressableNode
from .design_state import DesignState
class DesignScanner(RDLListener):
def __init__(self, ds: DesignState) -> None:
self.ds = ds
self.msg = ds.top_node.env.msg
self.prev_reg_stack: List[Optional[RegNode]]
self.prev_reg_stack = []
@property
def top_node(self) -> AddrmapNode:
return self.ds.top_node
def run(self) -> None:
RDLWalker().walk(self.top_node, self)
if self.msg.had_error:
self.msg.fatal(
"Unable to export due to previous errors"
)
def enter_AddressableComponent(self, node: AddressableNode) -> Optional[WalkerAction]:
if not isinstance(node, RegNode):
self.prev_reg_stack.append(None)
return WalkerAction.Continue
def exit_AddressableComponent(self, node: AddressableNode) -> Optional[WalkerAction]:
if not isinstance(node, RegNode):
self.prev_reg_stack.pop()
return WalkerAction.Continue
def enter_Reg(self, node: RegNode) -> Optional[WalkerAction]:
# Collect information about overlapping fields, if any.
overlapping_fields = []
fields = list(node.fields())
reg_bitmask = 0
for i, field in enumerate(fields):
field_bitmask = ((1 << field.width) - 1) << field.low
if field_bitmask & reg_bitmask:
# this field overlaps with a prior one
# Determine which one
for prior_field in fields[0:i]:
if prior_field.high >= field.low:
if prior_field.inst_name not in overlapping_fields:
overlapping_fields.append(prior_field.inst_name)
if field.inst_name not in overlapping_fields:
overlapping_fields.append(field.inst_name)
reg_bitmask |= field_bitmask
if overlapping_fields:
# Save info about this register for later.
self.ds.overlapping_fields[node.get_path()] = overlapping_fields
# Check previous adjacent register for overlap
prev_reg = self.prev_reg_stack[-1]
if prev_reg and ((prev_reg.raw_address_offset + prev_reg.total_size) > node.raw_address_offset):
# registers overlap!
# Registers shall be co-located.
# This restriction guarantees that overlaps can only happen in pairs,
# and avoids the more complex overlap scenarios that involve multiple registers.
if (
prev_reg.raw_address_offset != node.raw_address_offset # Same offset
or prev_reg.size != node.size # Same size
or prev_reg.total_size != node.total_size # Same array footprint
):
self.msg.error(
"C header export currently only supports registers that are co-located. "
f"See registers: '{prev_reg.inst_name}' and '{node.inst_name}.'",
node.inst_src_ref
)
# Save information about register overlap pair
self.ds.overlapping_reg_pairs[prev_reg.get_path()] = node.inst_name
# Check for sparse register arrays
if node.is_array and node.array_stride > node.size: # type: ignore # is_array implies array_stride is not none
self.msg.error(
"C header export does not support sparse arrays of registers. "
f"See register: '{node.inst_name}.'",
node.inst_src_ref
)
return WalkerAction.SkipDescendants
def exit_Reg(self, node: RegNode) -> None:
self.prev_reg_stack[-1] = node
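
The field-overlap check in enter_Reg reduces to bitmask arithmetic; a standalone sketch of the same idea, with illustrative field positions:

def field_mask(low: int, width: int) -> int:
    # Contiguous mask covering bits [low + width - 1 : low]
    return ((1 << width) - 1) << low

reg_bitmask = 0
for low, width in [(0, 8), (4, 4)]:  # two fields occupying bits [7:0] and [7:4]
    mask = field_mask(low, width)
    if mask & reg_bitmask:
        print(f"field at [{low + width - 1}:{low}] overlaps an earlier field")
    reg_bitmask |= mask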

View File

@@ -0,0 +1,30 @@
from typing import Any, Dict, List
import os
import jinja2 as jj
from systemrdl.node import AddrmapNode
class DesignState:
def __init__(self, top_node: AddrmapNode, kwargs: Any) -> None:
loader = jj.FileSystemLoader(os.path.join(os.path.dirname(__file__), "templates"))
self.jj_env = jj.Environment(
loader=loader,
undefined=jj.StrictUndefined
)
self.top_node = top_node
#------------------------
# Info about the design
#------------------------
# Each reg that has overlapping fields generates an entry:
# reg_path : list of field names involved in overlap
self.overlapping_fields: Dict[str, List[str]] = {}
# Pairs of overlapping registers
# first_reg_path : partner_register_name
self.overlapping_reg_pairs: Dict[str, str] = {}
#------------------------
# Extract compiler args
#------------------------
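
As a concrete illustration, for the overlap testcase later in this commit (fields f2/f3 share bit 1; registers r2/r3 share offset 0x14) the scanner would populate these dictionaries roughly as follows — the exact key strings depend on Node.get_path():

ds.overlapping_fields      # e.g. {"top.overlap_fields": ["f2", "f3"]}
ds.overlapping_reg_pairs   # e.g. {"top.r2": "r3"}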

View File

@@ -0,0 +1,41 @@
from typing import Any, Union
from systemrdl.node import RootNode, AddrmapNode
from .design_state import DesignState
from .design_scanner import DesignScanner
from .generator import Generator
# from .testcase_generator import TestcaseGenerator
class PythonRegmapExporter:
def export(self, node: Union[RootNode, AddrmapNode], path: str, **kwargs: Any) -> None:
"""
Parameters
----------
node: AddrmapNode
Top-level SystemRDL node to export.
path: str
Output file path
"""
# If it is the root node, skip to top addrmap
if isinstance(node, RootNode):
top_node = node.top
else:
top_node = node
ds = DesignState(top_node, kwargs)
# Check for stray kwargs
if kwargs:
raise TypeError(f"got an unexpected keyword argument '{list(kwargs.keys())[0]}'")
# Validate and collect info for export
DesignScanner(ds).run()
top_nodes = []
top_nodes.append(top_node)
# Write output
Generator(ds).run(path, top_nodes)
# if ds.testcase:
# TestcaseGenerator(ds).run(path, top_nodes)

View File

@@ -0,0 +1,52 @@
from typing import TextIO, Set, List, Union, Dict, Any
from systemrdl.walker import RDLListener, RDLWalker
from systemrdl.node import AddrmapNode, AddressableNode, MemNode, RegfileNode
from .design_state import DesignState
class Generator(RDLListener):
root_node: Union[AddrmapNode, MemNode, RegfileNode]
f: TextIO
def __init__(self, ds: DesignState) -> None:
self.ds = ds
self.defined_namespace: Set[str]
self.defined_namespace = set()
self.indent_level = 0
def run(self, path: str, top_nodes: List[AddrmapNode]) -> None:
with open(path, "w", encoding='utf-8') as f:
self.f = f
context: Dict[str, Any] = {}
# Stream header via jinja
template = self.ds.jj_env.get_template("regmap.py")
template.stream(context).dump(f) # type: ignore # jinja incorrectly typed
f.write("\n")
for node in top_nodes:
self.root_node = node
RDLWalker().walk(node, self)
def enter_AddressableComponent(self, node: AddressableNode) -> None:
self.f.write(f"{' '*self.indent_level*4}class {node.inst_name}Class(AddrNode):\n")
self.indent_level+=1
def exit_AddressableComponent(self, node: AddressableNode) -> None:
self.f.write(f"{' '*self.indent_level*4}def __init__(self, addr: int = 0):\n")
self.f.write(f"{' '*self.indent_level*4} self.addr = addr\n")
for child in node.children():
if isinstance(child, AddressableNode):
if child.is_array:
assert child.array_dimensions is not None
if len(child.array_dimensions) > 1:
raise NotImplementedError("Multidimensional arrays not supported")
self.f.write(f"{' '*self.indent_level*4} self.{child.inst_name} = [self.{child.inst_name}Class({child.raw_address_offset} + {child.size}*i for i in range({child.n_elements}))]\n")
else:
self.f.write(f"{' '*self.indent_level*4} self.{child.inst_name} = self.{child.inst_name}Class({child.address_offset})\n")
self.indent_level-=1
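
To illustrate the output, for a hypothetical design with a top addrmap containing one register, run() first streams the regmap.py template (which provides AddrNode) and then emits nested classes of roughly this shape — the names and the 0x0 offset are illustrative:

class AddrNode():
    addr: int

class topClass(AddrNode):
    class ctrlClass(AddrNode):
        def __init__(self, addr: int = 0):
            self.addr = addr
    def __init__(self, addr: int = 0):
        self.addr = addr
        self.ctrl = self.ctrlClass(0x0)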

View File

@@ -0,0 +1,32 @@
C_KEYWORDS = {
# Base
"auto", "break", "case", "char", "const", "continue", "default", "do",
"double", "else", "enum", "extern", "float", "for", "goto", "if", "int",
"long", "register", "return", "short", "signed", "sizeof", "static",
"struct", "switch", "typedef", "union", "unsigned", "void", "volatile",
"while",
# C99
"inline", "restrict", "_Bool", "_Complex", "_Imaginary",
# C11
"_Alignas", "_Alignof", "_Atomic", "_Generic", "_Noreturn",
"_Static_assert", "_Thread_local",
# C23
"alignas", "alignof", "bool", "constexpr", "false", "nullptr",
"static_assert", "thread_local", "true", "typeof", "typeof_unqual",
"_BitInt", "_Decimal128", "_Decimal32", "_Decimal64",
}
def kw_filter(s: str) -> str:
"""
Make all user identifiers 'safe' and ensure they do not collide with
C keywords.
If a C keyword is encountered, add an underscore suffix
"""
if s in C_KEYWORDS:
s += "_"
return s
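
A quick illustration of the filter's behavior (the identifiers are arbitrary):

kw_filter("case")    # -> "case_"  (C keyword, underscore appended)
kw_filter("status")  # -> "status" (not a keyword, unchanged)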

View File

View File

@@ -0,0 +1,2 @@
class AddrNode():
addr: int

View File

@@ -0,0 +1,37 @@
from typing import Union
from systemrdl.node import AddressableNode, AddrmapNode, Node, MemNode, RegfileNode
from .design_state import DesignState
def get_node_prefix(ds: DesignState, root_node: Union[AddrmapNode, MemNode, RegfileNode], node: AddressableNode) -> str:
prefix = node.get_rel_path(
root_node.parent,
hier_separator="__",
array_suffix="x",
empty_array_suffix="x"
)
return prefix
def get_struct_name(ds: DesignState, root_node: Union[AddrmapNode, MemNode, RegfileNode], node: AddressableNode) -> str:
if node.is_array and node.array_stride > node.size: # type: ignore # is_array implies array_stride is not none
# Stride is larger than size of actual element.
# Struct will be padded up, and therefore needs a unique name
pad_suffix = f"__stride{node.array_stride:x}"
else:
pad_suffix = ""
return get_node_prefix(ds, root_node, node) + pad_suffix + "_t"
def get_friendly_name(ds: DesignState, root_node: Union[AddrmapNode, MemNode, RegfileNode], node: Node) -> str:
"""
Returns a useful string that helps identify the typedef in
a comment
"""
friendly_name = node.get_rel_path(root_node.parent)
return node.component_type_name + " - " + friendly_name
def roundup_pow2(x: int) -> int:
return 1<<(x-1).bit_length()
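
For reference, a couple of quick examples of the helpers above (the hierarchical path shown is illustrative and depends on get_rel_path and the array suffixes):

# get_node_prefix joins the path with "__", so a register r1 inside regfile rf1
# under addrmap top yields something like "top__rf1__r1"
roundup_pow2(5)   # -> 8
roundup_pow2(8)   # -> 8
roundup_pow2(1)   # -> 1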

20
tests/.coveragerc Normal file

@@ -0,0 +1,20 @@
[run]
branch = True
#relative_files = True
omit =
# to be covered elsewhere
*/__peakrdl__.py
[paths]
source =
../src/peakrdl_python_regmap/
*/site-packages/*/peakrdl_python_regmap
*/site-packages/peakrdl_python_regmap
[report]
exclude_lines =
pragma: no cover
if TYPE_CHECKING:
precision = 1

83
tests/base.py Normal file

@@ -0,0 +1,83 @@
from unittest import TestCase
import os
import subprocess
from itertools import product
from systemrdl import RDLCompiler
from peakrdl_python_regmap.exporter import PythonRegmapExporter
def get_permutations(spec):
param_list = []
for v in product(*spec.values()):
param_list.append(dict(zip(spec, v)))
return param_list
class BaseHeaderTestcase(TestCase):
rdl_file = ""
@classmethod
def get_run_dir(cls) -> str:
this_dir = os.path.dirname(__file__)
run_dir = os.path.join(this_dir, "test.out", cls.__name__)
return run_dir
@property
def output_dir(self) -> str:
return self.get_run_dir()
@classmethod
def _write_params(cls) -> None:
"""
Write out the class parameters to a file so that it is easier to debug
how a testcase was parameterized
"""
path = os.path.join(cls.get_run_dir(), "params.txt")
with open(path, 'w') as f:
for k, v in cls.__dict__.items():
if k.startswith("_") or callable(v):
continue
f.write(f"{k}: {repr(v)}\n")
def do_export(self):
os.makedirs(self.output_dir, exist_ok=True)
rdl_path = os.path.join(os.path.dirname(__file__), self.rdl_file)
rdlc = RDLCompiler()
rdlc.compile_file(rdl_path)
top_node = rdlc.elaborate()
x = PythonRegmapExporter()
x.export(
top_node,
path=os.path.join(self.output_dir, "out.h"),
)
self._write_params()
def do_compile(self):
args = [
"gcc",
"--std", self.std.value,
os.path.join(self.output_dir, "out.h.test.c"),
"-o", os.path.join(self.output_dir, "test.exe"),
]
ret = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
print(" ".join(args))
print(ret.stdout.decode('utf-8'))
self.assertEqual(ret.returncode, 0)
def do_run(self):
args = [os.path.join(self.output_dir, "test.exe")]
ret = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
print(" ".join(args))
print(ret.stdout.decode('utf-8'))
self.assertEqual(ret.returncode, 0)
def do_test(self):
return
self.do_export()
self.do_compile()
self.do_run()
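
get_permutations simply takes the Cartesian product of the spec values; a small worked example with illustrative keys:

spec = {"rdl_file": ["a.rdl", "b.rdl"], "flag": [True, False]}
get_permutations(spec)
# -> [{'rdl_file': 'a.rdl', 'flag': True},
#     {'rdl_file': 'a.rdl', 'flag': False},
#     {'rdl_file': 'b.rdl', 'flag': True},
#     {'rdl_file': 'b.rdl', 'flag': False}]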

7
tests/mypy.ini Normal file

@@ -0,0 +1,7 @@
[mypy]
disallow_incomplete_defs = True
disallow_untyped_defs = True
warn_unused_configs = True
warn_unused_ignores = True
warn_unreachable = True
disallow_untyped_calls = True

571
tests/pylint.rc Normal file

@@ -0,0 +1,571 @@
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold to be exceeded before program exits with error.
fail-under=10.0
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS, parser, docs, test
# Add files or directories matching the regex patterns to the ignore-list. The
# regex matches against paths.
ignore-paths=
# Files or directories matching the regex patterns are skipped. The regex
# matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=0
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=
# Disable for now during development
fixme,
# User ignored limits
too-many-lines,
too-many-locals,
too-many-branches,
too-many-return-statements,
too-few-public-methods,
too-many-public-methods,
too-many-statements,
too-many-instance-attributes,
too-many-function-args,
line-too-long,
# Noise / Don't care
no-else-return,
unused-variable,
invalid-name,
missing-docstring,
abstract-method,
protected-access,
duplicate-code,
unused-argument,
consider-using-f-string
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=no
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: en_GB (aspell), en_AU
# (aspell), en_US (hunspell), en (aspell), en_CA (aspell).
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear at the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=no
# Signatures are removed from the similarity computation
ignore-signatures=no
# Minimum lines number of a similarity.
min-similarity-lines=10
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=110
# Maximum number of lines in a module.
max-module-lines=2000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[DESIGN]
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=16
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=builtin.BaseException,
builtin.Exception

7
tests/requirements.txt Normal file

@@ -0,0 +1,7 @@
pytest
pytest-xdist
parameterized
pylint
mypy
pytest-cov
coveralls>=3.0.0

27
tests/run.sh Executable file

@@ -0,0 +1,27 @@
#!/bin/bash
set -e
cd "$(dirname "$0")"
# Initialize venv
python3 -m venv .venv
source .venv/bin/activate
# Install test dependencies
pip install -r requirements.txt
# Install dut
pip install -e "../[cli]"
# Run lint
pylint --rcfile pylint.rc ../src/peakrdl_python_regmap
# Run static type checking
mypy ../src/peakrdl_python_regmap
# Run unit tests
pytest -n auto --cov=peakrdl_python_regmap
# Generate coverage report
coverage html -i -d htmlcov

18
tests/test_all.py Normal file

@@ -0,0 +1,18 @@
import glob
import base
from parameterized import parameterized_class
exceptions = [
"testcases/wide_regs.rdl",
]
files = glob.glob("testcases/*.rdl")
files = [file for file in files if not file in exceptions]
@parameterized_class(base.get_permutations({
"rdl_file": files,
}))
class TestAll(base.BaseHeaderTestcase):
def test_all(self) -> None:
self.do_test()

9
tests/test_htol.py Normal file

@@ -0,0 +1,9 @@
import base
class TestHTOL(base.BaseHeaderTestcase):
rdl_file = "testcases/widths_and_mem.rdl"
def test_htol(self) -> None:
return
# Can't actually run test because gcc on this arch is ltoh
self.do_export()
self.do_compile()

6
tests/test_wide_regs.py Normal file

@@ -0,0 +1,6 @@
import base
class TestWideRegs(base.BaseHeaderTestcase):
rdl_file = "testcases/wide_regs.rdl"
def test_wide_regs(self) -> None:
self.do_test()

58
tests/testcases/basic.rdl Normal file

@@ -0,0 +1,58 @@
addrmap basic {
reg {
default sw = rw;
default hw = r;
field {} basicfield_a[31:0];
} basicreg_a;
reg {
default sw = rw;
default hw = r;
field { sw = r; } basicfield_b[31:16];
field {} basicfield_c[15:0];
} basicreg_b;
reg {
default sw = r;
default hw = r;
field { fieldwidth=1; } basicfield_d[0:0];
field { } basicfield_e[2:1];
} basicreg_c;
reg {
default sw = w;
default hw = r;
field { fieldwidth=1; } basicfield_f[0:0];
field { } basicfield_g[2:1];
field { } case[5:4]; // collide with C keyword
} basicreg_d;
reg {
default sw = rw;
default hw = r;
field { fieldwidth=8; } basicfield_h;
field { fieldwidth=8; } basicfield_i;
field { fieldwidth=8; } basicfield_j;
field { fieldwidth=8; } basicfield_k;
} basicreg_e;
reg {
default sw = rw;
default hw = r;
field { fieldwidth=8; sw = w; } basicfield_l;
field { fieldwidth=8; } basicfield_m;
field { fieldwidth=8; sw = w; } basicfield_n;
} basicreg_f;
reg {
default sw = rw;
default hw = r;
field { fieldwidth=8; sw = r; } basicfield_p;
field { fieldwidth=8; } basicfield_1;
field { fieldwidth=8; sw = r; } basicfield_r;
} basicreg_g;
};

View File

@@ -0,0 +1,29 @@
reg r_global {
field {} f1[4];
field myfield {};
myfield f2;
myfield f3[4];
field myfield2 {fieldwidth = 4;};
myfield2 f4;
};
regfile rf_global #(
longint unsigned NUM = 4
){
reg r_local {
field {} f_param[NUM];
field myfield {};
myfield f_param2[NUM];
} r1;
r_global r2;
signal {} xyz;
};
addrmap top {
rf_global rf1;
rf_global #(.NUM (8)) rf2;
rf_global rf3[4] @ 0x1000 += 0x100;
rf_global rf4[4] @ 0x2000 += 0x200;
};

View File

@@ -0,0 +1,24 @@
addrmap top {
reg {
field f_rw {sw=rw; hw=r;};
field f_r {sw=r; hw=w;};
field f_w {sw=w; hw=r;};
f_rw f1[0:0] = 0;
f_r f2[1:1];
f_w f3[1:1];
} overlap_fields;
reg r_rw {
field {sw=rw; hw=r;} f[8];
};
reg r_r {
field {sw=r; hw=w;} f[8];
};
reg r_w {
field {sw=w; hw=r;} f[8];
};
r_rw r1 @ 0x10;
r_r r2 @ 0x14;
r_w r3 @ 0x14;
};

View File

@@ -0,0 +1,23 @@
mem mem_empty #(
longint WIDTH = 32
){
memwidth = WIDTH;
mementries = 16;
};
addrmap top {
reg wide_reg {
regwidth = 128;
field {} f1[32];
field {} f2[32];
field {} f3[32];
field {} f4[32];
};
wide_reg r1;
wide_reg r2[4];
wide_reg r3;
external mem_empty #(.WIDTH(128)) mem_empty_128;
};

View File

@@ -0,0 +1,77 @@
regfile rf1 #(
longint WIDTH = 32
){
default regwidth = WIDTH;
reg myreg1 {
field {} f1[WIDTH/4];
field {} f2[WIDTH/4];
field {} f3[WIDTH/4];
field {} f4[WIDTH/4];
};
reg myreg2 {
field {} f1[WIDTH/8];
field {} f2[WIDTH/8];
field {} f3[WIDTH/8];
field {} f4[WIDTH/8];
};
reg myreg3 {
field {} f1[1:1];
field {} f2[4:3] = 3;
field {} f3[6:5] = 2;
};
myreg1 r1 @ 0x100;
myreg2 r2[3];
myreg3 r3[5];
myreg1 r4 @ 0x200;
myreg2 r5;
myreg3 r6;
};
mem mem_empty #(
longint WIDTH = 32
){
memwidth = WIDTH;
mementries = 16;
};
mem mem_vregs #(
longint WIDTH = 32
){
memwidth = WIDTH;
mementries = 16;
reg myreg {
regwidth = WIDTH;
field {} f1[WIDTH/2];
field {} f2[WIDTH/4];
field {} f3[WIDTH/8];
field {} f4[WIDTH/8];
};
myreg r1[8];
myreg r2;
myreg r3[6];
myreg r4;
};
addrmap top {
rf1 #(.WIDTH(8)) rf1_8;
rf1 #(.WIDTH(16)) rf1_16;
rf1 #(.WIDTH(32)) rf1_32;
rf1 #(.WIDTH(64)) rf1_64;
rf1 #(.WIDTH(8)) rf1_8_again;
external mem_empty #(.WIDTH(8)) mem_empty_8;
external mem_empty #(.WIDTH(16)) mem_empty_16;
external mem_empty #(.WIDTH(32)) mem_empty_32;
external mem_empty #(.WIDTH(64)) mem_empty_64;
external mem_empty #(.WIDTH(8)) mem_empty_8_again;
external mem_vregs #(.WIDTH(8)) mem_vregs_8;
external mem_vregs #(.WIDTH(16)) mem_vregs_16;
external mem_vregs #(.WIDTH(32)) mem_vregs_32;
external mem_vregs #(.WIDTH(64)) mem_vregs_64;
external mem_vregs #(.WIDTH(8)) mem_vregs_8_again;
};