Lint and typing cleanup

Author: Alex Mykyta
Date: 2022-02-25 23:05:16 -08:00
Parent: da3ed05492
Commit: 7a890b56c5
26 changed files with 852 additions and 94 deletions

.github/workflows/build.yml (new file, 129 lines)

@@ -0,0 +1,129 @@
name: build

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]
  release:
    types:
      - published

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version:
          - 3.6
          - 3.7
          - 3.8
          - 3.9
          - "3.10"
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install -U -r test/requirements.txt
      - name: Install
        run: |
          python -m pip install .
      - name: Test
        run: |
          cd test
          export SKIP_SYNTH_TESTS=1
          export STUB_SIMULATOR=1
          pytest --workers auto

  #-------------------------------------------------------------------------------
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8
      - name: Install dependencies
        run: |
          python -m pip install -U pylint
      - name: Install
        run: |
          python -m pip install .
      - name: Run Lint
        run: |
          pylint --rcfile test/pylint.rc peakrdl

  #-------------------------------------------------------------------------------
  mypy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8
      - name: Install dependencies
        run: |
          python -m pip install -U mypy
      - name: Type Check
        run: |
          mypy --config-file test/mypy.ini peakrdl

  #-------------------------------------------------------------------------------
  build_sdist:
    needs:
      - test
      - lint
      - mypy
    name: Build source distribution
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        name: Install Python
        with:
          python-version: 3.8
      - name: Build sdist
        run: python setup.py sdist
      - uses: actions/upload-artifact@v2
        with:
          path: dist/*.tar.gz

  #-------------------------------------------------------------------------------
  deploy:
    needs:
      - build_sdist
    runs-on: ubuntu-latest

    # Only publish when a GitHub Release is created.
    if: github.event_name == 'release'

    steps:
      - uses: actions/download-artifact@v2
        with:
          name: artifact
          path: dist
      - uses: pypa/gh-action-pypi-publish@master
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}


@@ -1,5 +1,5 @@
-Customizing your own CPU interface
-==================================
+Customizing the CPU interface
+=============================

Bring your own SystemVerilog interface
--------------------------------------
@@ -33,9 +33,11 @@ Rather than rewriting a new CPU interface definition, you can extend and adjust
class My_AXI4Lite(AXI4Lite_Cpuif):
    @property
    def port_declaration(self) -> str:
+        # Override the port declaration text to use the alternate type name and modport style
        return "axi4_lite_interface.Slave_mp s_axil"

    def signal(self, name:str) -> str:
+        # Override the signal names to be lowercase instead
        return "s_axil." + name.lower()

Then use your custom CPUIF during export:
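As a rough illustration, a minimal export call using the class above might look like this (the RegblockExporter API and its cpuif_cls argument appear later in this commit; the import paths and the input file name are assumptions):

    from systemrdl import RDLCompiler
    from peakrdl.regblock import RegblockExporter   # import path assumed
    # My_AXI4Lite is the custom CPUIF subclass shown above

    rdlc = RDLCompiler()
    rdlc.compile_file("example.rdl")    # hypothetical RDL input
    root = rdlc.elaborate()

    RegblockExporter().export(
        root.top,
        "output_dir",
        cpuif_cls=My_AXI4Lite,          # select the custom CPU interface
    )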


@@ -193,12 +193,13 @@ be the same as a write for a given register block configuration. Typically read
operations will be more deeply pipelined. This latency asymmetry would create a
hazard for response collisions.

-In order to eliminate this hazard, additional stall signals are provided to delay
-an incoming transfer request if necessary. When asserted, the CPU interface shall
-hold the next pending request until the stall is cleared.
+In order to eliminate this hazard, additional stall signals (``cpuif_req_stall_rd``
+and ``cpuif_req_stall_wr``) are provided to delay the next incoming transfer
+request if necessary. When asserted, the CPU interface shall hold the next pending
+request until the stall is cleared.

For non-pipelined CPU interfaces that only allow one outstanding transaction at a time,
-these can be safely ignored.
+these stall signals can be safely ignored.

In the following example, the regblock is configured such that:


@@ -1,5 +1,4 @@
-import re
-from typing import TYPE_CHECKING, List, Union
+from typing import TYPE_CHECKING, Union, List

from systemrdl.node import AddrmapNode, AddressableNode, RegNode, FieldNode
@@ -60,7 +59,7 @@ class DecodeLogicGenerator(RDLForLoopGenerator):
        super().__init__()

        # List of address strides for each dimension
-        self._array_stride_stack = []
+        self._array_stride_stack = [] # type: List[List[int]]

    def enter_AddressableComponent(self, node: 'AddressableNode') -> None:
@@ -80,7 +79,7 @@ class DecodeLogicGenerator(RDLForLoopGenerator):
    def _get_address_str(self, node:AddressableNode) -> str:
-        a = "'h%x" % (node.raw_absolute_address - self.addr_decode.top_node.raw_absolute_address)
+        a = f"'h{(node.raw_absolute_address - self.addr_decode.top_node.raw_absolute_address):x}"
        for i, stride in enumerate(self._array_stride_stack):
            a += f" + i{i}*'h{stride:x}"
        return a


@@ -61,8 +61,7 @@ class Dereferencer:
            else:
                # No reset value defined!
                obj.env.msg.warning(
-                    "Field '%s' is a constant but does not have a known value (missing reset). Assigning it a value of X."
-                    % obj.inst_name,
+                    f"Field '{obj.inst_name}' is a constant but does not have a known value (missing reset). Assigning it a value of X.",
                    obj.inst.inst_src_ref
                )
                return "'X"
@@ -79,7 +78,7 @@ class Dereferencer:
            else:
                raise RuntimeError

-        raise RuntimeError("Unhandled reference to: %s" % obj)
+        raise RuntimeError(f"Unhandled reference to: {obj}")

    def get_field_propref_value(self, field: FieldNode, prop_name: str) -> str:
@@ -187,7 +186,7 @@ class Dereferencer:
        }:
            return self.field_logic.get_field_combo_identifier(field, prop_name)

-        raise RuntimeError("Unhandled reference to: %s->%s" % (field, prop_name))
+        raise RuntimeError(f"Unhandled reference to: {field}->{prop_name}")

    def get_reg_propref_value(self, reg: RegNode, prop_name: str) -> str:


@@ -1,5 +1,5 @@
import os
-from typing import Union
+from typing import Union, Any, Type

import jinja2 as jj
from systemrdl.node import AddrmapNode, RootNode
@@ -16,10 +16,10 @@ from .utils import get_always_ff_event
from .scan_design import DesignScanner

class RegblockExporter:
-    def __init__(self, **kwargs) -> None:
+    def __init__(self, **kwargs: Any) -> None:
        # Check for stray kwargs
        if kwargs:
-            raise TypeError("got an unexpected keyword argument '%s'" % list(kwargs.keys())[0])
+            raise TypeError(f"got an unexpected keyword argument '{list(kwargs.keys())[0]}'")

        self.top_node = None # type: AddrmapNode
@@ -45,7 +45,7 @@ class RegblockExporter:
        )

-    def export(self, node: Union[RootNode, AddrmapNode], output_dir:str, **kwargs) -> None:
+    def export(self, node: Union[RootNode, AddrmapNode], output_dir:str, **kwargs: Any) -> None:
        """
        Parameters
        ----------
@@ -99,18 +99,18 @@
        self.top_node = node

-        cpuif_cls = kwargs.pop("cpuif_cls", APB3_Cpuif)
-        module_name = kwargs.pop("module_name", self.top_node.inst_name)
-        package_name = kwargs.pop("package_name", module_name + "_pkg")
-        reuse_hwif_typedefs = kwargs.pop("reuse_hwif_typedefs", True)
+        cpuif_cls = kwargs.pop("cpuif_cls", APB3_Cpuif) # type: Type[CpuifBase]
+        module_name = kwargs.pop("module_name", self.top_node.inst_name) # type: str
+        package_name = kwargs.pop("package_name", module_name + "_pkg") # type: str
+        reuse_hwif_typedefs = kwargs.pop("reuse_hwif_typedefs", True) # type: bool

        # Pipelining options
-        retime_read_fanin = kwargs.pop("retime_read_fanin", False)
-        retime_read_response = kwargs.pop("retime_read_response", True)
+        retime_read_fanin = kwargs.pop("retime_read_fanin", False) # type: bool
+        retime_read_response = kwargs.pop("retime_read_response", True) # type: bool

        # Check for stray kwargs
        if kwargs:
-            raise TypeError("got an unexpected keyword argument '%s'" % list(kwargs.keys())[0])
+            raise TypeError(f"got an unexpected keyword argument '{list(kwargs.keys())[0]}'")

        self.min_read_latency = 0
        self.min_write_latency = 0
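The export options above all follow the same pattern: pop each recognized keyword argument (now with a typed default), then fail loudly if anything is left over. A small standalone sketch of that pattern, not taken from the package:

    from typing import Any

    def configure(**kwargs: Any) -> None:
        retime_read_fanin = kwargs.pop("retime_read_fanin", False)  # type: bool
        # Anything still left in kwargs was not a recognized option
        if kwargs:
            raise TypeError(f"got an unexpected keyword argument '{list(kwargs.keys())[0]}'")
        print(retime_read_fanin)

    configure(retime_read_fanin=True)     # ok
    # configure(retime_red_fanin=True)    # would raise TypeError: the typo is caught, not ignored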


@@ -1,14 +1,15 @@
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, List
from collections import OrderedDict

from ..struct_generator import RDLStructGenerator
from ..forloop_generator import RDLForLoopGenerator
-from ..utils import get_indexed_path, get_always_ff_event
+from ..utils import get_always_ff_event

if TYPE_CHECKING:
    from . import FieldLogic
    from systemrdl.node import FieldNode, RegNode
+    from .bases import SVLogic

class CombinationalStructGenerator(RDLStructGenerator):
@@ -23,7 +24,7 @@ class CombinationalStructGenerator(RDLStructGenerator):
            return

        # collect any extra combo signals that this field requires
-        extra_combo_signals = OrderedDict()
+        extra_combo_signals = OrderedDict() # type: OrderedDict[str, SVLogic]
        for conditional in self.field_logic.get_conditionals(node):
            for signal in conditional.get_extra_combo_signals(node):
                if signal.name in extra_combo_signals:
@@ -61,7 +62,7 @@ class CombinationalStructGenerator(RDLStructGenerator):

class FieldStorageStructGenerator(RDLStructGenerator):

-    def __init__(self, field_logic: 'FieldLogic'):
+    def __init__(self, field_logic: 'FieldLogic') -> None:
        super().__init__()
        self.field_logic = field_logic
@@ -79,15 +80,15 @@
class FieldLogicGenerator(RDLForLoopGenerator):
    i_type = "genvar"

-    def __init__(self, field_logic: 'FieldLogic'):
+    def __init__(self, field_logic: 'FieldLogic') -> None:
        super().__init__()
        self.field_logic = field_logic
        self.exp = field_logic.exp
        self.field_storage_template = self.field_logic.exp.jj_env.get_template(
            "field_logic/templates/field_storage.sv"
        )
-        self.intr_fields = []
-        self.halt_fields = []
+        self.intr_fields = [] # type: List[FieldNode]
+        self.halt_fields = [] # type: List[FieldNode]

    def enter_Reg(self, node: 'RegNode') -> None:


@@ -1,9 +1,9 @@
from typing import TYPE_CHECKING, List

-from .bases import NextStateConditional
from systemrdl.rdltypes import InterruptType
+from .bases import NextStateConditional

if TYPE_CHECKING:
    from systemrdl.node import FieldNode
@@ -28,7 +28,7 @@ class Sticky(NextStateConditional):
        I = self.exp.hwif.get_input_identifier(field)
        return [
            f"next_c = {I};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]
@@ -51,7 +51,7 @@ class Stickybit(NextStateConditional):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} | {I};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class PosedgeStickybit(NextStateConditional):
@@ -77,7 +77,7 @@ class PosedgeStickybit(NextStateConditional):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} | (~{Iq} & {I});",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class NegedgeStickybit(NextStateConditional):
@@ -103,7 +103,7 @@ class NegedgeStickybit(NextStateConditional):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} | ({Iq} & ~{I});",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class BothedgeStickybit(NextStateConditional):
@@ -129,7 +129,7 @@ class BothedgeStickybit(NextStateConditional):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} | ({Iq} ^ {I});",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class PosedgeNonsticky(NextStateConditional):
@@ -152,7 +152,7 @@ class PosedgeNonsticky(NextStateConditional):
        Iq = self.exp.field_logic.get_next_q_identifier(field)
        return [
            f"next_c = ~{Iq} & {I};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class NegedgeNonsticky(NextStateConditional):
@@ -175,7 +175,7 @@ class NegedgeNonsticky(NextStateConditional):
        Iq = self.exp.field_logic.get_next_q_identifier(field)
        return [
            f"next_c = {Iq} & ~{I};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class BothedgeNonsticky(NextStateConditional):
@@ -198,5 +198,5 @@ class BothedgeNonsticky(NextStateConditional):
        Iq = self.exp.field_logic.get_next_q_identifier(field)
        return [
            f"next_c = {Iq} ^ {I};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]
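The hunks above (and several below) all make the same mechanical fix: the f prefix is dropped from string literals that contain no placeholders, which pylint reports as f-string-without-interpolation (W1309). A tiny standalone illustration with hypothetical values:

    R = "field_storage.my_reg.my_field.value"   # hypothetical identifier string
    with_placeholder = f"next_c = {R} | '1;"    # interpolation used, so the f prefix stays
    without_placeholder = "load_next_c = '1;"   # no placeholders, so a plain literal is enough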


@@ -35,7 +35,7 @@ class HWSet(NextStateConditional):
        return [
            f"next_c = {next_val};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]
@@ -68,5 +68,5 @@ class HWClear(NextStateConditional):
        return [
            f"next_c = {next_val};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]


@@ -38,7 +38,7 @@ class AlwaysWrite(NextStateConditional):
        return [
            f"next_c = {next_val};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class WEWrite(AlwaysWrite):


@@ -23,8 +23,8 @@ class ClearOnRead(_OnRead):
    def get_assignments(self, field: 'FieldNode') -> List[str]:
        return [
-            f"next_c = '0;",
-            f"load_next_c = '1;",
+            "next_c = '0;",
+            "load_next_c = '1;",
        ]
@@ -34,6 +34,6 @@ class SetOnRead(_OnRead):
    def get_assignments(self, field: 'FieldNode') -> List[str]:
        return [
-            f"next_c = '1;",
-            f"load_next_c = '1;",
+            "next_c = '1;",
+            "load_next_c = '1;",
        ]


@@ -41,7 +41,7 @@ class WriteOneSet(_OnWrite):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} | {self._wr_data(field)};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class WriteOneClear(_OnWrite):
@@ -52,7 +52,7 @@ class WriteOneClear(_OnWrite):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} & ~{self._wr_data(field)};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class WriteOneToggle(_OnWrite):
@@ -63,7 +63,7 @@ class WriteOneToggle(_OnWrite):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} ^ {self._wr_data(field)};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class WriteZeroSet(_OnWrite):
@@ -74,7 +74,7 @@ class WriteZeroSet(_OnWrite):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} | ~{self._wr_data(field)};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class WriteZeroClear(_OnWrite):
@@ -85,7 +85,7 @@ class WriteZeroClear(_OnWrite):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} & {self._wr_data(field)};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class WriteZeroToggle(_OnWrite):
@@ -96,7 +96,7 @@ class WriteZeroToggle(_OnWrite):
        R = self.exp.field_logic.get_storage_identifier(field)
        return [
            f"next_c = {R} ^ ~{self._wr_data(field)};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]

class WriteClear(_OnWrite):
@@ -105,8 +105,8 @@ class WriteClear(_OnWrite):
    def get_assignments(self, field: 'FieldNode') -> List[str]:
        return [
-            f"next_c = '0;",
-            f"load_next_c = '1;",
+            "next_c = '0;",
+            "load_next_c = '1;",
        ]

class WriteSet(_OnWrite):
@@ -115,8 +115,8 @@ class WriteSet(_OnWrite):
    def get_assignments(self, field: 'FieldNode') -> List[str]:
        return [
-            f"next_c = '1;",
-            f"load_next_c = '1;",
+            "next_c = '1;",
+            "load_next_c = '1;",
        ]

class Write(_OnWrite):
@@ -126,5 +126,5 @@ class Write(_OnWrite):
    def get_assignments(self, field: 'FieldNode') -> List[str]:
        return [
            f"next_c = {self._wr_data(field)};",
-            f"load_next_c = '1;",
+            "load_next_c = '1;",
        ]


@@ -17,6 +17,6 @@ class Singlepulse(NextStateConditional):
    def get_assignments(self, field: 'FieldNode') -> List[str]:
        return [
-            f"next_c = '0;",
-            f"load_next_c = '1;",
+            "next_c = '0;",
+            "load_next_c = '1;",
        ]


@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING, Optional, List
+from typing import TYPE_CHECKING, Optional, List, Union
import textwrap

from systemrdl.walker import RDLListener, RDLWalker
@@ -9,7 +9,7 @@ if TYPE_CHECKING:
class Body:
    def __init__(self) -> None:
-        self.children = []
+        self.children = [] # type: List[Union[str, Body]]

    def __str__(self) -> str:
        s = '\n'.join((str(x) for x in self.children))
@@ -37,7 +37,7 @@ class ForLoopGenerator:
    def __init__(self) -> None:
        self._loop_level = 0
-        self._stack = []
+        self._stack = [] # type: List[Body]

    @property
    def current_loop(self) -> Body:
@@ -60,7 +60,7 @@ class ForLoopGenerator:
        self.current_loop.children.append(b)
        self._loop_level -= 1

-    def start(self):
+    def start(self) -> None:
        assert not self._stack
        b = Body()
        self._stack.append(b)
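The "# type:" annotations added here are PEP 484 type comments; mypy reads them the same way it reads inline PEP 526 variable annotations, so either spelling satisfies the new mypy configuration. A minimal sketch mirroring the Body class above:

    from typing import List, Union

    class Body:
        def __init__(self) -> None:
            # comment-style annotation, as used throughout this commit
            self.children = []  # type: List[Union[str, Body]]

    # equivalent inline annotation, also accepted by mypy
    names: List[str] = []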


@@ -27,8 +27,8 @@ class Hwif:
        self.exp = exp
        self.package_name = package_name
-        self.has_input_struct = None
-        self.has_output_struct = None
+        self.has_input_struct = False
+        self.has_output_struct = False
        self.in_hier_signal_paths = in_hier_signal_paths
        self.out_of_hier_signals = out_of_hier_signals
@@ -147,7 +147,7 @@ class Hwif:
        elif isinstance(obj, PropertyReference):
            return self.get_implied_prop_input_identifier(obj.node, obj.name)

-        raise RuntimeError("Unhandled reference to: %s", obj)
+        raise RuntimeError(f"Unhandled reference to: {obj}")

    def get_implied_prop_input_identifier(self, field: FieldNode, prop: str) -> str:
@@ -179,7 +179,7 @@ class Hwif:
            assert obj.node.get_property(obj.name)
            return self.get_implied_prop_output_identifier(obj.node, obj.name)

-        raise RuntimeError("Unhandled reference to: %s", obj)
+        raise RuntimeError(f"Unhandled reference to: {obj}")

    def get_implied_prop_output_identifier(self, node: Union[FieldNode, RegNode], prop: str) -> str:


@@ -1,13 +1,15 @@
from typing import TYPE_CHECKING

-from ..struct_generator import RDLFlatStructGenerator
from systemrdl.node import FieldNode
+from ..struct_generator import RDLFlatStructGenerator

if TYPE_CHECKING:
    from systemrdl.node import Node, SignalNode, RegNode
    from . import Hwif

class InputStructGenerator_Hier(RDLFlatStructGenerator):
-    def __init__(self, hwif: 'Hwif'):
+    def __init__(self, hwif: 'Hwif') -> None:
        super().__init__()
        self.hwif = hwif
        self.top_node = hwif.top_node


@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, List

from ..forloop_generator import RDLForLoopGenerator, LoopBody
@@ -30,8 +30,8 @@ class ReadbackAssignmentGenerator(RDLForLoopGenerator):
        # array. The array width is equal to the CPUIF bus width. Each entry in
        # the array represents an aligned read access.
        self.current_offset = 0
-        self.start_offset_stack = []
-        self.dim_stack = []
+        self.start_offset_stack = [] # type: List[int]
+        self.dim_stack = [] # type: List[int]

    @property
    def current_offset_str(self) -> str:
@@ -99,7 +99,7 @@ class ReadbackAssignmentGenerator(RDLForLoopGenerator):
        # Number of registers enclosed in this loop
        n_regs = self.current_offset - start_offset
-        self.current_loop.n_regs = n_regs
+        self.current_loop.n_regs = n_regs # type: ignore
        super().pop_loop()


@@ -1,4 +1,4 @@
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Set

from collections import OrderedDict
from systemrdl.walker import RDLListener, RDLWalker
@@ -16,16 +16,16 @@ class DesignScanner(RDLListener):
    Also collects any information that is required prior to the start of the export process.
    """
-    def __init__(self, exp:'RegblockExporter'):
+    def __init__(self, exp:'RegblockExporter') -> None:
        self.exp = exp
        self.cpuif_data_width = 0
        self.msg = exp.top_node.env.msg

        # Collections of signals that were actually referenced by the design
-        self.in_hier_signal_paths = set()
-        self.out_of_hier_signals = OrderedDict()
+        self.in_hier_signal_paths = set() # type: Set[str]
+        self.out_of_hier_signals = OrderedDict() # type: OrderedDict[str, SignalNode]

-    def _get_out_of_hier_field_reset(self):
+    def _get_out_of_hier_field_reset(self) -> None:
        current_node = self.exp.top_node.parent
        while current_node is not None:
            for signal in current_node.signals():
@@ -35,7 +35,7 @@ class DesignScanner(RDLListener):
                    return
            current_node = current_node.parent

-    def do_scan(self):
+    def do_scan(self) -> None:
        # Collect cpuif reset, if any.
        cpuif_reset = self.exp.top_node.cpuif_reset
        if cpuif_reset is not None:


@@ -11,8 +11,8 @@ if TYPE_CHECKING:

class _StructBase:
-    def __init__(self):
-        self.children = [] # type: Union[str, _StructBase]
+    def __init__(self) -> None:
+        self.children = [] # type: List[Union[str, _StructBase]]

    def __str__(self) -> str:
        s = '\n'.join((str(x) for x in self.children))
@@ -65,8 +65,8 @@ class _TypedefStruct(_StructBase):

class StructGenerator:

-    def __init__(self):
-        self._struct_stack = []
+    def __init__(self) -> None:
+        self._struct_stack = [] # type: List[_StructBase]

    @property
    def current_struct(self) -> _StructBase:
@@ -99,7 +99,7 @@ class StructGenerator:
        self.current_struct.children.append(s)

-    def start(self, type_name: str):
+    def start(self, type_name: str) -> None:
        assert not self._struct_stack
        s = _TypedefStruct(type_name)
        self._struct_stack.append(s)
@@ -155,16 +155,17 @@ class RDLStructGenerator(StructGenerator, RDLListener):

class FlatStructGenerator(StructGenerator):

-    def __init__(self):
+    def __init__(self) -> None:
        super().__init__()
-        self.typedefs = OrderedDict()
+        self.typedefs = OrderedDict() # type: OrderedDict[str, _TypedefStruct]

-    def push_struct(self, type_name: str, inst_name: str, array_dimensions: Optional[List[int]] = None) -> None:
+    def push_struct(self, type_name: str, inst_name: str, array_dimensions: Optional[List[int]] = None) -> None: # type: ignore # pylint: disable=arguments-differ
        s = _TypedefStruct(type_name, inst_name, array_dimensions)
        self._struct_stack.append(s)

    def pop_struct(self) -> None:
        s = self._struct_stack.pop()
+        assert isinstance(s, _TypedefStruct)

        if s.children:
            # struct is not empty. Attach it to the parent
@@ -176,6 +177,7 @@ class FlatStructGenerator(StructGenerator):
    def finish(self) -> Optional[str]:
        s = self._struct_stack.pop()
+        assert isinstance(s, _TypedefStruct)
        assert not self._struct_stack

        # no children, no struct.


@@ -1,5 +1,5 @@
import re
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Match

if TYPE_CHECKING:
    from systemrdl.node import Node, SignalNode
@@ -13,9 +13,9 @@ def get_indexed_path(top_node: 'Node', target_node: 'Node') -> str:
    path = target_node.get_rel_path(top_node, empty_array_suffix="[!]")

    # replace unknown indexes with incrementing iterators i0, i1, ...
    class repl:
-        def __init__(self):
+        def __init__(self) -> None:
            self.i = 0
-        def __call__(self, match):
+        def __call__(self, match: Match) -> str:
            s = f'i{self.i}'
            self.i += 1
            return s
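The small repl class above is a stateful callable handed to re.sub, so each placeholder match is replaced with the next loop iterator name (i0, i1, ...). A standalone sketch of the same technique; only the "[!]" placeholder convention comes from the hunk above, the regex and input string are hypothetical:

    import re
    from typing import Match

    class Repl:
        def __init__(self) -> None:
            self.i = 0

        def __call__(self, match: Match) -> str:
            s = f"i{self.i}"
            self.i += 1
            return s

    path = "regs[!].field[!]"                        # hypothetical indexed path
    print(re.sub(r"(?<=\[)!(?=\])", Repl(), path))   # prints: regs[i0].field[i1]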


@@ -5,15 +5,20 @@ import jinja2 as jj
from .sv_line_anchor import SVLineAnchor

from .simulators.questa import Questa
+from .simulators import StubSimulator
from .base_testcase import BaseTestCase

+SIM_CLS = Questa
+if os.environ.get("STUB_SIMULATOR", False):
+    SIM_CLS = StubSimulator

class SimTestCase(BaseTestCase):

    #: Abort test if it exceeds this number of clock cycles
    timeout_clk_cycles = 5000

-    simulator_cls = Questa
+    simulator_cls = SIM_CLS

    @classmethod
    def _generate_tb(cls):


@@ -24,3 +24,10 @@ class Simulator:
    def run(self, plusargs:List[str] = None) -> None:
        raise NotImplementedError

+class StubSimulator(Simulator):
+    def compile(self) -> None:
+        pass
+
+    def run(self, plusargs:List[str] = None) -> None:
+        pass

test/mypy.ini (new file, 9 lines)

@@ -0,0 +1,9 @@
[mypy]
ignore_missing_imports = True
strict_optional = False
disallow_incomplete_defs = True
disallow_untyped_defs = True
warn_unused_configs = True
warn_unused_ignores = True
warn_unreachable = True
disallow_untyped_calls = True
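In short, this configuration makes mypy reject function definitions that lack annotations (and calls into them), while tolerating missing stubs for third-party imports. A minimal illustration of what disallow_untyped_defs catches, using a hypothetical module:

    def scale(x, factor=2):                            # error: function is missing a type annotation
        return x * factor

    def scale_typed(x: int, factor: int = 2) -> int:   # accepted
        return x * factor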

test/pylint.rc (new file, 571 lines)

@@ -0,0 +1,571 @@
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
# for backward compatibility.)
extension-pkg-whitelist=
# Return non-zero exit code if any of these messages/categories are detected,
# even if score is above --fail-under value. Syntax same as enable. Messages
# specified are enabled, while categories only check already-enabled messages.
fail-on=
# Specify a score threshold to be exceeded before program exits with error.
fail-under=10.0
# Files or directories to be skipped. They should be base names, not paths.
ignore=CVS, parser, docs, test
# Add files or directories matching the regex patterns to the ignore-list. The
# regex matches against paths.
ignore-paths=
# Files or directories matching the regex patterns are skipped. The regex
# matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
# number of processors available to use.
jobs=0
# Control the amount of potential inferred values when inferring a single
# object. This can help the performance when dealing with large functions or
# complex, nested conditions.
limit-inference-results=100
# List of plugins (as comma separated values of python module names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages.
suggestion-mode=yes
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once). You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W".
disable=
# Disable for now during development
fixme,
# User ignored limits
too-many-lines,
too-many-locals,
too-many-branches,
too-many-return-statements,
too-few-public-methods,
too-many-public-methods,
too-many-statements,
too-many-instance-attributes,
too-many-function-args,
line-too-long,
# Noise / Don't care
no-else-return,
no-self-use,
unused-variable,
invalid-name,
missing-docstring,
abstract-method,
protected-access,
duplicate-code,
unused-argument
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=c-extension-no-member
[REPORTS]
# Python expression which should return a score less than or equal to 10. You
# have access to the variables 'error', 'warning', 'refactor', and 'convention'
# which contain the number of messages in each category, as well as 'statement'
# which is the total number of statements analyzed. This score is used by the
# global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details.
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio). You can also give a reporter class, e.g.
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages.
reports=no
# Activate the evaluation score.
score=no
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=sys.exit,argparse.parse_error
[STRING]
# This flag controls whether inconsistent-quotes generates a warning when the
# character used as a quote delimiter is used inconsistently within a module.
check-quote-consistency=no
# This flag controls whether the implicit-str-concat should generate a warning
# on implicit string concatenation in sequences defined over several lines.
check-str-concat-over-line-jumps=no
[SPELLING]
# Limits count of emitted suggestions for spelling mistakes.
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: en_GB (aspell), en_AU
# (aspell), en_US (hunspell), en (aspell), en_CA (aspell).
spelling-dict=
# List of comma separated words that should be considered directives if they
# appear and the beginning of a comment and should not be checked.
spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains the private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to the private dictionary (see the
# --spelling-private-dict-file option) instead of raising a message.
spelling-store-unknown-words=no
[LOGGING]
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
logging-format-style=old
# Logging modules to check that the string format arguments are in logging
# function parameter format.
logging-modules=logging
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of names allowed to shadow builtins
allowed-redefined-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,
_cb
# A regular expression matching the name of dummy variables (i.e. expected to
# not be used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore.
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io
[SIMILARITIES]
# Comments are removed from the similarity computation
ignore-comments=yes
# Docstrings are removed from the similarity computation
ignore-docstrings=yes
# Imports are removed from the similarity computation
ignore-imports=no
# Signatures are removed from the similarity computation
ignore-signatures=no
# Minimum lines number of a similarity.
min-similarity-lines=10
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# Tells whether to warn about missing members when the owner of the attribute
# is inferred to be None.
ignore-none=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis). It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
# List of decorators that change the signature of a decorated function.
signature-mutators=
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=110
# Maximum number of lines in a module.
max-module-lines=2000
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,
XXX,
TODO
[BASIC]
# Naming style matching correct argument names.
argument-naming-style=snake_case
# Regular expression matching correct argument names. Overrides argument-
# naming-style.
#argument-rgx=
# Naming style matching correct attribute names.
attr-naming-style=snake_case
# Regular expression matching correct attribute names. Overrides attr-naming-
# style.
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma.
bad-names=foo,
bar,
baz,
toto,
tutu,
tata
# Bad variable names regexes, separated by a comma. If names match any regex,
# they will always be refused
bad-names-rgxs=
# Naming style matching correct class attribute names.
class-attribute-naming-style=any
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style.
#class-attribute-rgx=
# Naming style matching correct class constant names.
class-const-naming-style=UPPER_CASE
# Regular expression matching correct class constant names. Overrides class-
# const-naming-style.
#class-const-rgx=
# Naming style matching correct class names.
class-naming-style=PascalCase
# Regular expression matching correct class names. Overrides class-naming-
# style.
#class-rgx=
# Naming style matching correct constant names.
const-naming-style=UPPER_CASE
# Regular expression matching correct constant names. Overrides const-naming-
# style.
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names.
function-naming-style=snake_case
# Regular expression matching correct function names. Overrides function-
# naming-style.
#function-rgx=
# Good variable names which should always be accepted, separated by a comma.
good-names=i,
j,
k,
ex,
Run,
_
# Good variable names regexes, separated by a comma. If names match any regex,
# they will always be accepted
good-names-rgxs=
# Include a hint for the correct naming format with invalid-name.
include-naming-hint=no
# Naming style matching correct inline iteration names.
inlinevar-naming-style=any
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style.
#inlinevar-rgx=
# Naming style matching correct method names.
method-naming-style=snake_case
# Regular expression matching correct method names. Overrides method-naming-
# style.
#method-rgx=
# Naming style matching correct module names.
module-naming-style=snake_case
# Regular expression matching correct module names. Overrides module-naming-
# style.
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
# These decorators are taken in consideration only for invalid-name.
property-classes=abc.abstractproperty
# Naming style matching correct variable names.
variable-naming-style=snake_case
# Regular expression matching correct variable names. Overrides variable-
# naming-style.
#variable-rgx=
[CLASSES]
# Warn about protected attribute access inside special methods
check-protected-access-in-special-methods=no
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,
__new__,
setUp,
__post_init__
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,
_fields,
_replace,
_source,
_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=cls
[DESIGN]
# List of qualified class names to ignore when counting class parents (see
# R0901)
ignored-parents=
# Maximum number of arguments for function / method.
max-args=8
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5
# Maximum number of branch for function / method body.
max-branches=12
# Maximum number of locals for function / method body.
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body.
max-returns=6
# Maximum number of statements in function / method body.
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[IMPORTS]
# List of modules that can be imported at any level, not just the top level
# one.
allow-any-import-level=
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma.
deprecated-modules=
# Output a graph (.gv or any supported image format) of external dependencies
# to the given file (report RP0402 must not be disabled).
ext-import-graph=
# Output a graph (.gv or any supported image format) of all (i.e. internal and
# external) dependencies to the given file (report RP0402 must not be
# disabled).
import-graph=
# Output a graph (.gv or any supported image format) of internal dependencies
# to the given file (report RP0402 must not be disabled).
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Couples of modules and preferred modules, separated by a comma.
preferred-modules=
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "BaseException, Exception".
overgeneral-exceptions=BaseException,
Exception


@@ -2,3 +2,5 @@ pytest
parameterized
pytest-parallel
jinja2-simple-tags
+pylint
+mypy

test/run.sh (new executable file, 29 lines)

@@ -0,0 +1,29 @@
#!/bin/bash
set -e
this_dir="$( cd "$(dirname "$0")" ; pwd -P )"
# Initialize venv
venv_bin=$this_dir/.venv/bin
python3 -m venv $this_dir/.venv
source $this_dir/.venv/bin/activate
# Install test dependencies
pip install -U pip setuptools wheel
pip install -r $this_dir/requirements.txt
# Install dut
cd $this_dir/../
python $this_dir/../setup.py install
cd $this_dir
# Run unit tests
export SKIP_SYNTH_TESTS=1
pytest --workers auto
# Run lint
pylint --rcfile $this_dir/pylint.rc ../peakrdl
# Run static type checking
mypy $this_dir/../peakrdl