From d1615332704ac8e319a745def5326302b1b8ea3d Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Tue, 13 Aug 2024 22:42:13 +0200 Subject: [PATCH 01/11] harp/config_description: [FEATURE] Add validation classes for TOML checks --- .../ndk_xbuild/comp_settings/__init__.py | 0 .../comp_settings/config_description.py | 161 ++++++++++++++++++ 2 files changed, 161 insertions(+) create mode 100644 build/scripts/ndk_xbuild/comp_settings/__init__.py create mode 100644 build/scripts/ndk_xbuild/comp_settings/config_description.py diff --git a/build/scripts/ndk_xbuild/comp_settings/__init__.py b/build/scripts/ndk_xbuild/comp_settings/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/scripts/ndk_xbuild/comp_settings/config_description.py b/build/scripts/ndk_xbuild/comp_settings/config_description.py new file mode 100644 index 000000000..1b92d7d67 --- /dev/null +++ b/build/scripts/ndk_xbuild/comp_settings/config_description.py @@ -0,0 +1,161 @@ +# File: config_description.py +# Author(s): Oliver Gurka +# Copyright: (C) 2024 CESNET, z.s.p.o. + +""" +Pydantic classes describing structure of NdkXBuild TOML config file. +""" + +from pydantic import BaseModel, Field, model_validator, AfterValidator +from typing import Annotated, Union, List, Optional, Dict +from typing_extensions import Self +from enum import Enum + + +def check_range( + v: str, +): + ldict = {} + cmd = f"rng = {v}" + exec(cmd, globals(), ldict) + rng = ldict["rng"] + + assert isinstance(rng, range), "String is not a valid range!" 
+ return v + + +TOMLStringRange = Annotated[str, AfterValidator(check_range)] +TOMLDirectValue = Union[int, str, bool, List[Union[int, str, bool]]] + + +class TOMLGenericSettingType(str, Enum): + CONSTANT = "const" + LIST = "list" + GENERATOR = "gen" + + +def _execute_inline_code( + code_str, +): + ldict = {} + try: + cmd = f"var = {code_str}" + exec(cmd, globals(), ldict) + except BaseException: + raise ValueError(f"Failed to execute code: \"{cmd}\"") + return ldict["var"] + + +class TOMLExcludeWhen(BaseModel): + generic: str + value: str + + @model_validator(mode="after") + def validate_lambda(self) -> Self: + lmbda = _execute_inline_code(self.value) + + if not callable(lmbda): + raise ValueError("Generator value must be a function") + + if not isinstance(lmbda(42), bool): + raise ValueError("Generator value (function) must return boolean value") + + self.value = lmbda + return self + + +class TOMLGenericSetting(BaseModel): + type: Annotated[TOMLGenericSettingType, Field(default=TOMLGenericSettingType.CONSTANT)] + value: TOMLDirectValue + range: Optional[str] = Field(None) + exclude_when: Optional[TOMLExcludeWhen] = Field(None) + + @model_validator(mode="after") + def validate_type(self) -> Self: + if self.type == TOMLGenericSettingType.CONSTANT: + if self.range is not None: + raise ValueError( + "Range is specified for generic setting of type constant or list") + elif self.type == TOMLGenericSettingType.LIST: + if self.range is not None: + raise ValueError( + "Range is specified for generic setting of type constant or list") + + if not isinstance(self.value, list): + raise ValueError("Generic of type list must be of type list (wait what?)") + elif self.type == TOMLGenericSettingType.GENERATOR: + if not isinstance(self.value, str) or self.range is None: + raise ValueError( + "Generator value must be a string and range must be specified") + + # Lamda function validation + lmbda = _execute_inline_code(self.value) + + if not callable(lmbda): + raise 
ValueError("Generator value must be a function") + + if not isinstance(lmbda(42), int): + raise ValueError("Generator value (function) must return an integer") + + self.value = lmbda + + # Range validation + rng = _execute_inline_code(self.range) + + try: + _ = (e for e in rng) + except TypeError: + raise ValueError("Range of generator must be valid python iterable!") + + self.range = rng + + return self + + +TOMLUnionSetting = Union[TOMLGenericSetting, TOMLDirectValue] + + +class TOMLCombination(BaseModel): + name: str + description: Optional[str] = Field(default=None) + settings: List[Union[str, List[str]]] + tests: Optional[List[str]] = Field(default=None) + + +class TOMLVerRndSetting(BaseModel): + tests_allowed: List[str] + amount: float + + +class TOMLVerSettings(BaseModel): + tests: List[str] + + +class TOMLVer(BaseModel): + settings: TOMLVerSettings + rnd: Optional[TOMLVerRndSetting] = Field(None) + combinations: List[TOMLCombination] + + +class TOMLSynthSettings(BaseModel): + combinations: List[TOMLCombination] + + +class TOMLBuildSystem(BaseModel): + synth_folder: str = Field(default="synth") + ver_folder: str = Field(default="uvm") + ver_fdo_file: str = Field(default="top_level.fdo") + + +class TOMLGenerics(BaseModel): + asserts: List[str] + + +class NdkXBuildTOML(BaseModel): + settings: Dict[str, Dict[str, TOMLUnionSetting]] + ver_settings: Optional[Dict[str, Dict[str, TOMLUnionSetting]]] = Field(default=None) + synth_settings: Optional[Dict[str, Dict[str, TOMLUnionSetting]]] = Field(default=None) + ver: TOMLVer + synth: TOMLSynthSettings + build_system: Optional[TOMLBuildSystem] = Field(default=TOMLBuildSystem()) + generics: Optional[TOMLGenerics] = Field(default=None) From a0ae947d2432465a135c1eb6dd8a7491b0b77ae7 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Mon, 5 Aug 2024 13:59:54 +0200 Subject: [PATCH 02/11] harp/config_transform: [FEATURE] Create config transformations for generating combinations --- .../comp_settings/config_transform.py | 350 
++++++++++++++++++ 1 file changed, 350 insertions(+) create mode 100644 build/scripts/ndk_xbuild/comp_settings/config_transform.py diff --git a/build/scripts/ndk_xbuild/comp_settings/config_transform.py b/build/scripts/ndk_xbuild/comp_settings/config_transform.py new file mode 100644 index 000000000..2a959c03b --- /dev/null +++ b/build/scripts/ndk_xbuild/comp_settings/config_transform.py @@ -0,0 +1,350 @@ +# File: config_transform.py +# Author(s): Oliver Gurka +# Copyright: (C) 2024 CESNET, z.s.p.o. + +""" +Classes and functions, which assist in generating combinations +of entity generics settings. +""" + +from .config_description import ( + TOMLUnionSetting, + TOMLGenericSetting, + TOMLGenericSettingType, + TOMLDirectValue, + NdkXBuildTOML, + TOMLCombination +) +from typing import Dict, List, Optional, Tuple +import pandas as pd +import logging +import re + +_global_logger = logging.getLogger("NdkXBuild(ConfigTransform)") + + +def cartesian_product(dfs: List[pd.DataFrame]) -> pd.DataFrame: + """ + Perform a cartesian product on a list of DataFrames. + + Parameters: + *dfs : Tuple[pd.DataFrame] + The DataFrames to perform the cartesian product on. + + Returns: + pd.DataFrame + The resulting DataFrame after performing the cartesian product. 
+ """ + + if len(dfs) == 1: + r = dfs[0].copy(True) + return r + elif len(dfs) == 0: + raise ValueError("Can't merge zero data frames!") + + # Check for unique column names across all DataFrames + all_columns = set() + for df in dfs: + columns = set(df.columns) + if not columns.isdisjoint(all_columns): + raise ValueError("All DataFrames must have unique column names") + all_columns.update(columns) + + # Initialize the result with the first DataFrame + result = dfs[0].copy(True) + result["_key"] = 1 + + for df in dfs[1:]: + # Add a key column for the merge + df["_key"] = 1 + # Perform the cross join and drop the key column + result = pd.merge(result, df, on='_key') + + result = result.drop(["_key"], axis=1) + + return result + + +class NdkXBuildConfig: + def __init__( + self, + toml_conf: NdkXBuildTOML, + ): + # keys - setting name, values - pandas dataframes of all combinations + self.default_generics = pd.DataFrame.from_dict( + {k: [v] for k, v in toml_conf.settings["default"].items()}) + self.settings = { + k: NdkXBuildSetting.get_setting_values( + v, + self.default_generics) for k, + v in toml_conf.settings.items()} + self.ver_settings = { + k: NdkXBuildSetting.get_setting_values( + v, + self.default_generics) for k, + v in toml_conf.ver_settings.items()} if toml_conf.ver_settings else None + self.synth_settings = { + k: NdkXBuildSetting.get_setting_values( + v, + self.default_generics) for k, + v in toml_conf.synth_settings.items()} if toml_conf.synth_settings else None + self.ver_combinations = [ + NdkXBuildCombination( + comb, + self.settings, + self.ver_settings, + self.default_generics) for comb in toml_conf.ver.combinations] + self.synth_combinations = [ + NdkXBuildCombination( + comb, + self.settings, + self.ver_settings, + self.default_generics) for comb in toml_conf.synth.combinations] + + if toml_conf.generics: + self.assert_filter(toml_conf.generics.asserts) + + def assert_filter( + self, + asserts: List[str], + ): + for comb in self.ver_combinations: 
+ for a in asserts: + ares = comb.generics_df.eval(a) + comb.generics_df = comb.generics_df[ares] + + def debug_print(self): + for comb in self.ver_combinations: + print(comb.generics_df) + + for comb in self.synth_combinations: + print(comb.generics_df) + + +def transform_list( + input_list, +): + prefix = [] + to_generate = [] + + for el in input_list: + if isinstance(el, str): + prefix.append(el) + else: + to_generate.append(el) + + if len(prefix) == 0: + raise ValueError( + "Prefix must not be empty! If you wish to have no common combination, use \"\".") + + def remove_empty_strings(nested_list): + return [[item for item in inner_list if item != ""] + for inner_list in nested_list] + + rwip = [prefix] + final_result = [] + + for bi in to_generate: + result = [sublist_a + [elem_b] for sublist_a in rwip for elem_b in bi] + final_result.extend(result) + rwip.extend(result) + rwip = remove_empty_strings(result) + rwip = [list(x) for x in set(tuple(x) for x in rwip) if x] + + return remove_empty_strings(rwip) + + +class NdkXBuildCombination: + def __init__( + self, + toml_comb: TOMLCombination, + settings: Dict[str, pd.DataFrame], + additional_settings: Dict[str, pd.DataFrame], + defaults: pd.DataFrame, + ) -> None: + + _global_logger.debug(f"Generating combination {toml_comb.name}") + self.generics_df = self.get_combination_generics( + toml_comb.settings, settings, additional_settings, defaults) + self.fill_defaults(self.generics_df, defaults) + self.name = toml_comb.name + self.description = toml_comb.description + self.tests = toml_comb.tests + + @classmethod + def fill_defaults( + cls, + gen_df: pd.DataFrame, + defaults: pd.DataFrame, + ): + for dcol in defaults.columns: + if dcol not in gen_df.columns: + gen_df[dcol] = defaults[dcol][0] + + @classmethod + def _search_setting( + cls, + setting_name: str, + settings: Dict[str, pd.DataFrame], + ver_settings: Optional[Dict[str, pd.DataFrame]], + ): + if setting_name in settings: + return settings[setting_name] + 
elif ver_settings is not None and setting_name in ver_settings: + return ver_settings[setting_name] + else: + raise KeyError( + f"Setting '{setting_name}' not found in settings or ver_settings.") + + @classmethod + def _get_range( + cls, + comb_str: str, + ) -> Tuple[str, Optional[int], Optional[int]]: + """Parses range from combination name specification.""" + + pattern = r'^(.*?)(\[(\d+)(:(\d+))?\])?$' + + # Use re.match to apply the pattern + match = re.match(pattern, comb_str) + + if match: + left_part = match.group(1) + start_number = match.group(3) + end_number = match.group(5) + + # Check if indices are valid + if start_number is None: + return left_part, None, None + elif start_number is not None and end_number is None: + return left_part, int(start_number), int(start_number) + 1 + elif start_number is not None and end_number is not None: + return left_part, int(start_number), int(end_number) + else: + raise ValueError(f"Invalid combination format \"{comb_str}\"!") + else: + # Invalid format if the pattern does not match + raise ValueError(f"Invalid combination format \"{comb_str}\"!") + + @classmethod + def _get_comb_ranges( + cls, + partial_combinations: List[List[str]] + ) -> List[List[Tuple[str, Optional[int], Optional[int]]]]: + """Parses partial combination slices.""" + + return [[cls._get_range(key) for key in setlist] + for setlist in partial_combinations] + + @classmethod + def get_combination_generics( + cls, + setting_list: List[str], + settings: Dict[str, pd.DataFrame], + ver_settings: Optional[Dict[str, pd.DataFrame]], + defaults: pd.DataFrame, + ): + """Transforms combination into pandas dataframe containing all possible + generic settings.""" + + settings_to_apply = [x for x in setting_list] + partial_combinations = transform_list(settings_to_apply) + partial_combinations = cls._get_comb_ranges(partial_combinations) + + partial_comb_dfs = list() + # Iterate through the setting list and check both dictionaries for the + # keys + for setlist in 
partial_combinations: + partial_comb = [] + for key, _, _ in setlist: + partial_comb.append( + NdkXBuildCombination._search_setting( + key, settings, ver_settings).copy(True)) + + partial_comb_dfs.append(partial_comb) + + # Filter out unwanted rows in dfs + filtered_dfs = list() + for i, setlist in enumerate(partial_comb_dfs): + partial_list = [] + for df_i, df in enumerate(setlist): + _, start_ind, end_ind = partial_combinations[i][df_i] + if start_ind is None: + partial_list.append(df) + else: + partial_list.append(df.iloc[start_ind:end_ind]) + filtered_dfs.append(partial_list) + + partial_combinations = [cartesian_product(x) for x in filtered_dfs] + for x in partial_combinations: + cls.fill_defaults(x, defaults) + + return pd.concat(partial_combinations, + ignore_index=True).drop_duplicates() + + +class NdkXBuildSetting: + @classmethod + def check_generic_presence( + cls, + generics: List[str], + defaults: List[str], + ): + for gen_name in generics: + # Skip type - fake generic + if gen_name == "type": + continue + if gen_name not in defaults: + raise ValueError( + f"Generic {gen_name} not in default settings!") + + @classmethod + def get_setting_values( + cls, + generics: Dict[str, TOMLUnionSetting], + defaults: Dict[str, TOMLDirectValue], + ) -> pd.DataFrame: + NdkXBuildSetting.check_generic_presence( + generics.keys(), defaults.keys()) + # Generate generic values + if "type" in generics.keys(): + if generics["type"] == "list": + generics.pop("type") + return pd.DataFrame.from_dict(generics) + else: + raise ValueError("Unknown type of setting!") + else: + generics_values = [ + NdkXBuildGeneric.generate_values( + gname, + values) for gname, + values in generics.items()] + # Create cartesian product of values + return cartesian_product(generics_values) + + +class NdkXBuildGeneric: + @classmethod + def generate_values( + cls, + name: str, + setting: TOMLUnionSetting + ) -> pd.DataFrame: + values = None + + if isinstance(setting, TOMLGenericSetting): + # Generate 
possible values + if setting.type == TOMLGenericSettingType.CONSTANT: + values = [setting.value] + elif setting.type == TOMLGenericSettingType.LIST: + values = setting.value + elif setting.type == TOMLGenericSettingType.GENERATOR: + values = [setting.value(x) for x in setting.range] + else: + raise ValueError( + f"Incorrect value of generic type, got {setting.type}") + else: + # Constant + values = [setting] + + return pd.DataFrame(values, columns=[name]) From 0a92648892cbdcdeb1b90aa45d99dea7e0789847 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Mon, 5 Aug 2024 14:06:58 +0200 Subject: [PATCH 03/11] harp/build_adapters: [FEATURE] introduce abstract class representing build tool adapter --- .../ndk_xbuild/build_adapters/__init__.py | 0 .../build_adapters/build_adapter.py | 43 +++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 build/scripts/ndk_xbuild/build_adapters/__init__.py create mode 100644 build/scripts/ndk_xbuild/build_adapters/build_adapter.py diff --git a/build/scripts/ndk_xbuild/build_adapters/__init__.py b/build/scripts/ndk_xbuild/build_adapters/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/scripts/ndk_xbuild/build_adapters/build_adapter.py b/build/scripts/ndk_xbuild/build_adapters/build_adapter.py new file mode 100644 index 000000000..3a5acb35d --- /dev/null +++ b/build/scripts/ndk_xbuild/build_adapters/build_adapter.py @@ -0,0 +1,43 @@ +# File: build_adapter.py +# Author(s): Oliver Gurka +# Copyright: (C) 2024 CESNET, z.s.p.o. + +""" +Description: Abstract class describing behaviour of generic build adapter. 
+""" + +from abc import ABC, abstractmethod +from comp_settings.config_transform import NdkXBuildConfig + + +class NdkXBuildAdapter(ABC): + @abstractmethod + def run_set_generics( + self, + comb_id: int, + ): + pass + + @abstractmethod + def run_tool(self): + pass + + @abstractmethod + def run_report(self) -> bool: + pass + + +class NdkXBuildRunner(ABC): + def __init__( + self, + config: NdkXBuildConfig, + ) -> None: + self.config: NdkXBuildConfig = config + + @abstractmethod + def run(self): + pass + + @abstractmethod + def report(self): + pass From 8ec71a6c4dcb946e6e982e0c1a0a86d1d37a5899 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Tue, 13 Aug 2024 22:47:33 +0200 Subject: [PATCH 04/11] build/Modelsim: [FEATURE] Enable setting generics by setting generics variable --- build/Modelsim.inc.fdo | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/build/Modelsim.inc.fdo b/build/Modelsim.inc.fdo index bad780873..781ee1997 100644 --- a/build/Modelsim.inc.fdo +++ b/build/Modelsim.inc.fdo @@ -427,7 +427,24 @@ proc nb_sim_parse_parameters {sim_flags} { echo "============================" } -proc nb_sim_main {{user_sim_flags SIM_FLAGS}} { +proc nb_sim_expand_generics {{vsim_flags VSIM_FLAGS} {sim_flags SIM_FLAGS}} { + upvar 1 $vsim_flags VSIM_FLAGS + upvar 1 $sim_flags SIM_FLAGS + + if {[file exists "./tmp/generics.fdo"]} { + source "./tmp/generics.fdo" + puts "HARP: Found file with entity parameters settings." 
+ } else { + return + } + + foreach {key value} [array get SIM_GENERICS] { + puts "$key=$value" + lappend VSIM_FLAGS "-g$SIM_FLAGS(SIM_MODULE)/$key=$value" + } +} + +proc nb_sim_main {{user_sim_flags SIM_FLAGS} {sim_generics SIM_GENERICS}} { # Let VSIM_FLAGS variable to be accessible in top level stack frame for running vsim by hand upvar 1 VSIM_FLAGS VSIM_FLAGS @@ -448,7 +465,10 @@ proc nb_sim_main {{user_sim_flags SIM_FLAGS}} { nb_vsim_prepare nb_vsim_compile_project SIM_FLAGS HIERARCHY + nb_vsim_set_flags VSIM_FLAGS SIM_FLAGS + nb_sim_expand_generics VSIM_FLAGS SIM_FLAGS + # Run simulation if {!$SIM_FLAGS(VSIM_MANUAL_START)} { From 2ed7a27fe6303ad30eabdb1110414bed0f15a7e5 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Tue, 13 Aug 2024 22:47:54 +0200 Subject: [PATCH 05/11] harp/build_adapters: [FEATURE] Add questasim build adapter --- .../ndk_xbuild/build_adapters/questasim.py | 264 ++++++++++++++++++ 1 file changed, 264 insertions(+) create mode 100644 build/scripts/ndk_xbuild/build_adapters/questasim.py diff --git a/build/scripts/ndk_xbuild/build_adapters/questasim.py b/build/scripts/ndk_xbuild/build_adapters/questasim.py new file mode 100644 index 000000000..b8f17342d --- /dev/null +++ b/build/scripts/ndk_xbuild/build_adapters/questasim.py @@ -0,0 +1,264 @@ +# File: questasim.py +# Author(s): Oliver Gurka +# Copyright: (C) 2024 CESNET, z.s.p.o. + +""" +QuestaSim and ModelSim build adapter and runner. Creates interface +between NdkXBuild script and QuestaSim. 
+""" + + +import pandas as pd +import logging +import os +import numpy as np +import re +import random +import cowsay + +from colorama import Fore, Style +from typing import Union, List +from comp_settings.config_transform import NdkXBuildConfig +from pathlib import Path + +from .build_adapter import NdkXBuildAdapter, NdkXBuildRunner + +_global_logger = logging.getLogger("BuildAdapter(QuestaSim)") + + +class NdkXBuildQuestasimAdapter(NdkXBuildAdapter): + VER_SUCCESS_REGEX = re.compile( + r'(Verification finished successfully|VERIFICATION SUCCESS)', + re.IGNORECASE) + VER_SV_SEED_REGEX = r'Sv_Seed\s*=\s*(\d+)' + + TMP_FOLDER_NAME = "tmp" + GENERICS_FILE_NAME = "generics.fdo" + + def __init__( + self, + fdo_file_path: Path, + ) -> None: + self.fdo_file_path: Path = fdo_file_path + self.curr_transcript_path = None + self.vsim_succ = True + + def _value_formatter( + self, + value: Union[int, bool, str, List[Union[int, bool, str]]], + ) -> str: + if isinstance(value, np.bool): + return f"{value}" + elif isinstance(value, str): + return value + elif isinstance(value, np.int64): + return f"{value}" + else: + print(type(value)) + raise NotImplementedError("Generics of type list are not supported yet!") + + def run_set_generics( + self, + generics: pd.Series, + ): + fdo_folder = self.fdo_file_path.parent + tmp_folder = fdo_folder / self.TMP_FOLDER_NAME + tmp_folder.mkdir(parents=True, exist_ok=True) + + generic_fdo = tmp_folder / self.GENERICS_FILE_NAME + self.generics = generics + + with open(generic_fdo, "w") as f: + for generic in generics.keys(): + value = generics[generic] + f.write(f"set SIM_GENERICS({generic}) \"{self._value_formatter(value)}\"\n") + f.write("\n") + + def run_tool( + self, + transcript_suffix: str, + gui=False, + print_output=False, + ): + transcript_name = f"transcript_{transcript_suffix}" + self.curr_transcript_path = self.fdo_file_path.parent / transcript_name + + quit_cmd = "quit -f;" if not gui else "" + vsim_gui_cmd = "-c" if not gui else "" + 
dev_null = "> /dev/null" if not print_output else "" + + vsim_do_command = f"do {self.fdo_file_path.absolute().as_posix()};{quit_cmd}" + logfile_param = f"-logfile \"{transcript_name}\"" + vsim_cmd = f"cd {self.fdo_file_path.parent}; vsim -do \"{vsim_do_command}\" {logfile_param} {vsim_gui_cmd} {dev_null}" + + _global_logger.debug(f"Running command:\n{vsim_cmd}") + ret_code = os.system(vsim_cmd) + self.vsim_succ = ret_code == 0 + if ret_code != 0: + _global_logger.critical("QuestaSim run into an error!") + raise ChildProcessError("QuestaSim run into an error!") + + def run_report(self) -> bool: + if self.vsim_succ: + return self._parse_transcript() + else: + sv_seed_series = pd.Series(["QUESTA FAILED"], index=["SV_SEED"]) + self.generics = self.generics.append(sv_seed_series) + return False + + def _parse_transcript(self) -> bool: + with open(self.curr_transcript_path, "r") as f: + content = f.read() + success = self.VER_SUCCESS_REGEX.search(content) + + if not success: + sv_seed_reg = re.search(self.VER_SV_SEED_REGEX, content) + sv_seed = "DONT_KNOW" + if sv_seed_reg: + sv_seed = sv_seed_reg.group(1) + else: + _global_logger.error("Could not find sv_seed value!") + success = False + + sv_seed_series = pd.Series([sv_seed], index=["SV_SEED"]) + self.generics = pd.concat([self.generics, sv_seed_series]) + + return success + + +class NdkXBuildQuestaRunner(NdkXBuildRunner): + COMB_SUCC_STR = Fore.GREEN + "SUCCESS" + Style.RESET_ALL + COMB_FAIL_STR = Fore.RED + "FAIL" + Style.RESET_ALL + + VER_FAILED_MESSAGES = [ + "Oops! Your RTL code just stumbled and fell flat on its face. Maybe it needs coding lessons?", + "Houston, we have a problem. Your RTL component seems to be speaking in riddles. Care to translate?", + "Congratulations! You've discovered a new way to confuse your RTL component. Impressive... but not quite what we're looking for.", + "Alert: RTL malfunction detected. 
Cause: Code that resembles a bowl of digital spaghetti.", + "Your RTL component just threw a tantrum. It refuses to cooperate until you fix that code.", + "Beep boop! RTL does not compute. Have you considered a career in abstract art instead?", + "Warning: RTL verification system has encountered an unexpected laugh track. Please rewrite and try again.", + "Plot twist! Your RTL code decided to go rogue. Maybe try asking it nicely to behave?", + "Ouch! Your RTL component just face-palmed itself. It's clearly embarrassed by your code.", + "Achievement unlocked: 'RTL Confusion Master'! Now, how about we aim for 'RTL Verification Success' instead?"] + + VER_SUCCESS_MESSAGES = [ + "Well, well, well... Your RTL code is surprisingly stubborn. Challenge accepted!", + "Hmph. Your RTL component refuses to crack. Are you sure you didn't accidentally create skynet?", + "Congratulations, your code is annoyingly resilient. Time to unleash my secret weapon: more coffee!", + "Your RTL laughs in the face of my tests. I'm not mad, I'm just... impressed. And a little mad.", + "Alert: Verification engineer's ego slightly bruised. RTL code remains irritatingly intact.", + "Is your RTL made of adamantium? Because my tests just bounced right off!", + "Warning: Unbreakable code detected. Preparing to question my entire career choice.", + "Plot twist! Your RTL is tougher than a Nokia 3310. What dark magic is this?", + "Bravo! Your code just earned the 'Verification Engineer's Nightmare' badge. Wear it proudly.", + "Achievement unlocked: 'The Unbreakable'. 
Now excuse me while I go sulk in the corner."] + + def __init__( + self, + config: NdkXBuildConfig, + top_level_fdo: Path, + gui: bool, + default_only: bool = False, + ) -> None: + super().__init__(config) + self.build_adapter: NdkXBuildQuestasimAdapter = NdkXBuildQuestasimAdapter(top_level_fdo) + self.gui = gui + self.run_default = default_only + self.failed_comb = pd.DataFrame( + columns=list( + self.config.ver_combinations[0].generics_df.columns) + + ["SV_SEED"]) + + def _run_comb( + self, + comb, + default=False, + default_only=False + ): + comb_name = comb.name if not default else "default" + comb_df = comb.generics_df if not default else comb + gui = self.gui if default_only else False + + internal_comb_count = len(comb_df.index) + failed_comb_count = 0 + print(f"Running combinations from group {comb_name}...") + for i in range(internal_comb_count): + # Run verification + self.build_adapter.run_set_generics(comb_df.iloc[i]) + transcript_suffix = f"{comb_name}{i}" + self.build_adapter.run_tool(transcript_suffix=transcript_suffix, gui=gui) + comb_part_succ = self.build_adapter.run_report() + + if not comb_part_succ: + self.failed_comb = pd.concat( + [self.failed_comb, self.build_adapter.generics.to_frame().T], ignore_index=True) + failed_comb_count += 1 + + # Print partial combination success + comb_result_str = self.COMB_SUCC_STR if comb_part_succ else self.COMB_FAIL_STR + print(f" combination {i}: {comb_result_str}") + + return internal_comb_count, failed_comb_count + + def _run_multiver(self) -> bool: + self._run_comb(self.config.default_generics, default=True) + + for comb in self.config.ver_combinations: + internal_comb_count, failed_comb_count = self._run_comb(comb) + # Report whole combination group + comb_result_str = self.COMB_SUCC_STR if failed_comb_count == 0 else self.COMB_FAIL_STR + print( + f"Combinations group {comb.name}: {comb_result_str} # {internal_comb_count - failed_comb_count} PASSED | {failed_comb_count} FAILED") + + def _run_ver(self): + 
self._run_comb(self.config.default_generics, default=True, default_only=True) + + def _run_specified_comb_from_csv(self): + pass + + def run(self): + if not self.run_default: + self._run_multiver() + else: + self._run_ver() + + def report(self) -> bool: + # Report whole multiver status + failed_comb_total = len(self.failed_comb.index) + multiver_success = failed_comb_total == 0 + + character = random.choice(list(cowsay.CHARS.keys())) + if multiver_success: + message = random.choice(self.VER_SUCCESS_MESSAGES) + message = cowsay.get_output_string( + character, Fore.GREEN + "SUCCESS! " + Style.RESET_ALL + message) + print(message) + return True + else: + csv_name = "failed_combinations.csv" + self.failed_comb.to_csv(csv_name, sep=",", encoding="utf-8") + # Message + message = random.choice(self.VER_FAILED_MESSAGES) + message = cowsay.get_output_string( + character, Fore.RED + "FAIL! " + Style.RESET_ALL + message) + print(message) + return False + + # def report_comb(self) -> bool: + # failed_comb_count = len(self.failed_combinations.index) + # character = random.choice(list(cowsay.CHARS.keys())) + + # if failed_comb_count == 0: + # message = random.choice(self.VER_SUCCESS_MESSAGES) + # message = cowsay.get_output_string(character, Fore.GREEN + "SUCCESS! " + Style.RESET_ALL + message) + # print(message) + # return True + # else: + # csv_name = f"failed_comb_{self.combination.name}.csv" + # self.failed_combinations.to_csv(csv_name, sep=",", encoding="utf-8") + # # Message + # message = random.choice(self.VER_FAILED_MESSAGES) + # message = cowsay.get_output_string(character, Fore.RED + "FAIL! 
" + Style.RESET_ALL + message) + # print(message) + # return False From 8dd22bd525862750f11e69eb48de2470f13dedfc Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Tue, 13 Aug 2024 22:17:42 +0200 Subject: [PATCH 06/11] harp: [FEATURE] Create prototype of unified build script --- .../scripts/ndk_xbuild/ndk_xbuild/__init__.py | 0 .../ndk_xbuild/ndk_xbuild/ndk_xbuild.py | 231 ++++++++++++++++++ build/scripts/ndk_xbuild/pyproject.toml | 29 +++ 3 files changed, 260 insertions(+) create mode 100644 build/scripts/ndk_xbuild/ndk_xbuild/__init__.py create mode 100644 build/scripts/ndk_xbuild/ndk_xbuild/ndk_xbuild.py create mode 100644 build/scripts/ndk_xbuild/pyproject.toml diff --git a/build/scripts/ndk_xbuild/ndk_xbuild/__init__.py b/build/scripts/ndk_xbuild/ndk_xbuild/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/build/scripts/ndk_xbuild/ndk_xbuild/ndk_xbuild.py b/build/scripts/ndk_xbuild/ndk_xbuild/ndk_xbuild.py new file mode 100644 index 000000000..0a537adb1 --- /dev/null +++ b/build/scripts/ndk_xbuild/ndk_xbuild/ndk_xbuild.py @@ -0,0 +1,231 @@ +# File: ndk_xbuild.py +# Author(s): Oliver Gurka +# Copyright: (C) 2024 CESNET, z.s.p.o. + +""" +Contains script called "harp". +""" + +import argparse +import tomli +import sys +import os +import logging +import pandas as pd + +from colorama import Fore, Style +from pathlib import Path +from comp_settings.config_description import NdkXBuildTOML +from comp_settings.config_transform import NdkXBuildConfig +from build_adapters.questasim import NdkXBuildQuestaRunner + + +def eprint(*args, **kwargs): + print(*args, file=sys.stderr, **kwargs) + + +def args_setup(): + parser = argparse.ArgumentParser() + + # Add general arguments to the top-level parser + parser.add_argument( + "-c", + "--config_file", + help="Path to ndk_xbuild TOML config file. 
Default is \'./harp.toml\'", + required=False, + default="./harp.toml", + action="store") + parser.add_argument("--gui", help="Launch tool with gui", action="store_true") + parser.add_argument( + "--dry", + help="Run combination evaluation and store them into .md or .csv files", + action="store", + choices=[ + "md", + "csv"]) + parser.add_argument( + "--log-level", + choices=[ + "DEBUG", + "INFO", + "WARNING", + "ERROR", + "CRITICAL"], + help="Set logging level", + required=False) + parser.add_argument("--comb", nargs=1, metavar=("COMB_NAME"), + help="Evaluates combinations and prints the one specified") + + # Create subparsers for each command + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("synth") + ver_parser = subparsers.add_parser("ver") + multiver_parser = subparsers.add_parser("multiver") + subparsers.add_parser("multisynth") + + # Add verification specific arguments to ver and multiver subparsers + for subparser in [ver_parser, multiver_parser]: + ver_args = subparser.add_argument_group("Verification specific arguments") + ver_args.add_argument( + "-r", + "--run-comb", + help="Load combination csv file and launch combination on given row.", + nargs=2, + metavar=("COMB_CSV", "COMB_INDEX"), + action="store") + ver_args.add_argument( + "-f", + "--failed-comb", + help="Run first or specified failed combination from given csv.", + nargs=2, + metavar=("COMB_CSV", "COMB_INDEX"), + action="store") + + return parser + + +def load_toml_config( + path: Path, +): + parsed_toml = None + + with open(path, "rb") as f: + parsed_toml = tomli.load(f) + + return NdkXBuildTOML(**parsed_toml) + + +def get_top_level_fdo( + parsed_toml: NdkXBuildTOML, +) -> Path: + cwd = Path(os.getcwd()) + top_level_fdo_path = cwd / parsed_toml.build_system.ver_folder / parsed_toml.build_system.ver_fdo_file + if not top_level_fdo_path.exists(): + raise FileNotFoundError( + f"Could not find top level fdo file on path: {top_level_fdo_path.absolute().as_posix()}") + + 
return top_level_fdo_path + + +def synth(): + pass + + +def multisynth(): + pass + + +def ver( + parsed_toml: NdkXBuildTOML, + args, + multiver, +): + + top_level_fdo = get_top_level_fdo(parsed_toml) + config = NdkXBuildConfig(parsed_toml) + runner = NdkXBuildQuestaRunner( + config, + top_level_fdo, + args.gui, + not multiver + ) + runner.run() + + if not args.gui: + a = runner.report() + exit(0 if a else 42) + + +def dry_run( + parsed_toml: NdkXBuildTOML, + ftype: str, +): + config = NdkXBuildConfig(parsed_toml) + ver_combs: pd.DataFrame = pd.concat( + [x.generics_df for x in config.ver_combinations] + [config.default_generics], ignore_index=True) + synth_combs: pd.DataFrame = pd.concat( + [x.generics_df for x in config.synth_combinations] + [config.default_generics], ignore_index=True) + + if ftype == "md": + ver_combs.to_markdown("ver_combinations.md") + synth_combs.to_markdown("synth_combinations.md") + else: + ver_combs.to_csv("ver_combinations.csv") + synth_combs.to_csv("synth_combinations.csv") + + +def failed_comb( + parsed_toml, + args, +): + cwd = Path.cwd() + comb_csv, index = args.failed_comb + csv_path = cwd / comb_csv + failed_comb = pd.read_csv(csv_path) + failed_comb.drop(columns=failed_comb.columns[0], axis=1, inplace=True) + selected_comb = failed_comb.iloc[[index]] + + top_level_fdo = get_top_level_fdo(parsed_toml) + config = NdkXBuildConfig(parsed_toml) + + # HACK Set selected combination as default and run default only + config.default_generics = selected_comb + runner = NdkXBuildQuestaRunner( + config, + top_level_fdo, + args.gui, + True + ) + runner.run() + + if not args.gui: + a = runner.report() + exit(0 if a else 42) + + +def print_comb( + parsed_toml: NdkXBuildTOML, + args, +): + config = NdkXBuildConfig(parsed_toml) + for comb in config.ver_combinations: + if comb.name == args.comb[0]: + print(comb.generics_df) + exit(0) + + print(Fore.RED + "Check combination name again, or...am I blind?" 
+ Style.RESET_ALL) + exit(1) + + +def main(): + parser = args_setup() + args = parser.parse_args() + + if args.log_level: + logging.basicConfig(level=args.log_level) + + toml_path = Path(args.config_file) + parsed_toml = load_toml_config(toml_path) + + if args.comb: + print_comb(parsed_toml, args) + + if args.dry: + dry_run(parsed_toml, args.dry) + return + + if args.failed_comb: + failed_comb(parsed_toml, args) + return + + if args.command in ["multiver", "ver"]: + ver(parsed_toml, args, args.command == "multiver") + else: + raise NotImplementedError("Other commands are not supported. Yet...") + + # conf.debug_print() + pass + + +if __name__ == "__main__": + main() diff --git a/build/scripts/ndk_xbuild/pyproject.toml b/build/scripts/ndk_xbuild/pyproject.toml new file mode 100644 index 000000000..4d9a90b5a --- /dev/null +++ b/build/scripts/ndk_xbuild/pyproject.toml @@ -0,0 +1,29 @@ +[build-system] +requires = ["setuptools >= 61.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "ndk-xbuild" +version = "1.0.0.dev1" +dependencies = [ + "tomli", + "pydantic", + "pandas", + "cowsay", + "colorama" +] +requires-python = ">= 3.9" +description = "Smart build system developed by Liberouter supporting QuestaSim, Quartus and Vivado." 
+authors = [ + {name = "Oliver Gurka", email = "oliver.gurka@cesnet.cz"} +] +maintainers = [ + {name = "Oliver Gurka", email = "oliver.gurka@cesnet.cz"} +] + +[tool.setuptools.packages.find] +include = ["build_adapters*", "comp_settings*", "ndk_xbuild*"] +namespaces = false + +[project.scripts] +harp = "ndk_xbuild:ndk_xbuild.main" From 6fc99f325c11a6004a015ef6a181e7fc24fbcd14 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Wed, 14 Aug 2024 13:43:36 +0200 Subject: [PATCH 07/11] harp: [DOC] Add rtlbuild readme file --- build/scripts/ndk_xbuild/README.rst | 318 ++++++++++++++++++++++++++++ 1 file changed, 318 insertions(+) create mode 100644 build/scripts/ndk_xbuild/README.rst diff --git a/build/scripts/ndk_xbuild/README.rst b/build/scripts/ndk_xbuild/README.rst new file mode 100644 index 000000000..618a21bb2 --- /dev/null +++ b/build/scripts/ndk_xbuild/README.rst @@ -0,0 +1,318 @@ +=============================== +HDL Application Runner Platform +=============================== + +A script for HDL build and verification processes. + +Description +----------- + +harp is a Python script that provides functionality for HDL synthesis +and verification processes. It supports verification and multiverification and is prepared for +extension with various synthesis and simulation tools. + +Features +-------- + +- Supports multiple commands: ver, multiver (TODO synth and multisynth) +- Configurable via TOML configuration file +- Run with GUI support +- Dry run capability with all combinations stored in Markdown or CSV format +- Logging level configuration for debugging +- Combination dry evaluation +- Integration with QuestaSim for verification + +Installation +------------ + +To use harp, ensure you have Python installed along with the following dependencies: + +- tomli +- pydantic +- pandas +- cowsay +- colorama +- argparse + +You can install harp using this command (dependencies are installed automatically): + +.. code-block:: bash + + pip install . 
+
+Usage
+-----
+
+To use the script, one must create a configuration file. Default name for the config file
+is "harp.toml" in the root folder of the entity. To create this file, read section
+"Entity configuration". After creating this file, to run verification with default settings
+just run this command:
+
+Example:
+
+.. code-block:: bash
+
+    harp ver # command line only or
+    harp ver --gui # with gui
+
+To run multi verification with all combinations:
+
+.. code-block:: bash
+
+    harp multiver # no gui only
+
+After multi verification finished, if any combination verification failed,
+all failed combinations are stored inside "failed_comb.csv" file along with system verilog seed.
+
+To run failed combination, just run:
+
+.. code-block:: bash
+
+    harp --gui --failed-comb ./failed_comb.csv 0 # run first failed combination
+
+For now, one must set random seed manually in "top_level.fdo". Automatic setting
+of seed can be added upon request.
+
+For debugging purposes you can store all combinations in a markdown file or a csv file using
+these commands:
+
+.. code-block:: bash
+
+    harp --dry md # markdown, human readable format
+    harp --dry csv # csv, machine readable format
+
+Another good debugging feature is listing generic settings of one combination:
+
+.. code-block:: bash
+
+    harp --comb regions # Show generics for combination named "regions"
+
+In the case of some errors during combination evaluation, it may be useful to increase
+logging level:
+
+.. code-block:: bash
+
+    harp multiver --log-level DEBUG
+
+Verification setup
+~~~~~~~~~~~~~~~~~~
+
+Generics of entities are passed to QuestaSim through command line and they modify
+generics of top level module. For our typical usage, testbench of UVM verification
+must be parametrized and parameters must be passed to tests. All tests must have
+"uvm_component_registry" macro:
+
+.. 
code-block:: sv

+    typedef uvm_component_registry#(test::ex_test #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH), "test::ex_test") type_id;
+
+The testbench may look like this:
+
+.. code-block:: sv
+
+    module testbench #(
+        int unsigned MVB_ITEMS,
+        int unsigned MVB_ITEM_WIDTH_RAW,
+        int unsigned MFB_REGIONS,
+        int unsigned MFB_REGION_SIZE,
+        int unsigned MFB_BLOCK_SIZE,
+        int unsigned MFB_ITEM_WIDTH,
+        int unsigned MFB_ALIGNMENT,
+        int unsigned MFB_META_WIDTH,
+        string DEVICE
+    );
+
+    ...
+
+    initial begin
+        ...
+
+Full example of setup verification for harp can be found in the folder "comp/mvb_tools/flow/mvb2mfb".
+
+Entity configuration
+--------------------
+
+Build system
+~~~~~~~~~~~~
+One can define the folders in which necessary files are contained. This configuration
+is optional and has default values.
+
+- synth_folder: Synthesis folder containing Makefile (default: "synth")
+- ver_folder: Verification folder containing verification .fdo file (default: "uvm")
+- ver_fdo_file: Verification FDO file name (default: "top_level.fdo")
+
+Example:
+
+.. code-block:: toml
+
+    [build_system]
+    synth_folder = "synth"
+    ver_folder = "ver"
+    ver_fdo_file = "custom_top_level.fdo"
+
+Generics setting
+~~~~~~~~~~~~~~~~
+
+HARP currently supports generics of type int (VHDL - integer, natural), string and boolean.
+Each generic can be set to a value from a defined set and it will be set to every value from
+that set. Sets can be defined in multiple ways:
+
+- constant,
+- list,
+- generator.
+
+Constant defines a set with only one value. A set defined by a list is equivalent to a set
+defined by an enumeration of values. The generator produces a set that is the image
+of a function whose definition scope can be defined by a Python iterable object.
+
+Example:
+
+.. 
code-block:: toml

+    # Constant, creates set [48]
+    [settings.mvb]
+    DATA_WIDTH = 48
+
+    # List, creates set [48, 64, 256]
+    [settings.mvb]
+    DATA_WIDTH = {type = "list", value = [48, 64, 256]}
+
+    # Alternative and equivalent list setting
+    [settings.mvb.DATA_WIDTH]
+    type = "list"
+    value = [48, 64, 256]
+
+    # Generator, creates set [64, 128, 256]
+    [settings.mvb.DATA_WIDTH]
+    type = "gen"
+    # Definition of function
+    value = "lambda x : 2**x"
+    # Definition scope of the function
+    range = "range(6, 8+1)"
+
+As you might have noticed, individual settings are created as follows:
+
+.. code-block:: toml
+
+    [settings.<setting_name>.<generic_name>]
+
+Setting name must be unique. Also one must create a "default" setting, which has two functions:
+- Enumeration of available generics of the entity
+- Default values for generics, which are not modified by a setting.
+Default setting must be defined only with constants.
+
+It is important to note that multiple combinations can be created within a single setting.
+Within a single setting, the Cartesian product of the sets of values of all generics is performed.
+
+.. code-block:: toml
+
+    # Generator, creates set [64, 128]
+    [settings.mvb.ITEM_WIDTH]
+    type = "gen"
+    value = "lambda x : 2**x"
+    range = "range(6, 7+1)"
+
+    # Generator, creates set [1, 2]
+    [settings.mvb.ITEMS]
+    type = "gen"
+    value = "lambda x : 2**x"
+    range = "range(0, 1+1)"
+
+In the example above, setting "mvb" will contain combinations:
+
+    ======= ============
+    ITEMS   ITEM_WIDTH
+    ======= ============
+    1       64
+    1       128
+    2       64
+    2       128
+    ======= ============
+
+There is also one special type of setting, which contains individual combinations, without
+Cartesian product performed between sets.
+
+.. 
code-block:: toml

+    # Each column is one combination
+    [settings.regions]
+    type = "list"
+    REGIONS = [1, 2, 1, 1, 1, 1]
+    REGION_SIZE = [8, 8, 1, 2, 2, 4]
+    BLOCK_SIZE = [8, 8, 8, 8, 4, 8]
+    ITEM_WIDTH = [8, 8, 8, 8, 8, 8]
+
+Creating combinations
+~~~~~~~~~~~~~~~~~~~~~
+
+Setting combinations are created separately for verification and synthesis. Each combination
+must have a name and a list of settings to apply. By default, the Cartesian product is performed
+between the settings. It implies that applied settings must not have overlapping generics.
+Combinations are specified in a list of tables. A simple verification combination definition
+can look like this:
+
+.. code-block:: toml
+
+    [[ver.combinations]]
+    name = "Regions"
+    description = "This description is totally optional, but may be useful"
+    settings = ["regions", "fifox_size"]
+
+When creating a synthesis combination, one just replaces "ver" with "synth" in the table creation.
+
+One can also create combinations in a "distributive" way. For example, one wishes to create combinations like this:
+
+- regions
+- regions, pipe_on
+- regions, output_reg
+- regions, special_device
+
+Instead of creating an individual combination for each case, this can be written like this:
+
+.. code-block:: toml
+
+    [[ver.combinations]]
+    name = "Regions"
+    description = "This description is totally optional, but may be useful"
+    settings = ["regions", ["", "pipe_on", "output_reg", "special_device"]]
+
+The empty string represents no setting, which enables creating a combination with setting "regions".
+One can use the same principle with the empty string for grouping multiple combinations without any common
+setting.
+
+Verification can have a special random combination. It can be useful, if one manages to create huge
+amounts of combinations, which would run almost forever. This combination will select a given amount
+of combinations randomly. This feature is to be implemented as deemed non-essential.
+
+.. 
code-block:: toml + + [ver.rnd] + settings = ["regions", "big_setting0", "big_setting1", ...] + # Select 8 random combinations + amout = 8 + +Special directives +~~~~~~~~~~~~~~~~~~ + +HDL components usually have some restrictions for their generics. Creating exact settings +and combinations without violating those restrictions may difficult. Thus, one can specify +assertions on the generics, which must be true. Combinations which violate those assertions +are filtered out. + +.. code-block:: toml + + [generics] + asserts = [ + """(MVB_ITEM_WIDTH_RAW >= MFB_REGION_SIZE * MFB_BLOCK_SIZE * MFB_ITEM_WIDTH) \ + or (MFB_ALIGNMENT == MFB_REGION_SIZE * MFB_BLOCK_SIZE)""", + "(MVB_ITEM_WIDTH_RAW % MFB_ITEM_WIDTH) == 0", + """(MFB_ALIGNMENT <= MFB_REGION_SIZE*MFB_BLOCK_SIZE) and \ + (MFB_ALIGNMENT >= MFB_BLOCK_SIZE)""", + ] + +Example configuration can be found in "comp/mvb_tools/flow/mvb2mfb/harp.toml" + +Authors +------- + +- Oliver Gurka From 2d08fb2e8c0907b3760d2f7f80160b1cecc8de40 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Thu, 8 Aug 2024 13:34:48 +0200 Subject: [PATCH 08/11] UVM(MFB_PIPE): [PROTOTYPE] Create harp config example --- comp/mfb_tools/flow/pipe/harp.toml | 64 ++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 comp/mfb_tools/flow/pipe/harp.toml diff --git a/comp/mfb_tools/flow/pipe/harp.toml b/comp/mfb_tools/flow/pipe/harp.toml new file mode 100644 index 000000000..47e001e37 --- /dev/null +++ b/comp/mfb_tools/flow/pipe/harp.toml @@ -0,0 +1,64 @@ +# rtlproject.toml: File verification and synthesis combinations +# Copyright (C) 2024 CESNET z. s. p. o. 
+# Author(s): Oliver Gurka +# +# SPDX-License-Identifier: BSD-3-Clause + +[settings.default] +REGIONS = 4 +REGION_SIZE = 8 +BLOCK_SIZE = 8 +ITEM_WIDTH = 8 +FAKE_PIPE = false +USE_DST_RDY = true +PIPE_TYPE = "SHREG" +DEVICE = "ULTRASCALE" + +[settings.regions] +type = "list" +REGIONS = [1, 2, 1, 1, 1, 1] +REGION_SIZE = [8, 8, 1, 2, 2, 4] +BLOCK_SIZE = [8, 8, 8, 8, 4, 8] +ITEM_WIDTH = [8, 8, 8, 8, 8, 8] + +[settings.pcie] +REGIONS = 2 +REGION_SIZE = 1 +BLOCK_SIZE = 8 +ITEM_WIDTH = 32 + +[settings.pipe_type_reg] +PIPE_TYPE = "REG" + +[settings.fake_pipe_up] +FAKE_PIPE = true + +[settings.use_dst_rdy_down] +USE_DST_RDY = false + +[[ver.combinations]] +name = "haha" +settings = ["regions"] + +[[ver.combinations]] +name = "haha1" +settings = ["regions", "fake_pipe_up"] + +[[ver.combinations]] +name = "haha2" +settings = ["regions[4:6]", "use_dst_rdy_down"] + +[[ver.combinations]] +name = "haha3" +settings = ["pcie", ["fake_pipe_up", "use_dst_rdy_down"]] + +[[ver.combinations]] +name = "haha4" +settings = ["", ["fake_pipe_up", "use_dst_rdy_down", "pipe_type_reg"]] + +[ver.settings] +tests = ["ex_test"] + +[[synth.combinations]] +name = "dfasd" +settings = ["regions"] From 19899a733d9498d2deddbca310651463b1810181 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Thu, 8 Aug 2024 15:04:53 +0200 Subject: [PATCH 09/11] UVM(MVB2MFB): [FEATURE] Add harp config --- comp/mvb_tools/flow/mvb2mfb/harp.toml | 56 +++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 comp/mvb_tools/flow/mvb2mfb/harp.toml diff --git a/comp/mvb_tools/flow/mvb2mfb/harp.toml b/comp/mvb_tools/flow/mvb2mfb/harp.toml new file mode 100644 index 000000000..725a480e3 --- /dev/null +++ b/comp/mvb_tools/flow/mvb2mfb/harp.toml @@ -0,0 +1,56 @@ +# rtlproject.toml: File verification and synthesis combinations +# Copyright (C) 2024 CESNET z. s. p. o. 
+# Author(s): Oliver Gurka +# +# SPDX-License-Identifier: BSD-3-Clause + +[generics] +asserts = [ + """(MVB_ITEM_WIDTH_RAW >= MFB_REGION_SIZE * MFB_BLOCK_SIZE * MFB_ITEM_WIDTH) \ + or (MFB_ALIGNMENT == MFB_REGION_SIZE * MFB_BLOCK_SIZE)""", + "(MVB_ITEM_WIDTH_RAW % MFB_ITEM_WIDTH) == 0", + """(MFB_ALIGNMENT <= MFB_REGION_SIZE*MFB_BLOCK_SIZE) and \ + (MFB_ALIGNMENT >= MFB_BLOCK_SIZE)""", +] + +[settings.default] +MFB_REGIONS = 4 +MFB_REGION_SIZE = 8 +MFB_BLOCK_SIZE = 8 +MFB_ITEM_WIDTH = 8 +MFB_ALIGNMENT = 64 +MFB_META_WIDTH = 12 +DEVICE = "ULTRASCALE" +MVB_ITEMS = 4 +MVB_ITEM_WIDTH_RAW = 536 + +[settings.regions] +type = "list" +MVB_ITEMS = [1, 2] +MFB_REGIONS = [1, 2] + +[settings.align_low] +MFB_ALIGNMENT = 8 + +[settings.mvb_item_small] +MVB_ITEM_WIDTH_RAW = 48 + +[[ver.combinations]] +name = "regions" +settings = ["regions"] + +[[ver.combinations]] +name = "small" +settings = ["", ["align_low", "mvb_item_small"]] + +[[ver.combinations]] +name = "region_big_small" +settings = ["regions[0]", ["mvb_item_small", "align_low"]] + +[ver.settings] +tests = ["test_basic", "test_perf"] + +[[synth.combinations]] +name = "Some basic" +description = "Space search of resource usage and timing for Agilex and Ultrascale devices" +settings = ["regions"] From 958d15e4881e04154950e3b6c9ef5ab5a29a0dfa Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Wed, 7 Aug 2024 15:00:29 +0200 Subject: [PATCH 10/11] UVM(MVB2MFB): [FEATURE] Prepare verification for harp system --- comp/mvb_tools/flow/mvb2mfb/uvm/tbench/dut.sv | 16 +++++++--- .../flow/mvb2mfb/uvm/tbench/testbench.sv | 32 ++++++++++++++++--- .../flow/mvb2mfb/uvm/tbench/tests/pkg.sv | 13 -------- .../flow/mvb2mfb/uvm/tbench/tests/speed.sv | 4 +-- .../flow/mvb2mfb/uvm/tbench/tests/test.sv | 4 +-- comp/mvb_tools/flow/mvb2mfb/uvm/top_level.fdo | 7 ++++ 6 files changed, 51 insertions(+), 25 deletions(-) diff --git a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/dut.sv b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/dut.sv index 5a02676fa..8c4c92d72 
100644 --- a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/dut.sv +++ b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/dut.sv @@ -4,10 +4,18 @@ // SPDX-License-Identifier: BSD-3-Clause - -import test::*; - -module DUT ( +module DUT #( + int unsigned MVB_ITEMS, + int unsigned MVB_ITEM_WIDTH_RAW, + int unsigned MFB_REGIONS, + int unsigned MFB_REGION_SIZE, + int unsigned MFB_BLOCK_SIZE, + int unsigned MFB_ITEM_WIDTH, + int unsigned MFB_ALIGNMENT, + int unsigned MFB_META_WIDTH, + int unsigned MVB_ITEM_WIDTH, + string DEVICE +) ( input logic CLK, input logic RST, mvb_if.dut_rx mvb_rx, diff --git a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/testbench.sv b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/testbench.sv index 6023b3a08..a36d5792a 100644 --- a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/testbench.sv +++ b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/testbench.sv @@ -9,11 +9,24 @@ import uvm_pkg::*; `include "uvm_macros.svh" import test::*; -module testbench; +module testbench #( + int unsigned MVB_ITEMS, + int unsigned MVB_ITEM_WIDTH_RAW, + int unsigned MFB_REGIONS, + int unsigned MFB_REGION_SIZE, + int unsigned MFB_BLOCK_SIZE, + int unsigned MFB_ITEM_WIDTH, + int unsigned MFB_ALIGNMENT, + int unsigned MFB_META_WIDTH, + string DEVICE +); + + localparam int unsigned MVB_ITEM_WIDTH = MVB_ITEM_WIDTH_RAW+MFB_META_WIDTH; //TESTS - typedef test::ex_test ex_test; - typedef test::speed speed; + typedef test::ex_test #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH) ex_test; + typedef test::speed #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH) speed; + // ------------------------------------------------------------------------------------------------------------------------------------------------------------------- // Signals @@ -53,7 +66,18 @@ module testbench; // 
------------------------------------------------------------------------------------------------------------------------------------------------------------------- // DUT - DUT DUT_U ( + DUT #( + .MVB_ITEMS (MVB_ITEMS), + .MVB_ITEM_WIDTH_RAW (MVB_ITEM_WIDTH_RAW), + .MFB_REGIONS (MFB_REGIONS), + .MFB_REGION_SIZE (MFB_REGION_SIZE), + .MFB_BLOCK_SIZE (MFB_BLOCK_SIZE), + .MFB_ITEM_WIDTH (MFB_ITEM_WIDTH), + .MFB_ALIGNMENT (MFB_ALIGNMENT), + .MFB_META_WIDTH (MFB_META_WIDTH), + .MVB_ITEM_WIDTH (MVB_ITEM_WIDTH), + .DEVICE (DEVICE) + ) DUT_U ( .CLK (CLK), .RST (reset.RESET), .mvb_rx (mvb_rx), diff --git a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/pkg.sv b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/pkg.sv index 3ccc58dd7..50765c94b 100644 --- a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/pkg.sv +++ b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/pkg.sv @@ -13,19 +13,6 @@ package test; `include "uvm_macros.svh" import uvm_pkg::*; - parameter MFB_REGIONS = 1; - parameter MFB_REGION_SIZE = 8; - parameter MFB_BLOCK_SIZE = 8; - parameter MFB_ITEM_WIDTH = 8; - parameter MFB_META_WIDTH = 12; - - parameter MVB_ITEMS = 1; - parameter MVB_ITEM_WIDTH_RAW = 48; - parameter MVB_ITEM_WIDTH = MVB_ITEM_WIDTH_RAW+MFB_META_WIDTH; - - parameter MFB_ALIGNMENT = MFB_REGION_SIZE*MFB_BLOCK_SIZE; - parameter DEVICE = "ULTRASCALE"; - parameter CLK_PERIOD = 4ns; parameter RESET_CLKS = 10; diff --git a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/speed.sv b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/speed.sv index 982f96920..3f8b65e30 100644 --- a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/speed.sv +++ b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/speed.sv @@ -37,8 +37,8 @@ class mfb_rx_speed#(MFB_REGIONS, MFB_REGION_SIZE, MFB_ITEM_WIDTH, MFB_BLOCK_SIZE endfunction endclass -class speed extends uvm_test; - typedef uvm_component_registry#(test::speed, "test::speed") type_id; +class speed #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH) 
extends uvm_test; + typedef uvm_component_registry#(test::speed #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH), "test::speed") type_id; // declare the Environment reference variable uvm_mvb2mfb::env #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH) m_env; diff --git a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/test.sv b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/test.sv index 55aab8023..0d60f656b 100644 --- a/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/test.sv +++ b/comp/mvb_tools/flow/mvb2mfb/uvm/tbench/tests/test.sv @@ -4,8 +4,8 @@ // SPDX-License-Identifier: BSD-3-Clause -class ex_test extends uvm_test; - typedef uvm_component_registry#(test::ex_test, "test::ex_test") type_id; +class ex_test #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH) extends uvm_test; + typedef uvm_component_registry#(test::ex_test #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH), "test::ex_test") type_id; // declare the Environment reference variable uvm_mvb2mfb::env #(MFB_REGIONS, MVB_ITEMS, MFB_REGION_SIZE, MFB_BLOCK_SIZE, MFB_ITEM_WIDTH, MFB_META_WIDTH, MVB_ITEM_WIDTH) m_env; diff --git a/comp/mvb_tools/flow/mvb2mfb/uvm/top_level.fdo b/comp/mvb_tools/flow/mvb2mfb/uvm/top_level.fdo index aee6905b6..f0d98993b 100644 --- a/comp/mvb_tools/flow/mvb2mfb/uvm/top_level.fdo +++ b/comp/mvb_tools/flow/mvb2mfb/uvm/top_level.fdo @@ -22,9 +22,16 @@ set SIM_FLAGS(CODE_COVERAGE) false set SIM_FLAGS(UVM_ENABLE) true set SIM_FLAGS(UVM_TEST) "test::ex_test" +set SIM_FLAGS(DEBUG) true #set SIM_FLAGS(UVM_TEST) "test::speed" set SIM_FLAGS(UVM_VERBOSITY) UVM_NONE +if {[file exists "./generics.fdo"]} { + source "./generics.fdo" +} else { + puts "Could not source generics.fdo." 
+} + # Global include file for compilation source "$FIRMWARE_BASE/build/Modelsim.inc.fdo" From 1540fcafb231e9a496b51d1d2835ec2027ecbdc0 Mon Sep 17 00:00:00 2001 From: Oliver Gurka Date: Thu, 5 Sep 2024 11:31:48 +0200 Subject: [PATCH 11/11] harp: [MAINTENANCE] Move harp script to python folder --- {build/scripts/ndk_xbuild => python/harp}/README.rst | 0 .../scripts/ndk_xbuild => python/harp}/build_adapters/__init__.py | 0 .../ndk_xbuild => python/harp}/build_adapters/build_adapter.py | 0 .../ndk_xbuild => python/harp}/build_adapters/questasim.py | 0 .../scripts/ndk_xbuild => python/harp}/comp_settings/__init__.py | 0 .../harp}/comp_settings/config_description.py | 0 .../ndk_xbuild => python/harp}/comp_settings/config_transform.py | 0 {build/scripts/ndk_xbuild => python/harp}/ndk_xbuild/__init__.py | 0 .../scripts/ndk_xbuild => python/harp}/ndk_xbuild/ndk_xbuild.py | 0 {build/scripts/ndk_xbuild => python/harp}/pyproject.toml | 0 10 files changed, 0 insertions(+), 0 deletions(-) rename {build/scripts/ndk_xbuild => python/harp}/README.rst (100%) rename {build/scripts/ndk_xbuild => python/harp}/build_adapters/__init__.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/build_adapters/build_adapter.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/build_adapters/questasim.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/comp_settings/__init__.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/comp_settings/config_description.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/comp_settings/config_transform.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/ndk_xbuild/__init__.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/ndk_xbuild/ndk_xbuild.py (100%) rename {build/scripts/ndk_xbuild => python/harp}/pyproject.toml (100%) diff --git a/build/scripts/ndk_xbuild/README.rst b/python/harp/README.rst similarity index 100% rename from build/scripts/ndk_xbuild/README.rst rename to python/harp/README.rst diff --git 
a/build/scripts/ndk_xbuild/build_adapters/__init__.py b/python/harp/build_adapters/__init__.py similarity index 100% rename from build/scripts/ndk_xbuild/build_adapters/__init__.py rename to python/harp/build_adapters/__init__.py diff --git a/build/scripts/ndk_xbuild/build_adapters/build_adapter.py b/python/harp/build_adapters/build_adapter.py similarity index 100% rename from build/scripts/ndk_xbuild/build_adapters/build_adapter.py rename to python/harp/build_adapters/build_adapter.py diff --git a/build/scripts/ndk_xbuild/build_adapters/questasim.py b/python/harp/build_adapters/questasim.py similarity index 100% rename from build/scripts/ndk_xbuild/build_adapters/questasim.py rename to python/harp/build_adapters/questasim.py diff --git a/build/scripts/ndk_xbuild/comp_settings/__init__.py b/python/harp/comp_settings/__init__.py similarity index 100% rename from build/scripts/ndk_xbuild/comp_settings/__init__.py rename to python/harp/comp_settings/__init__.py diff --git a/build/scripts/ndk_xbuild/comp_settings/config_description.py b/python/harp/comp_settings/config_description.py similarity index 100% rename from build/scripts/ndk_xbuild/comp_settings/config_description.py rename to python/harp/comp_settings/config_description.py diff --git a/build/scripts/ndk_xbuild/comp_settings/config_transform.py b/python/harp/comp_settings/config_transform.py similarity index 100% rename from build/scripts/ndk_xbuild/comp_settings/config_transform.py rename to python/harp/comp_settings/config_transform.py diff --git a/build/scripts/ndk_xbuild/ndk_xbuild/__init__.py b/python/harp/ndk_xbuild/__init__.py similarity index 100% rename from build/scripts/ndk_xbuild/ndk_xbuild/__init__.py rename to python/harp/ndk_xbuild/__init__.py diff --git a/build/scripts/ndk_xbuild/ndk_xbuild/ndk_xbuild.py b/python/harp/ndk_xbuild/ndk_xbuild.py similarity index 100% rename from build/scripts/ndk_xbuild/ndk_xbuild/ndk_xbuild.py rename to python/harp/ndk_xbuild/ndk_xbuild.py diff --git 
a/build/scripts/ndk_xbuild/pyproject.toml b/python/harp/pyproject.toml similarity index 100% rename from build/scripts/ndk_xbuild/pyproject.toml rename to python/harp/pyproject.toml