From a7cc3e98d04c388e6ca95f4ee82d1622d1b7f14d Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Sun, 20 Mar 2022 02:34:36 +0100 Subject: [PATCH 01/33] ci/prepare_environment: update Signed-off-by: Unai Martinez-Corral --- .github/scripts/prepare_environment.sh | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/scripts/prepare_environment.sh b/.github/scripts/prepare_environment.sh index eee142ee8..1fd78f849 100755 --- a/.github/scripts/prepare_environment.sh +++ b/.github/scripts/prepare_environment.sh @@ -62,7 +62,7 @@ echo '::endgroup::' cd .. -echo '::group::🗑️ Remove the wrappers (pre-packaged from arch-defs) and add f4pga-env' +echo '::group::Add f4pga-env' case "$F4PGA_FAM" in xc7) F4PGA_DIR_ROOT='install';; @@ -71,6 +71,11 @@ esac F4PGA_DIR_BIN="$F4PGA_INSTALL_DIR_FAM/$F4PGA_DIR_ROOT"/bin/ cp $(dirname "$0")/../../f4pga-env "$F4PGA_DIR_BIN" + +echo '::endgroup::' + +echo '::group::🗑️ Remove the wrappers (pre-packaged from arch-defs)' + cd "$F4PGA_DIR_BIN" case "$F4PGA_FAM" in From cd2ad7144c4022bda2c543beeb13a44e5298b6e2 Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Mon, 5 Jul 2021 19:49:32 +0200 Subject: [PATCH 02/33] Up-to-date version of sfbuild Signed-off-by: Krzysztof Boronski --- sfbuild/CMakeLists.txt | 146 ++++ sfbuild/__init__.py | 1 + sfbuild/docs/DevNotes.md | 173 +++++ sfbuild/docs/GettingStarted.md | 315 +++++++++ sfbuild/docs/Module.md | 244 +++++++ sfbuild/docs/browse_pydoc.sh | 7 + sfbuild/docs/common targets and variables.md | 44 ++ .../modules/common/generic_script_wrapper.md | 47 ++ sfbuild/docs/modules/common/io_rename.md | 27 + sfbuild/docs/modules/common/mkdirs.md | 13 + sfbuild/docs/modules/common/synth.md | 47 ++ sfbuild/part_db/parts.json | 193 ++++++ sfbuild/platforms/ql-eos-s3.json | 121 ++++ sfbuild/platforms/ql-k4n8_fast.json | 167 +++++ sfbuild/platforms/ql-k4n8_slow.json | 167 +++++ sfbuild/platforms/xc7a100t.json | 155 +++++ sfbuild/platforms/xc7a200t.json | 155 +++++ sfbuild/platforms/xc7a50t.json | 155 +++++ sfbuild/sf_argparse.py | 234 +++++++ sfbuild/sf_cache.py | 115 ++++ sfbuild/sf_common/__init__.py | 265 ++++++++ sfbuild/sf_common_modules/__init__.py | 1 + sfbuild/sf_common_modules/fasm.py | 90 +++ .../generic_script_wrapper.py | 310 +++++++++ sfbuild/sf_common_modules/io_rename.py | 113 +++ sfbuild/sf_common_modules/mkdirs.py | 40 ++ sfbuild/sf_common_modules/pack.py | 70 ++ sfbuild/sf_common_modules/place.py | 86 +++ .../sf_common_modules/place_constraints.py | 62 ++ sfbuild/sf_common_modules/route.py | 57 ++ sfbuild/sf_common_modules/synth.py | 162 +++++ sfbuild/sf_flow_config.py | 360 ++++++++++ sfbuild/sf_module/__init__.py | 147 ++++ sfbuild/sf_module_inspector.py | 38 ++ sfbuild/sf_module_runner/__init__.py | 110 +++ sfbuild/sf_stage.py | 78 +++ sfbuild/sf_ugly.py | 19 + sfbuild/sfbuild | 5 + sfbuild/sfbuild.py | 641 ++++++++++++++++++ xc/xc7/toolchain_wrappers/__init__.py | 0 xc/xc7/toolchain_wrappers/symbiflow_common.py | 99 +++ xc/xc7/toolchain_wrappers/symbiflow_place.py | 28 + xc/xc7/toolchain_wrappers/symbiflow_route.py | 23 + xc/xc7/toolchain_wrappers/symbiflow_synth.py | 84 +++ .../symbiflow_write_fasm.py | 42 ++ 45 files changed, 5456 insertions(+) create mode 100644 sfbuild/CMakeLists.txt create mode 100644 sfbuild/__init__.py create mode 100644 sfbuild/docs/DevNotes.md create mode 100644 sfbuild/docs/GettingStarted.md create mode 100644 sfbuild/docs/Module.md create mode 100755 sfbuild/docs/browse_pydoc.sh create mode 100644 sfbuild/docs/common targets and variables.md create 
mode 100644 sfbuild/docs/modules/common/generic_script_wrapper.md create mode 100644 sfbuild/docs/modules/common/io_rename.md create mode 100644 sfbuild/docs/modules/common/mkdirs.md create mode 100644 sfbuild/docs/modules/common/synth.md create mode 100644 sfbuild/part_db/parts.json create mode 100644 sfbuild/platforms/ql-eos-s3.json create mode 100644 sfbuild/platforms/ql-k4n8_fast.json create mode 100644 sfbuild/platforms/ql-k4n8_slow.json create mode 100644 sfbuild/platforms/xc7a100t.json create mode 100644 sfbuild/platforms/xc7a200t.json create mode 100644 sfbuild/platforms/xc7a50t.json create mode 100644 sfbuild/sf_argparse.py create mode 100755 sfbuild/sf_cache.py create mode 100644 sfbuild/sf_common/__init__.py create mode 100644 sfbuild/sf_common_modules/__init__.py create mode 100644 sfbuild/sf_common_modules/fasm.py create mode 100644 sfbuild/sf_common_modules/generic_script_wrapper.py create mode 100644 sfbuild/sf_common_modules/io_rename.py create mode 100644 sfbuild/sf_common_modules/mkdirs.py create mode 100644 sfbuild/sf_common_modules/pack.py create mode 100644 sfbuild/sf_common_modules/place.py create mode 100644 sfbuild/sf_common_modules/place_constraints.py create mode 100644 sfbuild/sf_common_modules/route.py create mode 100755 sfbuild/sf_common_modules/synth.py create mode 100644 sfbuild/sf_flow_config.py create mode 100644 sfbuild/sf_module/__init__.py create mode 100644 sfbuild/sf_module_inspector.py create mode 100644 sfbuild/sf_module_runner/__init__.py create mode 100644 sfbuild/sf_stage.py create mode 100644 sfbuild/sf_ugly.py create mode 100644 sfbuild/sfbuild create mode 100755 sfbuild/sfbuild.py create mode 100644 xc/xc7/toolchain_wrappers/__init__.py create mode 100644 xc/xc7/toolchain_wrappers/symbiflow_common.py create mode 100644 xc/xc7/toolchain_wrappers/symbiflow_place.py create mode 100644 xc/xc7/toolchain_wrappers/symbiflow_route.py create mode 100755 xc/xc7/toolchain_wrappers/symbiflow_synth.py create mode 100644 xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py diff --git a/sfbuild/CMakeLists.txt b/sfbuild/CMakeLists.txt new file mode 100644 index 000000000..2f2a9a8de --- /dev/null +++ b/sfbuild/CMakeLists.txt @@ -0,0 +1,146 @@ +# Installs sfbuild - experimental Symbiflow Build System + +function(INSTALL_DIR) + # Create directory during installation phase + set(options) + set(one_value_args INSTALL_DIRECTORY) + set(multi_value_args) + cmake_parse_arguments( + INSTALL_DIR + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + set(make_dir_code "file(MAKE_DIRECTORY ${INSTALL_DIR_INSTALL_DIRECTORY})") + install(CODE ${make_dir_code}) + +endfunction() + +function(INSTALL_DIR_CONTENT) + # Install files from ROOT_DIRECTORY/FILES_DIRECTORY directory into a FILES_DIRECTORY subdirectory of INSTALL_DIRECTORY + set(options) + set(one_value_args + ROOT_DIRECTORY + FILES_DIRECTORY + DESTINATION) + set(multi_value_args + FILES + INSTALL_OPTS) + cmake_parse_arguments( + INSTALL_DIR_CONTENT + "${options}" + "${one_value_args}" + "${multi_value_args}" + ${ARGN} + ) + + if(NOT DEFINED INSTALL_DIR_CONTENT_ROOT_DIRECTORY) + set(INSTALL_DIR_CONTENT_ROOT_DIRECTORY .) + endif() + if(NOT DEFINED INSTALL_DIR_CONTENT_FILES_DIRECTORY) + set(INSTALL_DIR_CONTENT_FILES_DIRECTORY .) 
+  endif()
+
+  set(file_paths)
+  foreach(file ${INSTALL_DIR_CONTENT_FILES})
+    list(APPEND file_paths ${INSTALL_DIR_CONTENT_ROOT_DIRECTORY}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY}/${file})
+  endforeach()
+
+  install(FILES ${file_paths}
+          DESTINATION ${INSTALL_DIR_CONTENT_DESTINATION}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY}
+          ${INSTALL_DIR_CONTENT_INSTALL_OPTS})
+
+endfunction()
+
+
+find_package(Python3 COMPONENTS Interpreter REQUIRED)
+
+get_target_property_required(VPR env VPR)
+get_target_property_required(GENFASM env GENFASM)
+
+set(SFBUILD_SUPPORTED_PLATFORMS
+    ql-eos-s3
+    xc7a50t
+    xc7a100t
+    xc7a200t
+    ql-k4n8_fast
+    ql-k4n8_slow)
+
+# Create required directories
+foreach(DIR_PATH ${SFBUILD_DIRECTORIES})
+  install_dir(INSTALL_DIRECTORY ${CMAKE_INSTALL_PREFIX}/bin/${DIR_PATH})
+endforeach()
+
+# Install sfbuild
+install_dir_content(
+  FILES
+    __init__.py
+    sf_argparse.py
+    sf_cache.py
+    sf_flow_config.py
+    sf_module_inspector.py
+    sf_stage.py
+    sf_ugly.py
+    sfbuild.py
+    sfbuild
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+    PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
+install_dir_content(
+  FILES __init__.py
+  FILES_DIRECTORY sf_common
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+    PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
+install_dir_content(
+  FILES __init__.py
+  FILES_DIRECTORY sf_module
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+    PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
+install_dir_content(
+  FILES __init__.py
+  FILES_DIRECTORY sf_module_runner
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+    PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
+# Install common modules
+install_dir_content(
+  FILES
+    fasm.py
+    generic_script_wrapper.py
+    io_rename.py
+    mkdirs.py
+    pack.py
+    place_constraints.py
+    place.py
+    route.py
+    synth.py
+  FILES_DIRECTORY sf_common_modules
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+    PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE)
+# Install platform flow definitions
+set(sfbuild_supported_platform_defs)
+foreach(SFBUILD_PLATFORM ${SFBUILD_SUPPORTED_PLATFORMS})
+  set(sfbuild_platform_def "${SFBUILD_PLATFORM}.json")
+  list(APPEND sfbuild_supported_platform_defs ${sfbuild_platform_def})
+endforeach()
+install_dir_content(
+  FILES ${sfbuild_supported_platform_defs}
+  FILES_DIRECTORY platforms
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+    PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ GROUP_READ)
+
+# Install part_db
+install_dir_content(
+  FILES
+    parts.json
+  FILES_DIRECTORY part_db
+  DESTINATION bin/sfbuild
+  INSTALL_OPTS
+    PERMISSIONS WORLD_READ OWNER_WRITE OWNER_READ GROUP_READ
+)
diff --git a/sfbuild/__init__.py b/sfbuild/__init__.py
new file mode 100644
index 000000000..ad48ec86c
--- /dev/null
+++ b/sfbuild/__init__.py
@@ -0,0 +1 @@
+import sfbuild
\ No newline at end of file
diff --git a/sfbuild/docs/DevNotes.md b/sfbuild/docs/DevNotes.md
new file mode 100644
index 000000000..87678f43c
--- /dev/null
+++ b/sfbuild/docs/DevNotes.md
@@ -0,0 +1,173 @@
+# Developer's notes
+
+## Project's structure
+
+The main script is in the `sfbuild.py` file.
+`sf_cache.py` contains code needed for tracking modifications in the project.
+`sf_ugly` contains some ugly workarounds.
+
+There are two python modules which are shared by the code of `sfbuild.py` and
+_sfbuild modules_: `sf_common` and `sf_module`.
+
+_sfbuild modules_ are extensions to the build system that wrap tools to be used
+within _sfbuild_; currently they are standalone executable scripts. All
+_sfbuild modules_ are single python scripts located under directories that
+follow the `sf_*_modules/` pattern. So currently those are:
+
+ * `sf_common_modules` - modules which can be shared by multiple platforms.
+ * `sf_xc7_modules` - modules specific to xc7 flows.
+ * `sf_quicklogic_modules` - modules specific to Quicklogic flows.
+
+There's also a `docs` directory which you are probably aware of if you are reading
+this. All the documentation regarding sfbuild goes here.
+
+The `platforms` directory contains JSON files with _platform flow definitions_.
+Names of those files must follow the `platform_name.json` pattern.
+
+## Different subsystems and where to find them
+
+### Building and dependency resolution
+
+All the code regarding dependency resolution is located in the `sfbuild.py` file.
+Take a look at the `Flow` class.
+
+Most of the work is done in the `Flow._resolve_dependencies` method. Basically, it
+performs a _DFS_ with _stages_ (instances of _sfbuild modules_) as its nodes,
+which are linked using symbolic names of dependencies on inputs and outputs.
+It queries the modules for information regarding i/o (most importantly the paths
+on which they are going to produce outputs), checks whether
+their inputs are going to be satisfied, checks if dependencies were modified, etc.
+
+The actual building is done using the `Flow._build_dep` procedure. It uses a
+similar _DFS_ approach to invoke modules and check their inputs and outputs.
+
+### Modification tracking
+
+Modification tracking is done by taking, comparing and keeping track of `adler32`
+hashes of all dependencies. Each dependency has a set of hashes associated with it.
+The reason for having multiple hashes is that a dependency may have multiple
+"_consumers_", i.e. _stages_ which take it as input. Each hash is associated with a
+particular consumer. This is necessary because the system tries to avoid rebuilds
+when possible, and the status of each file (modified/unmodified) may differ with
+regard to individual stages.
+
+Keeping track of the status of each file is done using the `SymbiCache` class,
+which is defined in the `sf_cache.py` file. `SymbiCache` is used mostly inside
+`Flow`'s methods.
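+
+Below is a minimal sketch of this hashing scheme (an illustration only, not
+`SymbiCache`'s actual code; the real class also persists its data to the
+`.symbicache` file, and the names used here are made up):
+
+```python
+from zlib import adler32
+
+# dependency path -> {consumer (stage name) -> hash last seen by that consumer}
+hashes: 'dict[str, dict[str, int]]' = {}
+
+def file_hash(path: str) -> int:
+    with open(path, 'rb') as f:
+        return adler32(f.read())
+
+def is_modified(dep_path: str, consumer: str) -> bool:
+    """True if `dep_path` changed since `consumer` last used it."""
+    last = hashes.get(dep_path, {}).get(consumer)
+    return last is None or last != file_hash(dep_path)
+
+def update(dep_path: str, consumer: str):
+    hashes.setdefault(dep_path, {})[consumer] = file_hash(dep_path)
+```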
+
+### Module's internals and API
+
+`sf_module` contains everything that is necessary to write a module,
+particularly the `Module` and `ModuleContext` classes.
+The `do_module` function currently serves to create an instance of some
+`Module`'s subtype and provide a _CLI_ interface for it.
+
+The _CLI_ interface, however, is not meant to be used by an end-user, especially
+given that it reads JSON data from _stdin_. A wrapper for interfacing with modules
+exists in `sfbuild.py` and it's called `_run_module`.
+
+### Internal environmental variable system
+
+_sfbuild_ exposes some data to the user, as well as reads some, using internal
+environmental variables. These can be referenced by users in
+_platform flow definitions_ and _project flow configurations_ using the
+`${variable_name}` syntax when defining values. They can also be read inside
+_sfbuild modules_ by accessing the `ctx.values` namespace.
+
+The core of this system is the `ResolutionEnvironment` class which can be found
+inside the `sf_common` module.
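+
+As an illustration, here is a rough sketch of how such a `${...}` substitution
+can work, including the list behavior described in `docs/Module.md`. This is not
+the actual `ResolutionEnvironment` code, just the idea:
+
+```python
+import re
+
+_REF = re.compile(r'\$\{([^}]*)\}')
+
+def resolve(s: str, values: dict):
+    """Expand `${name}` references in `s`. A reference to a list value
+    produces a list: one result string per item of the list."""
+    results = [s]
+    for ref in _REF.findall(s):
+        val = values[ref]
+        items = val if isinstance(val, list) else [val]
+        results = [r.replace('${' + ref + '}', str(item), 1)
+                   for r in results for item in items]
+    return results[0] if len(results) == 1 else results
+
+assert resolve('a_value: ${a_value}', {'a_value': '1234'}) == 'a_value: 1234'
+assert resolve('item: ${lst}', {'lst': ['a', 'b']}) == ['item: a', 'item: b']
+```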
+
+### Installation
+
+Check `CMakeLists.txt`.
+
+## TODO:
+
+There are a couple of things that need some work:
+
+### Urgent
+
+* Full support for Quicklogic platforms.
+* Testing XC7 projects with more sophisticated setups and PCF flows.
+
+### Important
+
+* Fix and refactor the overloading mechanism in _platform flow definitions_ and
+  _platform flow configurations_. Values in the global `values` dict should
+  be overloaded by those in the `values` dict under `module_options.stage_name`
+  inside a _platform flow definition_. Values in a _platform flow configuration_
+  should be imported from the _platform flow definition_ and then overloaded by
+  entries in the `values`, `platform_name.values`,
+  `platform_name.stages.stage_name.values` dicts respectively.
+
+* Define a clear specification for entries in _platform flow definitions_ and
+  _platform flow configurations_. Which environmental variables can be accessed
+  where, and when?
+
+* Force "_on-demand_" outputs if they are required by another stage.
+  This may require redesigning the "on-demand" feature, which currently works
+  by producing a dependency if and only if the user explicitly provides the
+  path. Otherwise the path is unknown.
+
+* Make the commenting style consistent.
+
+* Write more docs.
+
+### Not very important
+
+* Extend the metadata system for modules, perhaps make it easier to use.
+
+* Add missing metadata for module targets.
+
+### Maybe possible in the future
+
+* Generate platform definitions using CMake.
+
+### Out of the current scope
+
+* Change the interfaces of some internal python scripts. This could lead to
+  possibly merging some modules for XC7 and Quicklogic into one common module.
+
+## Quicklogic
+
+So far I've been trying to bring support to the _EOS-S3_ platform, with mixed
+results. Some parts of upstream Symbiflow aren't there yet. The Quicklogic scripts
+are incomplete.
+
+The _k4n8_ family remains a mystery to me. There's zero information about any
+family other than _PP3_ and _PP2_, and neither could I find example projects for
+it. Symbiflow's website mentions it only briefly. Yosys complains about
+`_DLATCH_N_` not being supported when I tried synthesizing anything. This is
+possibly related to the fact that there's no equivalent of `pp3_latches_map.v`
+for `k4n8/umc22` in
+[Yosys](https://github.com/YosysHQ/yosys/tree/master/techlibs/quicklogic).
+
+**UPDATE**: Finally got the ioplace stage to work. Pulling the Quicklogic fork was
+necessary in order to progress. The Quicklogic EOS-S3 development has now moved
+into the `eos-s3` branch of my fork.
+Additionally, the `chandalar.pcf` file in _symbiflow-examples_ seemed to be faulty.
+The '()' parentheses should be replaced by '[]' brackets.
+I also tried to synthesize the `iir` project from `tool-perf`, but **VPR** seems
+to be unable to fit it (at least on my installation of Symbiflow, which at this
+point is a bit old and heavily modified).
+
+Here's a flow configuration I've used for `btn_counter` on `eos-s3`:
+
+```json
+{
+    "dependencies": {
+        "sources": ["btn_counter.v"],
+        "synth_log": "${build_dir}/synth.log",
+        "pack_log": "${build_dir}/pack.log"
+    },
+    "values": {
+        "top": "top",
+        "build_dir": "build/eos-s3"
+    },
+
+    "ql-eos-s3": {
+        "dependencies": {
+            "pcf": "chandalar.pcf",
+            "build_dir": "${build_dir}"
+        }
+    }
+}
+```
\ No newline at end of file
diff --git a/sfbuild/docs/GettingStarted.md b/sfbuild/docs/GettingStarted.md
new file mode 100644
index 000000000..e20620a59
--- /dev/null
+++ b/sfbuild/docs/GettingStarted.md
@@ -0,0 +1,315 @@
+# sfbuild
+
+## Getting started
+
+To use _**sfbuild**_ you need a working python 3 installation, which should be
+included as a part of the conda virtual environment set up during symbiflow
+installation.
+_**sfbuild**_ installs alongside _**Symbiflow**_ with any version of the
+toolchain. However, only _XC7_ architectures are supported currently and
+_Quicklogic_ support is a work in progress. _**sfbuild**_'s installation directory
+is `bin/sfbuild`, under your _**Symbiflow**_ installation directory. `sfbuild.py`
+is the script that you should run to use _**sfbuild**_.
+
+To get started with a project that already uses sfbuild, go to the project's
+directory and run the following line to build a bitstream:
+```
+$ python3 /path/to/sfbuild.py flow.json -p platform_name -t bitstream
+```
+
+Substitute `platform_name` with the name of the target platform (e.g. `x7a50t`).
+`flow.json` should be a **project's flow configuration** file included with the
+project. If you are unsure whether you got the right file, you can check the
+example of the contents of such a file shown in the
+"_Using sfbuild to build a target_" section.
+
+The location of the file containing the bitstream will be indicated by sfbuild
+after the flow completes. Look for a line like this one on stdout:
+
+```
+Target `bitstream` -> build/arty_35/top.bit
+```
+
+-------------------------------------------------------------------------------------
+
+## Fundamental concepts
+
+If you want to create a new sfbuild project, it's highly recommended that you
+read this section first.
+
+### sfbuild
+
+_**sfbuild**_ is a modular build system designed to handle various
+_Verilog-to-bitstream_ flows for FPGAs. It works by wrapping the necessary tools
+in python scripts, which are called **sfbuild modules**. The modules are then
+referenced in **platform's flow definition** files, along with configurations
+specific to a given platform. These files come included as a part of
+_**sfbuild**_ for the following platforms:
+
+* x7a50t
+* x7a100t
+* x7a200t (_soon_)
+
+You can also write your own **platform's flow definition** file if you want to
+bring support to a different device.
+
+Each project that uses _**sfbuild**_ to perform any flow should include a _.json_
+file describing the project. The purpose of that file is to configure inputs
+for the flow and possibly override configuration values if necessary.
+
+### Modules
+
+A **module** (also referred to as an **sfbuild module** in situations where there
+might be confusion between Python's _modules_ and sfbuild's _modules_) is a python
+script that wraps a tool used within **Symbiflow's** ecosystem. The main purpose
+of this wrapper is to provide a unified interface for sfbuild to use and configure
+the tool, as well as provide information about the files required and produced by
+the tool.
+
+### Dependencies
+
+A **dependency** is any file, directory or a list of such that a **module** takes
+as its input or produces on its output.
+
+Modules specify their dependencies by using symbolic names instead of file paths.
+The files they produce are also given symbolic names and paths which are either
+set through the **project's flow configuration** file or derived from the paths of
+the dependencies taken by the module.
+
+### Target
+
+A **target** is a dependency that the user has asked sfbuild to produce.
+
+### Flow
+
+A **flow** is a set of **modules** executed in the right order to produce a
+**target**.
+
+### .symbicache
+
+All **dependencies** are tracked by a modification tracking system which stores
+hashes of the files (directories always get a `'0'` hash) in the `.symbicache`
+file in the root of the project.
+When _**sfbuild**_ constructs a **flow**, it will try to omit the execution
+of modules which would receive the same data on their input. There's a strong
+_assumption_ there that a **module**'s output remains unchanged if the input
+doesn't change, i.e. **modules** are deterministic.
+
+### Resolution
+
+A **dependency** is said to be **resolved** if it meets one of the following
+criteria:
+
+* it exists on persistent storage and its hash matches the one stored in
+  .symbicache
+* there exists a **flow** such that all of the dependencies of its modules are
+  **resolved** and it produces the **dependency** in question.
+
+### Platform's flow definition
+
+A **platform's flow definition** is a piece of data describing a space of flows
+for a given platform, serialized into a _JSON_.
+It's stored in a file that's named after the device's name under
+`sfbuild/platforms`.
+
+A **platform's flow definition** contains a list of modules available for
+constructing flows and defines a set of values which the modules can reference.
+In case of some modules it may also define a set of parameters used during their
+construction. The `mkdirs` module uses that to allow the production of multiple
+directories as separate dependencies. This, however, is an experimental feature
+which will possibly be removed in favor of having multiple instances of the same
+module with renameable outputs.
+
+Not all **dependencies** have to be **resolved** at this stage; a **platform's
+flow definition**, for example, won't be able to provide a list of source files
+needed in a **flow**.
+
+### Project's flow configuration
+
+Similarly to a **platform's flow definition**, a **project's flow configuration**
+is a _JSON_ that is used to configure **modules**. There are, however, a couple of
+differences here and there.
+
+* The most obvious one is that this file is unique for a project and
+  is provided by the user of _**sfbuild**_.
+
+* The other difference is that it doesn't list **modules** available for the
+  platform.
+
+* All the values provided in a **project's flow configuration** will override
+  those provided in a **platform's flow definition**.
+
+* It can contain sections with configurations for different platforms.
+
+* Unlike a **platform's flow definition** it can give explicit paths to
+  dependencies.
+
+* At this stage all mandatory **dependencies** should be resolved.
+
+Typically a **project's flow configuration** will be used to resolve dependencies
+for _HDL source code_ and _device constraints_.
+
+## Using sfbuild to build a target
+
+To build a **target** "`target_name`", use the following command:
+```
+$ python3 /path/to/sfbuild.py flow.json -p platform_device_name -t target_name
+```
+where `flow.json` is a path to the **project's flow configuration**.
+
+For example, let's consider the following
+**project's flow configuration (flow.json)**:
+
+```json
+{
+    "dependencies": {
+        "sources": ["counter.v"],
+        "xdc": ["arty.xdc"],
+        "synth_log": "synth.log",
+        "pack_log": "pack.log",
+        "top": "top"
+    },
+    "xc7a50t": {
+        "dependencies": {
+            "build_dir": "build/arty_35"
+        }
+    }
+}
+```
+
+It specifies a list of paths to Verilog source files as the "`sources`"
+dependency. Similarly, it also provides an "`XDC`" file with constraints
+(the "`xdc`" dependency).
+
+It also names paths for the synthesis and packing logs ("`synth_log`",
+"`pack_log`"). These two are optional _on-demand_ outputs, meaning they won't be
+produced unless their paths are explicitly set.
+ +"`top`" value is set to in order to specify the name of top Verilog module, which +is required during synthesis. + +"`build_dir`" is an optional helper dependency. When available, modules will put +their outputs into that directory. It's also an _on-demand_ output of `mkdirs` +module in _xc7a50t_ flow definition, which means that if specified directory does +not exist, `mkdirs` will create it and provide as `build_dir` dependency. + +building a bitstream for *x7a50t* would look like that: + +With this flow configuration, you can build a bitstream for arty_35 using the +following command: + +``` +$ python3 /path/to/sfbuild.py flow.json -p x7a50t -t bitstream +``` + +### Pretend mode + +You can also add a `--pretend` (`-P`) option if you just want to see the results of +dependency resolution for a specified target without building it. This is useful +when you just want to know what files will be generated and where wilh they be +stored. + +### Info mode + +Modules have the ability to include description to the dependencies they produce. + +Running _**sfbuild**_ with `--info` (`-i`) flag allows youn to see descriptions of +these dependencies. This option doesn't require a target to be specified, but you +still have to provuide a flow configuration and platform name. + +This is still an experimental option, most targets currently lack descriptions +and no information whether the output is _on-demand_ is currently displayed. + +Example: +``` +$ python3 /path/to/sfbuild.py flow.json -p x7a50t -i +``` +``` +Platform dependencies/targets: + build_dir: + module: `mk_build_dir` + eblif: Extended BLIF hierarchical sequential designs file + generated by YOSYS + module: `synth` + fasm_extra: + module: `synth` + json: JSON file containing a design generated by YOSYS + module: `synth` + synth_json: + module: `synth` + sdc: + module: `synth` +``` + +_This is only a snippet of the entire output_ + +### Summary of all available sfbuild options + +| long | short | arguments | description | +|------------|:-----:|------------------------|-------------------------------------------------| +| --platform | -p | device name | Specify target device name (eg. x7a100t) | +| --target | -t | target dependency name | Specify target to produce | +| --info | -i | - | Display information about available targets | +| --pretend | -P | - | Resolve dependencies without executing the flow | + +### Dependency resolution display + +sfbuild displays some information about dependencies when requesting a target. + +Here's an example of a possible output when trying to build `bitstream` target: +``` +sfbuild: Symbiflow Build System +Scanning modules... + +Project status: + [R] bitstream: bitstream -> build/arty_35/top.bit + [O] build_dir: build/arty_35 + [R] eblif: synth -> build/arty_35/top.eblif + [R] fasm: fasm -> build/arty_35/top.fasm + [R] fasm_extra: synth -> build/arty_35/top_fasm_extra.fasm + [R] io_place: ioplace -> build/arty_35/top.ioplace + [R] net: pack -> build/arty_35/top.net + [X] pcf: MISSING + [R] place: place -> build/arty_35/top.place + [R] place_constraints: place_constraints -> build/arty_35/top.preplace + [R] route: route -> build/arty_35/top.route + [R] sdc: synth -> build/arty_35/top.sdc + [N] sources: ['counter.v'] + [O] xdc: ['arty.xdc'] + +sfbuild: DONE +``` + +The letters in the boxes describe the status of a dependency which's name is next +to the box. + + * **X** - dependency unresolved. This isn't always a bad sign. Some dependencies + are not required to, such as "`pcf`". 
+ * **U** - dependency unreachable. The dependency has a module that could produce
+   it, but the module's dependencies are unresolved. This doesn't say whether the
+   dependency was necessary or not.
+ * **O** - dependency present, unchanged. This dependency is already built and is
+   confirmed to stay unchanged during flow execution.
+ * **N** - dependency present, new/changed. This dependency is already present on
+   the persistent storage, but it was either missing earlier, or
+   its content changed since the last time.
+   (WARNING: it won't continue to be reported as "**N**" after a successful build
+   of any target. This may lead to some false "**O**"s in some complex scenarios.
+   This should be fixed in the future.)
+ * **S** - dependency not present, resolved. This dependency is not
+   currently available on the persistent storage, however it will be produced
+   within the flow's execution.
+ * **R** - dependency present, resolved, requires rebuild. This dependency is
+   currently available on the persistent storage, however it has to be rebuilt
+   due to the changes in the project.
+
+Additional info about a dependency will be displayed next to its name after a
+colon:
+
+* In case of dependencies that are to be built (**S**/**R**), there's the name of
+  the module that will produce this dependency, followed by "`->`" and a path or
+  list of paths to the file(s)/directory(ies) that will be produced as this
+  dependency.
+
+* In case of dependencies which do not require the execution of any modules, only
+  a path or list of paths to the file(s)/directory(ies) will be displayed.
+
+* In case of unresolved dependencies (**X**), which are never produced by any
+  module, a text saying "`MISSING`" will be displayed.
+* In case of unreachable dependencies, the name of a module that could produce
+  them will be displayed, followed by "`-> ???`".
+
+In the example above the file `counter.v` has been modified and is now marked as
+"**N**". This causes a bunch of other dependencies to be rebuilt ("**R**").
+`build_dir` and `xdc` were already present, so they are marked as "**O**".
\ No newline at end of file
diff --git a/sfbuild/docs/Module.md b/sfbuild/docs/Module.md
new file mode 100644
index 000000000..c11e9337b
--- /dev/null
+++ b/sfbuild/docs/Module.md
@@ -0,0 +1,244 @@
+# sfbuild modules interface
+
+This document contains all the information needed to configure modules for
+your _**sfbuild**_ project, as well as some info about the API used to write
+modules.
+
+## Configuration interface:
+
+Modules are configured through an internal API by _**sfbuild**_.
+The basic requirement for a module script is to expose a class with the `Module`
+interface.
+
+_**sfbuild**_ reads configuration from two different places: a
+**platform's flow definition** file and a **project's flow configuration** file.
+
+The files, as described in the "_Getting Started_" document, contain
+_JSON_-serialized data with snippets of _module configurations_.
+
+A _module configuration_ is a structure with the following fields:
+
+* `takes` - a dictionary that contains keys which are names of the dependencies
+  used by the module. The values are paths to those dependencies. They can be
+  either singular strings or lists of strings.
+* `produces` - a dictionary that contains keys which are names of the
+  dependencies produced by the module. The values are requested filenames for the
+  files generated by the module. They can be either singular strings or lists of
+  strings.
+* `values` - a dictionary that contains other values used to configure the module.
+  The keys are value names and the values can have any type.
+* `platform` - the platform's name. This is a string.
+
+## Platform-level configuration
+
+In case of a **platform's flow definition**, a `values` dictionary can be defined
+globally and the values defined there will be passed to every module's config.
+
+Those values can be overridden per-module through the `module_options` dictionary.
+
+Parameters used during a module's construction can also be defined in
+`module_options` as `params` (those are not a part of a _module configuration_;
+instead they are used during the actual construction of a module instance, before
+it declares any of its inputs/outputs etc.)
+
+Defining dictionaries for `takes` and `produces` is disallowed within a
+**platform's flow definition**.
+
+For a detailed look at the concepts described here, please have a look at
+`sfbuild/platforms/xc7a50t`
+
+## Project-level configuration
+
+Similarly to a **platform's flow definition**, a `values` dict can be provided.
+The values provided there will overwrite the values from the
+**platform's flow definition** in case of a collision.
+
+Unlike a **platform's flow definition**, a **project's flow configuration** may
+contain a `dependencies` dict. This dictionary is used to map symbolic dependency
+names to actual paths. Most dependencies can have their paths resolved implicitly
+without the need to provide explicit paths, which is a mechanism that is described
+in a later section of this document. However some dependencies must be provided
+explicitly, e.g. paths to the project's verilog source files. It should be noted
+that, depending on the flow definition and the dependency in question, the path
+does not necessarily have to point to an already existing file. If the dependency
+is a product of a module within the flow, the path assigned to it will be used
+by the module to build that dependency. This is also used in case of _on-demand_
+dependencies, which won't be produced unless the user explicitly provides a path
+for them.
+
+A **project's flow configuration** cannot specify `params` for modules and does
+not use the `module_options` dictionary.
+
+Any entry with a key other than `dependencies` or `values` is treated as a
+platform name. These entries are necessary to enable support for a given platform.
+Each of those entries may contain `dependencies` and `values` fields which will
+overload the `dependencies` and `values` defined in the global scope of the
+**project's flow configuration**. Any other field under those platform entries
+is treated as a _stage-specific-configuration_. The key is a name of a stage
+within a flow for the specified platform and the values are dicts which may
+contain `dependencies` and `values` fields that overload `dependencies` and
+`values` respectively, locally for the stage.
+
+## Internal environmental variables
+
+It's very useful to be able to refer to some data within a
+**platform's flow definition** and a **project's flow configuration** to
+either avoid redundant definitions or to access the results of certain operations.
+_**sfbuild**_ allows doing that by using a special syntax for accessing internal
+environmental variables.
+
+The syntax is `${variable_name}`. Any string value within a
+**platform's flow definition** and a **project's flow configuration** that
+contains such patterns will have them replaced with the values of the variables
+referenced, if those values are strings.
+E.g.:
+
+With the following values defined:
+
+```json
+{
+    "a_value": "1234",
+    "another_value": "a_value: ${a_value}"
+}
+```
+
+`another_value` will resolve to:
+
+```json
+"a_value: 1234"
+```
+
+If the value is a list, however, the result will be a list where every entry is
+the original string with the variable reference replaced by a subsequent item of
+the referenced list. E.g.:
+
+With the following values defined:
+```json
+{
+    "list_of_values": ["a", "b", "c"],
+    "some_string": "item: ${list_of_values}"
+}
+```
+
+`some_string` will resolve to:
+
+```json
+["item: a", "item: b", "item: c"]
+```
+
+Be careful when using this kind of resolution, as its computational and memory
+complexity grows exponentially with the number of list variables being referenced.
+
+The variables that can be referenced within a definition/configuration fall into 3
+categories:
+
+* **value references** - anything declared as a `value` can be accessed by its
+  name
+* **dependency references** - any dependency path can be referenced using the name
+  of the dependency prefaced with a ':' prefix. E.g.: `${:eblif}` will resolve
+  to the path of the `eblif` dependency. Make sure that the dependency can
+  actually be resolved when you are using this kind of reference. For example
+  you can't use a reference to the `eblif` dependency in a module which does not
+  rely on it. An exception is the producer module, which can in fact reference its
+  own outputs, but these references cannot be used during the _mapping_ stage
+  (more on that later).
+* **built-in references** - there are a couple of built-in variables which are
+  very handy:
+  * `shareDir` - path to symbiflow's _share_ directory.
+  * `prjxray_db` - Project X-Ray database path.
+  * `python3` - path to the Python 3 interpreter.
+  * `noisyWarnings` - (this one should probably get removed)
+
+## `Module` class
+
+Each module is represented as a class derived from the `Module` class.
+
+The class should implement the following methods:
+
+* `execute(self, ctx: ModuleContext)` - executes the module in _exec_ mode
+* `map_io(self, ctx: ModuleContext) -> 'dict[str, ]'` - executes the module in
+  _mapping_ mode
+* `__init__(self, params: 'dict[str, ]')` - initializer. The `params`
+  is a dict with optional parameters for the module.
+
+Each module script should expose the class by defining its name/type alias as
+`ModuleClass`. sfbuild tries to access a `ModuleClass` attribute within a package
+when instantiating a module.
+
+## Module's execution modes
+
+A module has essentially two execution modes:
+
+* _mapping_ mode
+* _exec_ mode
+
+### _mapping_ mode
+
+In _mapping_ mode the module is provided with an incomplete configuration which
+includes:
+ * `takes` namespace: this maps names of input dependencies to the paths of these
+   dependencies
+ * `values` namespace: this maps names of variables to the values of those
+   variables.
+
+The module has to provide a dictionary that gives every output dependency that's
+not _on-demand_ a default path. This is basically a promise that, when
+executed in _exec_ mode, the module will produce files under these paths.
+Typically such paths would be derived from the path of one of its input
+dependencies. This mechanism allows the user to avoid specifying an explicit path
+for each intermediate target.
+
+It should be noted that variables referring to the output dependencies
+can't be accessed at this stage, for the obvious reason that their values are yet
+to be evaluated.
+
+### _exec_ mode
+
+In _exec_ mode the module does the actual work.
+
+The configuration passed into this mode is full and it includes:
+
+* `takes` namespace: this maps names of input dependencies to the paths of these
+  dependencies
+* `values` namespace: this maps names of variables to the values of those
+  variables.
+* `produces` namespace: this maps names of output dependencies to explicit paths.
+  This should not really be used directly, but it's useful for the
+  `ModuleContext.is_output_explicit` method.
+* `outputs` namespace: this maps names of output dependencies to their paths.
+
+When the module finishes executing in _exec_ mode, all of the dependencies
+described in `outputs` should be present.
+
+## Module initialization/instantiation
+
+In the `__init__` method of a module's class, the following fields should be
+set:
+
+* `takes` - a list of symbolic dependency names for dependencies used by the
+  module
+* `produces` - a list of symbolic dependency names for dependencies produced
+  by the module.
+* `values` - a list of names given to the variables used within the module
+* `prod_meta` - a dictionary which maps product names to descriptions of these
+  products.
+
+### Qualifiers/decorators
+
+By default the presence of all the dependencies and values is mandatory
+(in case of `produces` that means that the module always has to produce the listed
+dependencies). This can be changed by "decorating" a name in one of the following
+ways:
+
+* '`?`' _suffix_
+  * In `takes` - the dependency is not necessary for the module to execute
+  * In `produces` - the dependency may be produced, but it is not guaranteed.
+  * In `values` - the value is not required for the module to execute.
+    Referring to it through `ModuleContext.values.value_name` won't raise an
+    exception if the value is not present; instead `None` will be returned.
+* '`!`' _suffix_
+  * In `produces` - the dependency is going to be produced only if the user
+    provides an explicit path for it.
+
+Currently it's impossible to combine both '`!`' and '`?`' together. This
+limitation does not have any reason behind it other than the way the qualifier
+system is implemented at the moment. It might be removed in the future.
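+
+To tie the above together, here's what a hypothetical minimal module could look
+like. This is a sketch for illustration only: the `copy` module and its dependency
+names are made up, and the `ModuleContext` namespaces are used as described in
+this document.
+
+```python
+from sf_module import Module, ModuleContext
+import shutil
+
+class CopyModule(Module):
+    def map_io(self, ctx: ModuleContext):
+        # Mapping mode: promise a default path for the output,
+        # derived from the input dependency's path.
+        return {'copy': ctx.takes.source + '.copy'}
+
+    def execute(self, ctx: ModuleContext):
+        # Exec mode: do the actual work and produce everything in `outputs`.
+        shutil.copy(ctx.takes.source, ctx.outputs.copy)
+
+    def __init__(self, params):
+        # (the real base class may require additional setup)
+        self.takes = ['source']
+        self.produces = ['copy']
+        self.values = []
+        self.prod_meta = {'copy': 'A verbatim copy of the `source` dependency'}
+
+ModuleClass = CopyModule
+```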
diff --git a/sfbuild/docs/browse_pydoc.sh b/sfbuild/docs/browse_pydoc.sh
new file mode 100755
index 000000000..d3cae9fb4
--- /dev/null
+++ b/sfbuild/docs/browse_pydoc.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+MY_DIR=`dirname $0`
+SFBUILD_DIR=${MY_DIR}/..
+SFBUILD_PY=${SFBUILD_DIR}/sfbuild.py
+
+PYTHONPATH=${SFBUILD_DIR} pydoc -b
\ No newline at end of file
diff --git a/sfbuild/docs/common targets and variables.md b/sfbuild/docs/common targets and variables.md
new file mode 100644
index 000000000..52d68af70
--- /dev/null
+++ b/sfbuild/docs/common targets and variables.md
@@ -0,0 +1,44 @@
+# sfbuild's common targets and values
+
+Targets and values are named following some conventions.
+Below are lists of the target and value names along with their meanings:
+
+### Common targets that need to be provided by the user:
+
+| Target name | list | Description |
+|-------------|:----:|-------------|
+| `sources`   | yes  | Verilog sources |
+| `sdc`       | no   | Synopsys Design Constraints |
+| `xdc`       | yes  | Xilinx Design Constraints (available only for Xilinx platforms) |
+| `pcf`       | no   | Physical Constraints File |
+
+### Commonly requested targets (available in most flows):
+
+| Target name | list | Description |
+|-------------|:----:|-------------|
+| `eblif`      | no  | Extended blif file |
+| `bitstream`  | no  | Bitstream |
+| `net`        | no  | Netlist |
+| `fasm`       | no  | Final FPGA Assembly |
+| `fasm_extra` | no  | Additional FPGA assembly that may be generated during synthesis |
+| `build_dir`  | no  | A directory to put the output files in |
+
+### Built-in values
+
+| Value name | type | Description |
+|------------|------|-------------|
+| `shareDir` | `string` | Path to symbiflow's installation "share" directory |
+| `python3`  | `string` | Path to Python 3 executable |
+| `noisyWarnings` | `string` | Path to noisy warnings log (should be deprecated) |
+| `prjxray_db` | `string` | Path to Project X-Ray database |
+
+### Values commonly used in flow definitions:
+
+| Value name | type | Description |
+|------------|------|-------------|
+| `top` | `string` | Top module name |
+| `build_dir` | `string` | Path to build directory (should be optional) |
+| `device` | `string` | Name of the device |
+| `vpr_options` | `dict[string -> string \| number]` | Named options passed to VPR. No `--` prefix included. |
+| `part_name` | `string` | Name of the chip used. The distinction between `device` and `part_name` is ambiguous at the moment and should be addressed in the future. |
+| `arch_def` | `string` | Path to an XML file containing the architecture definition. |
diff --git a/sfbuild/docs/modules/common/generic_script_wrapper.md b/sfbuild/docs/modules/common/generic_script_wrapper.md
new file mode 100644
index 000000000..c6fa7c0c4
--- /dev/null
+++ b/sfbuild/docs/modules/common/generic_script_wrapper.md
@@ -0,0 +1,47 @@
+# sfbuild module "generic_script_wrapper"
+
+##### _Category: Common_
+
+-------------------------------
+
+This module provides a way to integrate an external command into an sfbuild flow.
+Its inputs and outputs are fully defined by the author of the flow definition.
+
+## Setup
+
+### 1. Parameters
+
+Parameters are everything when it comes to this module:
+
+* `stage_name` (string, optional): Name describing the stage
+* `script` (string, mandatory): Path to the script to be executed
+* `interpreter` (string, optional): Interpreter for the script
+* `cwd` (string, optional): Current Working Directory for the script
+* `outputs` (dict[string -> dict[string -> string]],
+  mandatory):
+  A dict with output descriptions (dicts). Keys name output dependencies.
+  * `mode` (string, mandatory): "file" or "stdout". Describes how the output is
+    grabbed from the script.
+  * `file` (string, required if `mode` is "file"): Name of the file generated by
+    the script.
+  * `target` (string, required): Default name of the file of the generated
+    dependency. You can use all values available during the map_io stage. Each
+    input dependency also gets two extra values associated with it:
+    `:dependency_name[noext]`, which contains the path to the dependency with the
+    extension (anything after the last ".") removed, and `:dependency_name[dir]`,
+    which contains the directory path of the dependency. This is useful for
+    deriving an output name from the input.
+  * `meta` (string, optional): Description of the output dependency.
+* `inputs` (dict[string -> string | bool], mandatory):
+  A dict with input descriptions. A key can be the name of a named argument, the
+  position of an unnamed argument, when prefaced with "#" (e.g. "#1"), or the name
+  of an environmental variable, when prefaced with "$". Positions are indexed
+  from 1, as it's a convention that the 0th argument is the path of the executed
+  program. Values are strings that can contain references to variables to be
+  resolved after the project flow configuration is loaded (that means they can
+  reference values and dependencies which are to be set by the user). All of the
+  module's inputs will be determined by the references used. Thus dependency and
+  value definitions are implicit. If the value of the resolved string is empty and
+  is associated with a named argument, the argument in question will be skipped
+  entirely. This allows using optional dependencies. To use a named argument as a
+  flag instead, set it to `true`.
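+
+For illustration, here is a sketch of how an `inputs` dict (after all references
+have been resolved) could be translated into a command line and environment.
+The exact flag syntax assumed here ("`--name value`") is an assumption, not
+necessarily what the real module emits:
+
+```python
+import os
+
+def build_invocation(script: str, inputs: dict):
+    env = dict(os.environ)
+    positional, named = {}, []
+    for key, value in inputs.items():
+        if key.startswith('$'):        # environmental variable
+            env[key[1:]] = value
+        elif key.startswith('#'):      # unnamed argument, indexed from 1
+            positional[int(key[1:])] = value
+        elif value is True:            # named argument used as a flag
+            named += ['--' + key]
+        elif value:                    # empty resolved values are skipped
+            named += ['--' + key, value]
+    argv = [script] + named + [v for _, v in sorted(positional.items())]
+    return argv, env
+```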
diff --git a/sfbuild/docs/modules/common/io_rename.md b/sfbuild/docs/modules/common/io_rename.md
new file mode 100644
index 000000000..7c29ff5ff
--- /dev/null
+++ b/sfbuild/docs/modules/common/io_rename.md
@@ -0,0 +1,27 @@
+# sfbuild module "io_rename"
+
+##### _Category: Common_
+
+-------------------------------
+
+This module provides a way to rename (i.e. change) dependencies and values of an
+instance of a different module. It wraps another module, whose name is specified
+in `params.module`, and changes the names of the dependencies and values it
+relies on.
+
+## Setup
+
+### 1. Parameters
+
+* `module` (string, required) - name of the wrapped module
+* `params` (dict[string -> any], optional): parameters passed to the wrapped
+  module instance.
+* `rename_takes` (dict[string -> string]) - mapping for inputs ("takes")
+* `rename_produces` (dict[string -> string]) - mapping for outputs ("products")
+* `rename_values` (dict[string -> string]) - mapping for values
+
+In the three mapping dicts, keys represent the names visible to the wrapped module
+and values represent the names visible to the modules outside.
+Not specifying a mapping for a given entry will leave it with its original name.
+
+### 2. Values
+
+All values specified for this module will be accessible by the wrapped module.
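+
+Conceptually the renaming is just a key translation in both directions; a sketch
+(illustrative, not the module's actual code):
+
+```python
+def outside_to_inner(outside: dict, renames: 'dict[str, str]') -> dict:
+    # Keys of `renames` are the inner (wrapped-module) names,
+    # values are the outside names.
+    inverse = {outer: inner for inner, outer in renames.items()}
+    return {inverse.get(name, name): path for name, path in outside.items()}
+
+# E.g. with `"rename_takes": {"eblif": "eblif_repacked"}` the wrapped module
+# sees the outside `eblif_repacked` dependency under its own `eblif` input:
+assert outside_to_inner({'eblif_repacked': 'top_repacked.eblif'},
+                        {'eblif': 'eblif_repacked'}) \
+    == {'eblif': 'top_repacked.eblif'}
+```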
diff --git a/sfbuild/docs/modules/common/mkdirs.md b/sfbuild/docs/modules/common/mkdirs.md
new file mode 100644
index 000000000..160bdc75b
--- /dev/null
+++ b/sfbuild/docs/modules/common/mkdirs.md
@@ -0,0 +1,13 @@
+# sfbuild module "mkdirs"
+
+##### _Category: Common_
+
+-------------------------------
+
+This module creates directories specified by the author of the flow definition
+as its targets.
+
+### Parameters
+
+Each key serves as the name of a directory to be created, while the value is the
+path for that directory.
\ No newline at end of file
diff --git a/sfbuild/docs/modules/common/synth.md b/sfbuild/docs/modules/common/synth.md
new file mode 100644
index 000000000..394f26e6a
--- /dev/null
+++ b/sfbuild/docs/modules/common/synth.md
@@ -0,0 +1,47 @@
+# sfbuild module "synth"
+
+##### _Category: Common_
+
+-------------------------------
+
+The _synth_ module is meant to be used to execute YOSYS synthesis.
+
+The module should guarantee the following outputs:
+ * `eblif`
+ * `fasm_extra` (can be empty)
+ * `json`
+ * `synth_json`
+ * `synth_log` (on demand)
+
+For detailed information about these targets, please refer to
+`docs/common targets and variables.md`
+
+## Setup
+
+Which files are generated, and how, depends on the TCL scripts executed
+within YOSYS, and the scripts vary depending on the target platform. Due to this
+design choice it is required that the author of the flow definition parametrizes
+the `synth` module in a way that will **GUARANTEE** that the targets mentioned
+above will be generated upon a successful YOSYS run.
+
+The setup of the synth module follows these specifications:
+
+### 1. Module parameters:
+
+The `params` section of a stage configuration may contain a `produces` list.
+The list should specify additional targets that will be generated
+(the `?` qualifier is allowed).
+
+### 2. Values:
+
+The `synth` module requires the following values:
+
+* `tcl_scripts` (string, required): A path to a directory containing the
+  `synth.tcl` and `conv.tcl` scripts that will be used by YOSYS.
+* `read_verilog_args` (list[string | number], optional) - If specified, the
+  verilog sources will be read using the `read_verilog` procedure with the options
+  contained in this value.
+* `yosys_tcl_env` (dict[string -> string | list[string]], required) - A mapping
+  that defines environmental variables that will be used within the TCL scripts.
+  This should contain the references to the module's inputs and outputs in order
+  to guarantee the generation of the desired targets.
diff --git a/sfbuild/part_db/parts.json b/sfbuild/part_db/parts.json
new file mode 100644
index 000000000..e76ecfffd
--- /dev/null
+++ b/sfbuild/part_db/parts.json
@@ -0,0 +1,193 @@
+{
+    "XC7A50TCSG324-1": "xc7a50t",
+    "XC7A50TCSG324-2": "xc7a50t",
+    "XC7A50TCSG324-2L": "xc7a50t",
+    "XC7A50TCSG324-3": "xc7a50t",
+    "XC7A50TCPG236-1": "xc7a50t",
+    "XC7A50TCPG236-2": "xc7a50t",
+    "XC7A50TCPG236-2L": "xc7a50t",
+    "XC7A50TCPG236-3": "xc7a50t",
+    "XC7A50TCSG325-1": "xc7a50t",
+    "XC7A50TCSG325-2": "xc7a50t",
+    "XC7A50TCSG325-2L": "xc7a50t",
+    "XC7A50TCSG325-3": "xc7a50t",
+    "XC7A50TFGG484-1": "xc7a50t",
+    "XC7A50TFGG484-2": "xc7a50t",
+    "XC7A50TFGG484-2L": "xc7a50t",
+    "XC7A50TFGG484-3": "xc7a50t",
+    "XC7A50TFTG256-1": "xc7a50t",
+    "XC7A50TFTG256-2": "xc7a50t",
+    "XC7A50TFTG256-2L": "xc7a50t",
+    "XC7A50TFTG256-3": "xc7a50t",
+    "XC7A35TIFTG256-1L": "xc7a50t",
+    "XC7A35TIFGG484-1L": "xc7a50t",
+    "XC7A35TICSG325-1L": "xc7a50t",
+    "XC7A35TICSG324-1L": "xc7a50t",
+    "XC7A35TICPG236-1L": "xc7a50t",
+    "XC7A50TICPG236-1L": "xc7a50t",
+    "XC7A50TIFTG256-1L": "xc7a50t",
+    "XC7A50TIFGG484-1L": "xc7a50t",
+    "XC7A50TICSG325-1L": "xc7a50t",
+    "XC7A50TICSG324-1L": "xc7a50t",
+    "XC7A35TFTG256-1": "xc7a50t",
+    "XC7A35TFTG256-2": "xc7a50t",
+    "XC7A35TFTG256-2L": "xc7a50t",
+    "XC7A35TFTG256-3": "xc7a50t",
+    "XC7A35TFGG484-1": "xc7a50t",
+    "XC7A35TFGG484-2": "xc7a50t",
+    "XC7A35TFGG484-2L": "xc7a50t",
+    "XC7A35TFGG484-3": "xc7a50t",
+    "XC7A35TCSG325-1": "xc7a50t",
+    "XC7A35TCSG325-2": "xc7a50t",
+    "XC7A35TCSG325-2L": "xc7a50t",
+    "XC7A35TCSG325-3": "xc7a50t",
+    "XC7A35TCSG324-1": "xc7a50t",
+    "XC7A35TCSG324-2": "xc7a50t",
+    "XC7A35TCSG324-2L": "xc7a50t",
+    "XC7A35TCSG324-3": "xc7a50t",
+    "XC7A35TCPG236-1": "xc7a50t",
+    "XC7A35TCPG236-2": "xc7a50t",
+    "XC7A35TCPG236-2L": "xc7a50t",
+    "XC7A35TCPG236-3": "xc7a50t",
+    "XC7A100TIFTG256-1L": "xc7a100ti",
+    "XC7A100TIFGG676-1L": "xc7a100ti",
+    "XC7A100TIFGG484-1L": "xc7a100ti",
+    "XC7A100TICSG324-1L":
"xc7a100ti", + "XC7A100TFTG256-1": "xc7a100t", + "XC7A100TFTG256-2": "xc7a100t", + "XC7A100TFTG256-2L": "xc7a100t", + "XC7A100TFTG256-3": "xc7a100t", + "XC7A100TFGG676-1": "xc7a100t", + "XC7A100TFGG676-2": "xc7a100t", + "XC7A100TFGG676-2L": "xc7a100t", + "XC7A100TFGG676-3": "xc7a100t", + "XC7A100TFGG484-1": "xc7a100t", + "XC7A100TFGG484-2": "xc7a100t", + "XC7A100TFGG484-2L": "xc7a100t", + "XC7A100TFGG484-3": "xc7a100t", + "XC7A100TCSG324-1": "xc7a100t", + "XC7A100TCSG324-2": "xc7a100t", + "XC7A100TCSG324-2L": "xc7a100t", + "XC7A100TCSG324-3": "xc7a100t", + "XC7A200TFBG484-1": "xc7a200t", + "XC7A200TFBG484-2": "xc7a200t", + "XC7A200TFBG484-2L": "xc7a200t", + "XC7A200TFBG484-3": "xc7a200t", + "XC7A200TFBG676-1": "xc7a200t", + "XC7A200TFBG676-2": "xc7a200t", + "XC7A200TFBG676-2L": "xc7a200t", + "XC7A200TFBG676-3": "xc7a200t", + "XC7A200TFBV484-1": "xc7a200t", + "XC7A200TFBV484-2": "xc7a200t", + "XC7A200TFBV484-2L": "xc7a200t", + "XC7A200TFBV484-3": "xc7a200t", + "XC7A200TFBV676-1": "xc7a200t", + "XC7A200TFBV676-2": "xc7a200t", + "XC7A200TFBV676-2L": "xc7a200t", + "XC7A200TFBV676-3": "xc7a200t", + "XC7A200TFFG1156-1": "xc7a200t", + "XC7A200TFFG1156-2": "xc7a200t", + "XC7A200TFFG1156-2L": "xc7a200t", + "XC7A200TFFG1156-3": "xc7a200t", + "XC7A200TSBV484-1": "xc7a200t", + "XC7A200TSBV484-2": "xc7a200t", + "XC7A200TSBV484-2L": "xc7a200t", + "XC7A200TSBV484-3": "xc7a200t", + "XC7A200TFFV1156-1": "xc7a200t", + "XC7A200TFFV1156-2": "xc7a200t", + "XC7A200TFFV1156-2L": "xc7a200t", + "XC7A200TFFV1156-3": "xc7a200t", + "XC7A200TSBG484-1": "xc7a200t", + "XC7A200TSBG484-2": "xc7a200t", + "XC7A200TSBG484-2L": "xc7a200t", + "XC7A200TSBG484-3": "xc7a200t", + "XC7A200TISBV484-1L": "xc7a200t", + "XC7A200TISBG484-1L": "xc7a200t", + "XC7A200TIFFV1156-1L": "xc7a200t", + "XC7A200TIFFG1156-1L": "xc7a200t", + "XC7A200TIFBV676-1L": "xc7a200t", + "XC7A200TIFBV484-1L": "xc7a200t", + "XC7A200TIFBG676-1L": "xc7a200t", + "XC7A200TIFBG484-1L": "xc7a200t", + "XC7A12TLCSG325-2L": "xc7a12tl", + "XC7A12TLCPG238-2L": "xc7a12tl", + "XC7A25TLCSG325-2L": "xc7a25tl", + "XC7A25TLCPG238-2L": "xc7a25tl", + "XC7A35TLCPG236-2L": "xc7a35tl", + "XC7A35TLCSG324-2L": "xc7a35tl", + "XC7A35TLCSG325-2L": "xc7a35tl", + "XC7A35TLFGG484-2L": "xc7a35tl", + "XC7A35TLFTG256-2L": "xc7a35tl", + "XC7A15TLCPG236-2L": "xc7a15tl", + "XC7A15TLCSG324-2L": "xc7a15tl", + "XC7A15TLCSG325-2L": "xc7a15tl", + "XC7A15TLFGG484-2L": "xc7a15tl", + "XC7A15TLFTG256-2L": "xc7a15tl", + "XC7A50TLCPG236-2L": "xc7a50tl", + "XC7A50TLCSG324-2L": "xc7a50tl", + "XC7A50TLCSG325-2L": "xc7a50tl", + "XC7A50TLFGG484-2L": "xc7a50tl", + "XC7A50TLFTG256-2L": "xc7a50tl", + "XC7A75TLFTG256-2L": "xc7a75tl", + "XC7A75TLFGG676-2L": "xc7a75tl", + "XC7A75TLFGG484-2L": "xc7a75tl", + "XC7A75TLCSG324-2L": "xc7a75tl", + "XC7A100TLCSG324-2L": "xc7a100tl", + "XC7A100TLFGG484-2L": "xc7a100tl", + "XC7A100TLFGG676-2L": "xc7a100tl", + "XC7A100TLFTG256-2L": "xc7a100tl", + "XC7A200TLFBG484-2L": "xc7a200tl", + "XC7A200TLFBG676-2L": "xc7a200tl", + "XC7A200TLFBV484-2L": "xc7a200tl", + "XC7A200TLFBV676-2L": "xc7a200tl", + "XC7A200TLFFG1156-2L": "xc7a200tl", + "XC7A200TLFFV1156-2L": "xc7a200tl", + "XC7A200TLSBG484-2L": "xc7a200tl", + "XC7A200TLSBV484-2L": "xc7a200tl", + "XA7A35TCSG325-1I": "xa7a50t", + "XA7A35TCSG325-1Q": "xa7a50t", + "XA7A35TCSG325-2I": "xa7a50t", + "XA7A35TCSG324-1I": "xa7a50t", + "XA7A35TCSG324-1Q": "xa7a50t", + "XA7A35TCSG324-2I": "xa7a50t", + "XA7A35TCPG236-1I": "xa7a50t", + "XA7A35TCPG236-1Q": "xa7a50t", + "XA7A35TCPG236-2I": "xa7a50t", + "XA7A15TCPG236-1I": "xa7a15t", + 
"XA7A15TCPG236-1Q": "xa7a15t", + "XA7A15TCPG236-2I": "xa7a15t", + "XA7A15TCSG324-1I": "xa7a15t", + "XA7A15TCSG324-1Q": "xa7a15t", + "XA7A15TCSG324-2I": "xa7a15t", + "XA7A15TCSG325-1I": "xa7a15t", + "XA7A15TCSG325-1Q": "xa7a15t", + "XA7A15TCSG325-2I": "xa7a15t", + "XA7A50TCPG236-1I": "xa7a50t", + "XA7A50TCPG236-1Q": "xa7a50t", + "XA7A50TCPG236-2I": "xa7a50t", + "XA7A50TCSG324-1I": "xa7a50t", + "XA7A50TCSG324-1Q": "xa7a50t", + "XA7A50TCSG324-2I": "xa7a50t", + "XA7A50TCSG325-1I": "xa7a50t", + "XA7A50TCSG325-1Q": "xa7a50t", + "XA7A50TCSG325-2I": "xa7a50t", + "XA7A100TFGG484-1I": "xa7a100t", + "XA7A100TFGG484-1Q": "xa7a100t", + "XA7A100TFGG484-2I": "xa7a100t", + "XA7A100TCSG324-1I": "xa7a100t", + "XA7A100TCSG324-1Q": "xa7a100t", + "XA7A100TCSG324-2I": "xa7a100t", + + "EOS3FF512-PDN64": "ql-eos-s3", + "EOS3FF512-WRN42": "ql-eos-s3", + "EOS3FLF512-PDN64": "ql-eos-s3", + "EOS3FLF512-WRN42": "ql-eos-s3", + "EOS3CF512-PDN64": "ql-eos-s3", + "EOS3CF512-WRN42": "ql-eos-s3", + "EOS3CLF512-PDN64": "ql-eos-s3", + "EOS3CLF512-WRN42": "ql-eos-s3", + + "K4N8": "ql-k4n8_slow", + "K4N8_SLOW": "ql-k4n8_slow", + "K4N8_FAST": "ql-k4n8_fast" +} \ No newline at end of file diff --git a/sfbuild/platforms/ql-eos-s3.json b/sfbuild/platforms/ql-eos-s3.json new file mode 100644 index 000000000..1cc606767 --- /dev/null +++ b/sfbuild/platforms/ql-eos-s3.json @@ -0,0 +1,121 @@ +{ + "stages": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:generic_script_wrapper", + "place": "common:place", + "route": "common:route", + "fasm": "common:fasm", + "bitstream": "common:generic_script_wrapper" + }, + + "values": { + "part_name": "pd64", + "device": "ql-eos-s3", + "device_alt": "ql-eos-s3_wlcsp", + "pinmap": "${shareDir}/arch/ql-eos-s3_wlcsp/pinmap_PD64.csv", + "arch_def": "${shareDir}/arch/ql-eos-s3_wlcsp/arch.timing.xml", + "rr_graph_lookahead_bin": "${shareDir}/arch/ql-eos-s3_wlcsp/rr_graph_ql-eos-s3_wlcsp.lookahead.bin", + "rr_graph_real_bin": "${shareDir}/arch/ql-eos-s3_wlcsp/rr_graph_ql-eos-s3_wlcsp.rr_graph.real.bin", + "vpr_place_delay": "${shareDir}/arch/ql-eos-s3_wlcsp/rr_graph_ql-eos-s3_wlcsp.place_delay.bin", + "vpr_grid_layout_name": "ql-eos-s3", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 100, + "clock_modeling": "route", + "place_delay_model": "delta_override", + "router_lookahead": "extended_map", + "check_route": "quick", + "strict_checks": "off", + "allow_dangling_combinational_nodes": "on", + "disable_errors": "check_unbuffered_edges:check_route", + "congested_routing_iteration_threshold": "0.8", + "incremental_reroute_delay_ripup": "off", + "base_cost_type": "delay_normalized_length_bounded", + "bb_factor": "10", + "initial_pres_fac": "4.0", + "check_rr_graph": "off", + "pack_high_fanout_threshold": "PB-lOGIC:18", + "suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment " + } + }, + + "stage_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${device}" + } + }, + "synth": { + "params": { + "takes": [ "pcf?" 
], + "produces": [ "synth_v" ] + }, + "values": { + "tcl_scripts": "${shareDir}/scripts/pp3", + "read_verilog_args": [], + "yosys_tcl_env": { + "OUT_JSON": "${:json}", + "OUT_SYNTH_V": "${:synth_v}", + "OUT_EBLIF": "${:eblif}", + "OUT_FASM_EXTRA": "${:fasm_extra}", + "TECHMAP_PATH": "${shareDir}/techmaps/pp3", + "DEVICE_CELLS_SIM": "${shareDir}/arch/ql-eos-s3_wlcsp/cells/ram_sim.v", + "DEVICE_CELLS_MAP": "${shareDir}/arch/ql-eos-s3_wlcsp/cells/ram_map.v", + "PINMAP_FILE": "${shareDir}/arch/ql-eos-s3_wlcsp/pinmap_PD64.csv", + "PCF_FILE": "${:pcf}" + } + } + }, + "ioplace": { + "params": { + "stage_name": "ioplace", + "interpreter": "${python3}", + "script": "${binDir}/python/ql_pp3_create_ioplace.py", + "outputs": { + "io_place": { + "mode": "stdout", + "target": "${:eblif[noext]}.ioplace" + } + }, + "inputs": { + "blif": "${:eblif}", + "net": "${:net}", + "pcf": "${:pcf}", + "map": "${shareDir}/arch/ql-eos-s3_wlcsp/pinmap_PD64.csv", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "bitstream": { + "params": { + "stage_name": "bitstream", + "script": "qlfasm", + "outputs": { + "bitstream": { + "mode": "file", + "file": "bitstream-${device}.bit", + "target": "${build_dir?}/bitstream-${device}.bit" + }, + "bitstream_log": { + "mode": "stdout", + "target": "${build_dir?}/bitstream-${device}.log" + } + }, + "inputs": { + "#1": "${:fasm}", + "#2": "bitstream-${device}.bit", + "dev-type": "ql-eos-s3", + "db-root": "${shareDir}/fasm_database/pp3" + } + }, + "values": { + "build_dir?": "." + } + } + } +} \ No newline at end of file diff --git a/sfbuild/platforms/ql-k4n8_fast.json b/sfbuild/platforms/ql-k4n8_fast.json new file mode 100644 index 000000000..52b3dd59f --- /dev/null +++ b/sfbuild/platforms/ql-k4n8_fast.json @@ -0,0 +1,167 @@ +{ + "stages": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:generic_script_wrapper", + "place": "common:place", + "repack": "common:generic_script_wrapper", + "route": "common:io_rename", + "fasm": "common:io_rename", + "bitstream": "common:generic_script_wrapper" + }, + + "values": { + "part_name": "k4n8", + "device": "qlf_k4n8_umc22", + "rr_graph_lookahead_bin": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/rr_graph_qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast.lookahead.bin", + "rr_graph_real_bin": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/qlf_k4n8-qlf_k4n8_umc22_fast.rr_graph.bin", + "vpr_place_delay": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/rr_graph_qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast.place_delay.bin", + "vpr_grid_layout_name": "qlf_k4n8-qlf_k4n8_umc22_fast", + "arch_def": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/arch_qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast.xml", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 100, + "clock_modeling": "ideal", + "place_delta_delay_matrix_calculation_method": "dijkstra", + "place_delay_model": "delta_override", + "router_lookahead": "extended_map", + "allow_dangling_combinational_nodes": "on", + "absorb_buffer_luts": "off" + } + }, + + "stage_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${device}" + } + }, + "synth": { + "params": { + "produces": [ "synth_v" ] + }, + "values": { + "tcl_scripts": 
"${shareDir}/scripts/qlf_k4n8", + "read_verilog_args": [], + "yosys_tcl_env": { + "TOP": "${top}", + "OUT_JSON": "${:json}", + "TECHMAP_PATH": "${shareDir}/techmaps/qlf_k4n8", + "OUT_SYNTH_V": "${:synth_v}", + "OUT_EBLIF": "${:eblif}", + "PYTHON3": "${python3}" + } + } + }, + "ioplace": { + "params": { + "stage_name": "ioplace", + "interpreter": "${python3}", + "script": "${binDir}/python/ql_qlf_create_ioplace.py", + "outputs": { + "io_place": { + "mode": "stdout", + "target": "${:eblif[noext]}.ioplace" + } + }, + "inputs": { + "blif": "${:eblif}", + "net": "${:net}", + "pcf": "${:pcf}", + "pinmap_xml": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/pinmap_qlf_k4n8_umc22.xml", + "csv_file": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/pinmap_qlf_k4n8_umc22.csv", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "repack": { + "values": { + "repacking_rules": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_fast_qlf_k4n8-qlf_k4n8_umc22_fast/qlf_k4n8-qlf_k4n8_umc22_fast.repacking_rules.json" + }, + "params": { + "stage_name": "repack", + "interpreter": "${python3}", + "script": "${binDir}/python/repacker/repack.py", + "outputs": { + "eblif_repacked": { + "mode": "file", + "file": "${:eblif[noext]}_repacked.eblif", + "target": "${:eblif[noext]}_repacked.eblif" + }, + "place_repacked": { + "mode": "file", + "file": "${:place[noext]}_repacked.place", + "target": "${:place[noext]}_repacked.place" + }, + "net_repacked": { + "mode": "file", + "file": "${:net[noext]}_repacked.net", + "target": "${:net[noext]}_repacked.net" + }, + "repack_log": { + "mode": "stdout", + "target": "${top}.repack.log" + } + }, + "inputs": { + "eblif-in": "${:eblif}", + "net-in": "${:net}", + "place-in": "${:place}", + "eblif-out": "${:eblif[noext]}_repacked.eblif", + "place-out": "${:place[noext]}_repacked.place", + "net-out": "${:net[noext]}_repacked.net", + "absorb_buffer_luts": "on", + "vpr-arch": "${arch_def}", + "repacking-rules": "${repacking_rules}", + "json-constraints": "${json_constraints?}", + "pcf-constraints": "${pcf?}", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "route": { + "params": { + "module": "common:route", + "rename_takes": { + "eblif": "eblif_repacked", + "place": "place_repacked", + "net": "net_repacked" + } + } + }, + "fasm": { + "params": { + "module": "common:fasm", + "rename_takes": { + "eblif": "eblif_repacked", + "place": "place_repacked", + "net": "net_repacked" + } + } + }, + "bitstream": { + "params": { + "stage_name": "bitstream", + "script": "qlf_fasm", + "outputs": { + "bitstream": { + "mode": "file", + "file": "${:fasm[noext]}.bit", + "target": "${:fasm[noext]}.bit" + } + }, + "inputs": { + "#1": "${:fasm}", + "#2": "${:fasm[noext]}.bit", + "db-root": "${shareDir}/fasm_database/qlf_k4n8", + "format": "4byte", + "assemble": true + } + } + } + } +} \ No newline at end of file diff --git a/sfbuild/platforms/ql-k4n8_slow.json b/sfbuild/platforms/ql-k4n8_slow.json new file mode 100644 index 000000000..ef10a9142 --- /dev/null +++ b/sfbuild/platforms/ql-k4n8_slow.json @@ -0,0 +1,167 @@ +{ + "stages": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:generic_script_wrapper", + "place": "common:place", + "repack": "common:generic_script_wrapper", + "route": "common:io_rename", + "fasm": "common:io_rename", + "bitstream": "common:generic_script_wrapper" + }, + + "values": { + "part_name": "k4n8", + "device": "qlf_k4n8_umc22", + "rr_graph_lookahead_bin": 
"${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/rr_graph_qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow.lookahead.bin", + "rr_graph_real_bin": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/qlf_k4n8-qlf_k4n8_umc22_slow.rr_graph.bin", + "vpr_place_delay": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/rr_graph_qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow.place_delay.bin", + "vpr_grid_layout_name": "qlf_k4n8-qlf_k4n8_umc22_slow", + "arch_def": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/arch_qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow.xml", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 100, + "clock_modeling": "ideal", + "place_delta_delay_matrix_calculation_method": "dijkstra", + "place_delay_model": "delta_override", + "router_lookahead": "extended_map", + "allow_dangling_combinational_nodes": "on", + "absorb_buffer_luts": "off" + } + }, + + "stage_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${device}" + } + }, + "synth": { + "params": { + "produces": [ "synth_v" ] + }, + "values": { + "tcl_scripts": "${shareDir}/scripts/qlf_k4n8", + "read_verilog_args": [], + "yosys_tcl_env": { + "TOP": "${top}", + "OUT_JSON": "${:json}", + "TECHMAP_PATH": "${shareDir}/techmaps/qlf_k4n8", + "OUT_SYNTH_V": "${:synth_v}", + "OUT_EBLIF": "${:eblif}", + "PYTHON3": "${python3}" + } + } + }, + "ioplace": { + "params": { + "stage_name": "ioplace", + "interpreter": "${python3}", + "script": "${binDir}/python/ql_qlf_create_ioplace.py", + "outputs": { + "io_place": { + "mode": "stdout", + "target": "${:eblif[noext]}.ioplace" + } + }, + "inputs": { + "blif": "${:eblif}", + "net": "${:net}", + "pcf": "${:pcf}", + "pinmap_xml": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/pinmap_qlf_k4n8_umc22.xml", + "csv_file": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/pinmap_qlf_k4n8_umc22.csv", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "repack": { + "values": { + "repacking_rules": "${shareDir}/arch/qlf_k4n8-qlf_k4n8_umc22_slow_qlf_k4n8-qlf_k4n8_umc22_slow/qlf_k4n8-qlf_k4n8_umc22_slow.repacking_rules.json" + }, + "params": { + "stage_name": "repack", + "interpreter": "${python3}", + "script": "${binDir}/python/repacker/repack.py", + "outputs": { + "eblif_repacked": { + "mode": "file", + "file": "${:eblif[noext]}_repacked.eblif", + "target": "${:eblif[noext]}_repacked.eblif" + }, + "place_repacked": { + "mode": "file", + "file": "${:place[noext]}_repacked.place", + "target": "${:place[noext]}_repacked.place" + }, + "net_repacked": { + "mode": "file", + "file": "${:net[noext]}_repacked.net", + "target": "${:net[noext]}_repacked.net" + }, + "repack_log": { + "mode": "stdout", + "target": "${top}.repack.log" + } + }, + "inputs": { + "eblif-in": "${:eblif}", + "net-in": "${:net}", + "place-in": "${:place}", + "eblif-out": "${:eblif[noext]}_repacked.eblif", + "place-out": "${:place[noext]}_repacked.place", + "net-out": "${:net[noext]}_repacked.net", + "absorb_buffer_luts": "on", + "vpr-arch": "${arch_def}", + "repacking-rules": "${repacking_rules}", + "json-constraints": "${json_constraints?}", + "pcf-constraints": "${pcf?}", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "route": { + "params": { + "module": "common:route", + "rename_takes": { + 
"eblif": "eblif_repacked", + "place": "place_repacked", + "net": "net_repacked" + } + } + }, + "fasm": { + "params": { + "module": "common:fasm", + "rename_takes": { + "eblif": "eblif_repacked", + "place": "place_repacked", + "net": "net_repacked" + } + } + }, + "bitstream": { + "params": { + "stage_name": "bitstream", + "script": "qlf_fasm", + "outputs": { + "bitstream": { + "mode": "file", + "file": "${:fasm[noext]}.bit", + "target": "${:fasm[noext]}.bit" + } + }, + "inputs": { + "#1": "${:fasm}", + "#2": "${:fasm[noext]}.bit", + "db-root": "${shareDir}/fasm_database/qlf_k4n8", + "format": "4byte", + "assemble": true + } + } + } + } +} \ No newline at end of file diff --git a/sfbuild/platforms/xc7a100t.json b/sfbuild/platforms/xc7a100t.json new file mode 100644 index 000000000..13f87e38c --- /dev/null +++ b/sfbuild/platforms/xc7a100t.json @@ -0,0 +1,155 @@ +{ + "values": { + "part_name": "xc7a100tcsg324-1", + "device": "xc7a100t_test", + "bitstream_device": "artix7", + "pinmap": "${shareDir}/arch/xc7a100t_test/vpr_grid_map.csv", + "arch_def": "${shareDir}/arch/xc7a100t_test/arch.timing.xml", + "rr_graph_lookahead_bin": "${shareDir}/arch/xc7a100t_test/rr_graph_xc7a100t_test.lookahead.bin", + "rr_graph_real_bin": "${shareDir}/arch/xc7a100t_test/rr_graph_xc7a100t_test.rr_graph.real.bin", + "vpr_place_delay": "${shareDir}/arch/xc7a100t_test/rr_graph_xc7a100t_test.place_delay.bin", + "vpr_grid_layout_name": "xc7a100t-test", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 500, + "router_heap": "bucket", + "clock_modeling": "route", + "place_delta_delay_matrix_calculation_method": "dijkstra", + "place_delay_model": "delta", + "router_lookahead": "extended_map", + "check_route": "quick", + "strict_checks": "off", + "allow_dangling_combinational_nodes": "on", + "disable_errors": "check_unbuffered_edges:check_route", + "congested_routing_iteration_threshold": "0.8", + "incremental_reroute_delay_ripup": "off", + "base_cost_type": "delay_normalized_length_bounded", + "bb_factor": 10, + "acc_fac": "0.7", + "astar_fac": "1.8", + "initial_pres_fac": "2.828", + "pres_fac_mult": "1.2", + "check_rr_graph": "off", + "suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch" + } + }, + + "stages": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:generic_script_wrapper", + "place_constraints": "common:generic_script_wrapper", + "place": "common:place", + "route": "common:route", + "fasm": "common:fasm", + "bitstream": "common:generic_script_wrapper" + }, + + "stage_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${device}" + } + }, + "synth": { + "params": { + "takes": [ "xdc?" ], + "produces": [ + "sdc", + "synth_v" + ], + "prod_meta": { + "sdc": "Standard Design Constraints file for X7 series." 
+ } + }, + "values": { + "tcl_scripts": "${shareDir}/scripts/xc7", + "yosys_tcl_env": { + "USE_ROI": "FALSE", + "TOP": "${top}", + "OUT_JSON": "${:json}", + "OUT_SDC": "${:sdc}", + "PART_JSON": "${prjxray_db}/${bitstream_device}/${part_name}/part.json", + "OUT_FASM_EXTRA": "${:fasm_extra}", + "TECHMAP_PATH": "${shareDir}/techmaps/xc7_vpr/techmap", + "OUT_SYNTH_V": "${:synth_v}", + "SYNTH_JSON": "${:synth_json}", + "OUT_EBLIF": "${:eblif}", + "PYTHON3": "${python3}", + "UTILS_PATH": "${shareDir}/scripts", + "INPUT_XDC_FILES": "${:xdc}" + } + } + }, + "ioplace": { + "params": { + "stage_name": "ioplace", + "interpreter": "${python3}", + "script": "${shareDir}/scripts/prjxray_create_ioplace.py", + "outputs": { + "io_place": { + "mode": "stdout", + "target": "${:net[noext]}.ioplace" + } + }, + "inputs": { + "blif": "${:eblif}", + "map": "${shareDir}/arch/${device}/${part_name}/pinmap.csv", + "net": "${:net}", + "pcf": "${:pcf?}", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "place_constraints": { + "params": { + "stage_name": "place_constraints", + "interpreter": "${python3}", + "script": "${shareDir}/scripts/prjxray_create_place_constraints.py", + "outputs": { + "place_constraints": { + "mode": "stdout", + "target": "${:net[noext]}.preplace" + } + }, + "inputs": { + "net": "${:net}", + "arch": "${shareDir}/arch/${device}/arch.timing.xml", + "blif": "${:eblif}", + "input": "${:io_place}", + "db_root": "${prjxray_db}", + "part": "${part_name}", + "vpr_grid_map": "${shareDir}/arch/${device}/vpr_grid_map.csv", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "bitstream": { + "params": { + "stage_name": "bitstream", + "script": "xcfasm", + "outputs": { + "bitstream": { + "mode": "file", + "file": "${:fasm[noext]}.bit", + "target": "${:fasm[noext]}.bit" + } + }, + "inputs": { + "db-root": "${prjxray_db}/${bitstream_device}", + "part": "${part_name}", + "part_file": "${prjxray_db}/${bitstream_device}/${part_name}/part.yaml", + "sparse": true, + "emit_pudc_b_pullup": true, + "fn_in": "${:fasm}", + "frm2bit": "xc7frames2bit", + "bit_out": "${:fasm[noext]}.bit" + } + } + } + } +} \ No newline at end of file diff --git a/sfbuild/platforms/xc7a200t.json b/sfbuild/platforms/xc7a200t.json new file mode 100644 index 000000000..0d446cc4c --- /dev/null +++ b/sfbuild/platforms/xc7a200t.json @@ -0,0 +1,155 @@ +{ + "values": { + "part_name": "xc7a200tsbg484-1", + "device": "xc7a200t_test", + "bitstream_device": "artix7", + "pinmap": "${shareDir}/arch/xc7a200t_test/vpr_grid_map.csv", + "arch_def": "${shareDir}/arch/xc7a200t_test/arch.timing.xml", + "rr_graph_lookahead_bin": "${shareDir}/arch/xc7a200t_test/rr_graph_xc7a200t_test.lookahead.bin", + "rr_graph_real_bin": "${shareDir}/arch/xc7a200t_test/rr_graph_xc7a200t_test.rr_graph.real.bin", + "vpr_place_delay": "${shareDir}/arch/xc7a200t_test/rr_graph_xc7a200t_test.place_delay.bin", + "vpr_grid_layout_name": "xc7a200t-test", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 500, + "router_heap": "bucket", + "clock_modeling": "route", + "place_delta_delay_matrix_calculation_method": "dijkstra", + "place_delay_model": "delta", + "router_lookahead": "extended_map", + "check_route": "quick", + "strict_checks": "off", + "allow_dangling_combinational_nodes": "on", + "disable_errors": "check_unbuffered_edges:check_route", + "congested_routing_iteration_threshold": "0.8", + "incremental_reroute_delay_ripup": "off", + 
"base_cost_type": "delay_normalized_length_bounded", + "bb_factor": 10, + "acc_fac": "0.7", + "astar_fac": "1.8", + "initial_pres_fac": "2.828", + "pres_fac_mult": "1.2", + "check_rr_graph": "off", + "suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch" + } + }, + + "stages": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:generic_script_wrapper", + "place_constraints": "common:generic_script_wrapper", + "place": "common:place", + "route": "common:route", + "fasm": "common:fasm", + "bitstream": "common:generic_script_wrapper" + }, + + "stage_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${device}" + } + }, + "synth": { + "params": { + "takes": [ "xdc?" ], + "produces": [ + "sdc", + "synth_v" + ], + "prod_meta": { + "sdc": "Standard Design Constraints file for X7 series." + } + }, + "values": { + "tcl_scripts": "${shareDir}/scripts/xc7", + "yosys_tcl_env": { + "USE_ROI": "FALSE", + "TOP": "${top}", + "OUT_JSON": "${:json}", + "OUT_SDC": "${:sdc}", + "PART_JSON": "${prjxray_db}/${bitstream_device}/${part_name}/part.json", + "OUT_FASM_EXTRA": "${:fasm_extra}", + "TECHMAP_PATH": "${shareDir}/techmaps/xc7_vpr/techmap", + "OUT_SYNTH_V": "${:synth_v}", + "SYNTH_JSON": "${:synth_json}", + "OUT_EBLIF": "${:eblif}", + "PYTHON3": "${python3}", + "UTILS_PATH": "${shareDir}/scripts", + "INPUT_XDC_FILES": "${:xdc}" + } + } + }, + "ioplace": { + "params": { + "stage_name": "ioplace", + "interpreter": "${python3}", + "script": "${shareDir}/scripts/prjxray_create_ioplace.py", + "outputs": { + "io_place": { + "mode": "stdout", + "target": "${:net[noext]}.ioplace" + } + }, + "inputs": { + "blif": "${:eblif}", + "map": "${shareDir}/arch/${device}/${part_name}/pinmap.csv", + "net": "${:net}", + "pcf": "${:pcf?}", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "place_constraints": { + "params": { + "stage_name": "place_constraints", + "interpreter": "${python3}", + "script": "${shareDir}/scripts/prjxray_create_place_constraints.py", + "outputs": { + "place_constraints": { + "mode": "stdout", + "target": "${:net[noext]}.preplace" + } + }, + "inputs": { + "net": "${:net}", + "arch": "${shareDir}/arch/${device}/arch.timing.xml", + "blif": "${:eblif}", + "input": "${:io_place}", + "db_root": "${prjxray_db}", + "part": "${part_name}", + "vpr_grid_map": "${shareDir}/arch/${device}/vpr_grid_map.csv", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "bitstream": { + "params": { + "stage_name": "bitstream", + "script": "xcfasm", + "outputs": { + "bitstream": { + "mode": "file", + "file": "${:fasm[noext]}.bit", + "target": "${:fasm[noext]}.bit" + } + }, + "inputs": { + "db-root": "${prjxray_db}/${bitstream_device}", + "part": "${part_name}", + "part_file": "${prjxray_db}/${bitstream_device}/${part_name}/part.yaml", + "sparse": true, + "emit_pudc_b_pullup": true, + "fn_in": "${:fasm}", + "frm2bit": "xc7frames2bit", + "bit_out": "${:fasm[noext]}.bit" + } + } + } + } +} \ No newline at end of file diff --git a/sfbuild/platforms/xc7a50t.json b/sfbuild/platforms/xc7a50t.json new file mode 100644 index 000000000..b18f732b4 --- /dev/null +++ b/sfbuild/platforms/xc7a50t.json @@ -0,0 +1,155 @@ +{ + "stages": { + "mk_build_dir": "common:mkdirs", + "synth": "common:synth", + "pack": "common:pack", + "ioplace": "common:generic_script_wrapper", + "place_constraints": "common:generic_script_wrapper", + 
"place": "common:place", + "route": "common:route", + "fasm": "common:fasm", + "bitstream": "common:generic_script_wrapper" + }, + + "values": { + "part_name": "xc7a35tcsg324-1", + "device": "xc7a50t_test", + "bitstream_device": "artix7", + "pinmap": "${shareDir}/arch/xc7a50t_test/vpr_grid_map.csv", + "arch_def": "${shareDir}/arch/xc7a50t_test/arch.timing.xml", + "rr_graph_lookahead_bin": "${shareDir}/arch/xc7a50t_test/rr_graph_xc7a50t_test.lookahead.bin", + "rr_graph_real_bin": "${shareDir}/arch/xc7a50t_test/rr_graph_xc7a50t_test.rr_graph.real.bin", + "vpr_place_delay": "${shareDir}/arch/xc7a50t_test/rr_graph_xc7a50t_test.place_delay.bin", + "vpr_grid_layout_name": "xc7a50t-test", + "vpr_options": { + "max_router_iterations": 500, + "routing_failure_predictor": "off", + "router_high_fanout_threshold": -1, + "constant_net_method": "route", + "route_chan_width": 500, + "router_heap": "bucket", + "clock_modeling": "route", + "place_delta_delay_matrix_calculation_method": "dijkstra", + "place_delay_model": "delta", + "router_lookahead": "extended_map", + "check_route": "quick", + "strict_checks": "off", + "allow_dangling_combinational_nodes": "on", + "disable_errors": "check_unbuffered_edges:check_route", + "congested_routing_iteration_threshold": "0.8", + "incremental_reroute_delay_ripup": "off", + "base_cost_type": "delay_normalized_length_bounded", + "bb_factor": 10, + "acc_fac": "0.7", + "astar_fac": "1.8", + "initial_pres_fac": "2.828", + "pres_fac_mult": "1.2", + "check_rr_graph": "off", + "suppress_warnings": "${noisyWarnings},sum_pin_class:check_unbuffered_edges:load_rr_indexed_data_T_values:check_rr_node:trans_per_R:check_route:set_rr_graph_tool_comment:calculate_average_switch" + } + }, + + "stage_options": { + "mk_build_dir": { + "params": { + "build_dir": "build/${device}" + } + }, + "synth": { + "params": { + "takes": [ "xdc?" ], + "produces": [ + "sdc", + "synth_v" + ], + "prod_meta": { + "sdc": "Standard Design Constraints file for X7 series." 
+ } + }, + "values": { + "tcl_scripts": "${shareDir}/scripts/xc7", + "yosys_tcl_env": { + "USE_ROI": "FALSE", + "TOP": "${top}", + "OUT_JSON": "${:json}", + "OUT_SDC": "${:sdc}", + "PART_JSON": "${prjxray_db}/${bitstream_device}/${part_name}/part.json", + "OUT_FASM_EXTRA": "${:fasm_extra}", + "TECHMAP_PATH": "${shareDir}/techmaps/xc7_vpr/techmap", + "OUT_SYNTH_V": "${:synth_v}", + "SYNTH_JSON": "${:synth_json}", + "OUT_EBLIF": "${:eblif}", + "PYTHON3": "${python3}", + "UTILS_PATH": "${shareDir}/scripts", + "INPUT_XDC_FILES": "${:xdc}" + } + } + }, + "ioplace": { + "params": { + "stage_name": "ioplace", + "interpreter": "${python3}", + "script": "${shareDir}/scripts/prjxray_create_ioplace.py", + "outputs": { + "io_place": { + "mode": "stdout", + "target": "${:net[noext]}.ioplace" + } + }, + "inputs": { + "blif": "${:eblif}", + "map": "${shareDir}/arch/${device}/${part_name}/pinmap.csv", + "net": "${:net}", + "pcf": "${:pcf?}", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "place_constraints": { + "params": { + "stage_name": "place_constraints", + "interpreter": "${python3}", + "script": "${shareDir}/scripts/prjxray_create_place_constraints.py", + "outputs": { + "place_constraints": { + "mode": "stdout", + "target": "${:net[noext]}.preplace" + } + }, + "inputs": { + "net": "${:net}", + "arch": "${shareDir}/arch/${device}/arch.timing.xml", + "blif": "${:eblif}", + "input": "${:io_place}", + "db_root": "${prjxray_db}", + "part": "${part_name}", + "vpr_grid_map": "${shareDir}/arch/${device}/vpr_grid_map.csv", + "$PYTHONPATH": "${binDir}/python/" + } + } + }, + "bitstream": { + "params": { + "stage_name": "bitstream", + "script": "xcfasm", + "outputs": { + "bitstream": { + "mode": "file", + "file": "${:fasm[noext]}.bit", + "target": "${:fasm[noext]}.bit" + } + }, + "inputs": { + "db-root": "${prjxray_db}/${bitstream_device}", + "part": "${part_name}", + "part_file": "${prjxray_db}/${bitstream_device}/${part_name}/part.yaml", + "sparse": true, + "emit_pudc_b_pullup": true, + "fn_in": "${:fasm}", + "frm2bit": "xc7frames2bit", + "bit_out": "${:fasm[noext]}.bit" + } + } + } + } +} \ No newline at end of file diff --git a/sfbuild/sf_argparse.py b/sfbuild/sf_argparse.py new file mode 100644 index 000000000..1f7ba2e83 --- /dev/null +++ b/sfbuild/sf_argparse.py @@ -0,0 +1,234 @@ +from argparse import ArgumentParser, Namespace +import re + +def _add_flow_arg(parser: ArgumentParser): + parser.add_argument('-f', '--flow', metavar='flow_path', type=str, + help='Path to flow definition file') + +def _setup_build_parser(parser: ArgumentParser): + _add_flow_arg(parser) + parser.add_argument('-t', '--target', metavar='target_name', type=str, + help='Perform stages necessary to acquire target') + parser.add_argument('--platform', metavar='platform_name', + help='Target platform_name') + parser.add_argument('-P', '--pretend', action='store_true', + help='Show dependency resolution without executing flow') + parser.add_argument('-i', '--info', action='store_true', + help='Display info about available targets') + parser.add_argument('-c', '--nocache', action='store_true', + help='Ignore caching and rebuild everything up to the ' + 'target.') + parser.add_argument('-S', '--stageinfo', nargs=1, metavar='stage_name', + help='Display info about stage') + parser.add_argument('-r', '--requirements', action='store_true', + help='Display info about project\'s requirements.') + parser.add_argument('-p', '--part', metavar='part_name', + help='Name of the target chip') + parser.add_argument('--dep', '-D', 
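+                        # Illustrative examples (names are not exhaustive):
+                        # `-D build_dir=build/mydesign` sets a dependency
+                        # explicitly, while `-V synth.tcl_scripts=my/scripts`
+                        # sets a value for the `synth` stage only; see
+                        # `_parse_depval` below for the accepted syntax.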
+                        action='append', default=[])
+    parser.add_argument('--val', '-V', action='append', default=[])
+    # Currently unsupported
+    parser.add_argument('-M', '--moduleinfo', nargs=1,
+                        metavar='module_name_or_path',
+                        help='Display info about module. Requires `-p` option '
+                             'in case of module name')
+    parser.add_argument('-T', '--take_explicit_paths', nargs='+',
+                        metavar='', type=str,
+                        help='Specify stage inputs explicitly. This might be '
+                             'required if some files got renamed or deleted and '
+                             'symbiflow is unable to deduce the flow that led '
+                             'to dependencies required by the requested stage')
+
+def _setup_show_dep_parser(parser: ArgumentParser):
+    parser.add_argument('-p', '--platform', metavar='platform_name', type=str,
+                        help='Name of the platform (used to display '
+                             'platform-specific values).')
+    parser.add_argument('-s', '--stage', metavar='stage_name', type=str,
+                        help='Name of the stage (use if you want to set the '
+                             'value only for that stage). Requires `-p`.')
+    _add_flow_arg(parser)
+
+# Set up the argument parser for the program. Pretty self-explanatory.
+def setup_argparser():
+    parser = ArgumentParser(description='SymbiFlow Build System')
+
+    parser.add_argument('-v', '--verbose', action='count', default=0)
+    parser.add_argument('-s', '--silent', action='store_true')
+
+    subparsers = parser.add_subparsers(dest='command')
+    build = subparsers.add_parser('build')
+    _setup_build_parser(build)
+    show_dep = subparsers.add_parser('showd',
+                                     description='Show the value(s) assigned to a '
+                                                 'dependency')
+    _setup_show_dep_parser(show_dep)
+
+    return parser
+
+def _parse_depval(depvalstr: str):
+    """
+    Parse a dependency or value definition of the form:
+    optional_stage_name.value_or_dependency_name=value
+    See `_parse_cli_value` for details on how to pass different kinds of values.
+    """
+
+    d = { 'name': None, 'stage': None, 'value': None }
+
+    splitted = list(_unescaped_separated('=', depvalstr))
+
+    if len(splitted) != 2:
+        raise Exception('Expected exactly one unescaped \'=\'')
+
+    pathstr = splitted[0]
+    valstr = splitted[1]
+
+    path_components = pathstr.split('.')
+    if len(path_components) < 1:
+        raise Exception('Missing value')
+    d['name'] = path_components.pop(len(path_components) - 1)
+    if len(path_components) > 0:
+        d['stage'] = path_components.pop(0)
+    if len(path_components) > 0:
+        raise Exception('Too many path components')
+
+    d['value'] = _parse_cli_value(valstr)
+
+    return d
+
+def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
+    """
+    Find all occurrences of a pattern in a string that contains escape sequences.
+    Yields pairs of starting and ending indices of the pattern.
+    """
+
+    noescapes = ''
+
+    # We remove all escape sequences from the string, so the pattern matches
+    # only unescaped characters. To map the results back to the string
+    # containing the escape sequences, we track the offsets by which the
+    # characters were shifted.
+    offsets = []
+    offset = 0
+    for sl in s.split(escape_chr):
+        l = len(sl)
+        if l <= 1:
+            continue
+        noescape = sl[(1 if offset != 0 else 0):]
+        for _ in noescape:
+            offsets.append(offset)
+        offset += 2
+        noescapes += noescape
+
+    iter = re.finditer(regexp, noescapes)
+
+    for m in iter:
+        start = m.start()
+        end = m.end()
+        off1 = start + offsets[start]
+        off2 = end + offsets[end]
+        yield off1, off2
+
+def _unescaped_separated(regexp: str, s: str, escape_chr='\\'):
+    """ Yields substrings of a string that contains escape sequences.
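+    For example (illustrative): separating 'a,b\\,c' on ',' yields 'a' and
+    'b\\,c' - the escaped comma is not treated as a separator.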
""" + + last_end = 0; + for start, end in _unescaped_matches(regexp, s, escape_chr=escape_chr): + yield s[last_end:start] + last_end = end + if last_end < len(s): + yield s[last_end:] + else: + yield '' + +def _parse_cli_value(s: str): + """ + Parse a value/dependency passed to CLI + CLI values are generated by the following non-contextual grammar: + + S -> :str: (string/number value) + S -> [I] + S -> {D} + I -> I,I + I -> S + D -> D,D + D -> K:S + K -> :str: + + Starting symbol = S + Terminal symbols: '[', ']', '{', '}', ':', ,',', :str: + (:str: represents any string where terminals are escaped) + + TODO: The current implementation of my parser is crippled and is + not able to parse nested structures. Currently there is no real use + case for having nested structures as values, so it's kinda fine atm. + """ + + if len(s) == 0: + return '' + + # List + if s[0] == '[': + if len(s) < 2 or s[len(s)-1] != ']': + raise Exception('Missing \']\' delimiter') + inner = s[1:(len(s)-1)] + if inner == '': + return [] + return [_parse_cli_value(v) for v in _unescaped_separated(',', inner)] + + # Dictionary + if s[0] == '{': + if len(s) < 2 or s[len(s)-1] != '}': + raise Exception('Missing \'}\' delimiter') + d = {} + inner = s[1:(len(s)-1)] + if inner == '': + return {} + for kv in _unescaped_separated(',', inner): + k_v = list(_unescaped_separated(':', kv)) + if len(k_v) < 2: + raise Exception('Missing value in dictionary entry') + if len(k_v) > 2: + raise Exception('Unexpected \':\' token') + key = k_v[0] + value = _parse_cli_value(k_v[1]) + d[key] = value + + return d + + # Bool hack + if s == '\\True': + return True + if s == '\\False': + return False + + # Number hack + if len(s) >= 3 and s[0:1] == '\\N': + return int(s[2:]) + + # String + return s.replace('\\', '') + +def get_cli_flow_config(args: Namespace, platform: str): + def create_defdict(): + return { + 'dependencies': {}, + 'values': {}, + } + + platform_flow_config = create_defdict() + + def add_entries(arglist: 'list[str]', dict_name: str): + for value_def in (_parse_depval(cliv) for cliv in arglist): + stage = value_def['stage'] + if stage is None: + platform_flow_config[dict_name][value_def['name']] = \ + value_def['value'] + else: + if platform_flow_config.get(stage) is None: + platform_flow_config[stage] = create_defdict() + platform_flow_config[stage][dict_name][value_def['name']] = \ + value_def['value'] + + add_entries(args.dep, 'dependencies') + add_entries(args.val, 'values') + + return { platform: platform_flow_config } diff --git a/sfbuild/sf_cache.py b/sfbuild/sf_cache.py new file mode 100755 index 000000000..fd11177f6 --- /dev/null +++ b/sfbuild/sf_cache.py @@ -0,0 +1,115 @@ +import os +import zlib +import json + +def _get_file_hash(path: str): + with open(path, 'rb') as f: + b = f.read() + return str(zlib.adler32(b)) + +class SymbiCache: + """ + `SymbiCache` is used to track changes among dependencies and keep + the status of the files on a persistent storage. + Files which are tracked get their checksums calculated and stored in a file. + If file's checksum differs from the one saved in a file, that means, the file + has changed. + """ + + hashes: 'dict[str, dict[str, str]]' + status: 'dict[str, str]' + cachefile_path: str + + def __init__(self, cachefile_path): + """ `chachefile_path` - path to a file used for persistent storage of + checksums. 
""" + + self.status = {} + self.cachefile_path = cachefile_path + self.load() + + def _try_pop_consumer(self, path: str, consumer: str): + if self.status.get(path) and self.status[path].get(consumer): + self.status[path].pop(consumer) + if len(self.status[path]) == 0: + self.status.pop(path) + if self.hashes.get(path) and self.hashes[path].get(consumer): + self.hashes[path].pop(consumer) + if len(self.hashes[path]) == 0: + self.hashes.pop(path) + + def _try_push_consumer_hash(self, path: str, consumer: str, hash): + if not self.hashes.get(path): + self.hashes[path] = {} + self.hashes[path][consumer] = hash + def _try_push_consumer_status(self, path: str, consumer: str, status): + if not self.status.get(path): + self.status[path] = {} + self.status[path][consumer] = status + + def _get_last_hash(self, path: str, consumer: str): + last_hashes = self.hashes.get(path) + if last_hashes is None: + return None + return last_hashes.get(consumer) + + def update(self, path: str, consumer: str): + """ Add/remove a file to.from the tracked files, update checksum + if necessary and calculate status. + + Multiple hashes are stored per file, one for each consumer module. + "__target" is used as a convention for a "fake" consumer in case the file + is requested as a target and not used by a module within the active flow. + """ + + isdir = os.path.isdir(path) + if not (os.path.isfile(path) or os.path.islink(path) or isdir): + self._try_pop_consumer(path, consumer) + return True + hash = 0 # Directories always get '0' hash. + if not isdir: + hash = _get_file_hash(path) + last_hash = self._get_last_hash(path, consumer) + if hash != last_hash: + self._try_push_consumer_status(path, consumer, 'changed') + self._try_push_consumer_hash(path, consumer, hash) + return True + else: + self._try_push_consumer_status(path, consumer, 'same') + return False + + def get_status(self, path: str, consumer: str): + """ Get status for a file with a given path. + returns 'untracked' if the file is not tracked or hasn't been + treated with `update` procedure before calling `get_status`. """ + + statuses = self.status.get(path) + if not statuses: + return 'untracked' + status = statuses.get(consumer) + if not status: + return 'untracked' + return status + + def load(self): + """Loads cache's state from the persistent storage""" + + try: + with open(self.cachefile_path, 'r') as f: + b = f.read() + self.hashes = json.loads(b) + except json.JSONDecodeError as jerr: + print('WARNING: .symbicache is corrupted! ' + 'This will cause flow to re-execute from the beggining.') + self.hashes = {} + except FileNotFoundError: + print('Couldn\'t open .symbicache cache file. 
+                  'This will cause the flow to re-execute from the beginning.')
+            self.hashes = {}
+
+    def save(self):
+        """Saves the cache's state to persistent storage"""
+
+        with open(self.cachefile_path, 'w') as f:
+            b = json.dumps(self.hashes, indent=4)
+            f.write(b)
\ No newline at end of file
diff --git a/sfbuild/sf_common/__init__.py b/sfbuild/sf_common/__init__.py
new file mode 100644
index 000000000..9bac5519d
--- /dev/null
+++ b/sfbuild/sf_common/__init__.py
@@ -0,0 +1,265 @@
+from argparse import Namespace
+import subprocess
+import os
+import shutil
+import sys
+import re
+
+def decompose_depname(name: str):
+    spec = 'req'
+    specchar = name[len(name) - 1]
+    if specchar == '?':
+        spec = 'maybe'
+    elif specchar == '!':
+        spec = 'demand'
+    if spec != 'req':
+        name = name[:len(name) - 1]
+    return name, spec
+
+def with_qualifier(name: str, q: str) -> str:
+    if q == 'req':
+        return decompose_depname(name)[0]
+    if q == 'maybe':
+        return decompose_depname(name)[0] + '?'
+    if q == 'demand':
+        return decompose_depname(name)[0] + '!'
+
+_sfbuild_module_collection_name_to_path = {}
+def scan_modules(mypath: str):
+    global _sfbuild_module_collection_name_to_path
+
+    sfbuild_home = mypath
+    sfbuild_home_dirs = os.listdir(sfbuild_home)
+    sfbuild_module_dirs = \
+        [dir for dir in sfbuild_home_dirs if re.match('sf_.*_modules$', dir)]
+    _sfbuild_module_collection_name_to_path = \
+        dict([(re.match('sf_(.*)_modules$', moddir).groups()[0],
+               os.path.join(sfbuild_home, moddir))
+              for moddir in sfbuild_module_dirs])
+
+def resolve_modstr(modstr: str):
+    """Resolve a module's location from a module string."""
+    sl = modstr.split(':')
+    if len(sl) > 2:
+        raise Exception('Incorrect module syntax. '
+                        'Expected at most one \':\' separator')
+    if len(sl) < 2:
+        return modstr
+    collection_name = sl[0]
+    module_filename = sl[1] + '.py'
+
+    col_path = _sfbuild_module_collection_name_to_path.get(collection_name)
+    if not col_path:
+        fatal(-1, f'Module collection {collection_name} does not exist')
+    return os.path.join(col_path, module_filename)
+
+def deep(fun):
+    """
+    Create a recursive string transform function for 'str | list | dict',
+    i.e. a dependency.
+    """
+
+    def d(paths, *args, **kwargs):
+        if type(paths) is str:
+            return fun(paths)
+        elif type(paths) is list:
+            return [d(p) for p in paths]
+        elif type(paths) is dict:
+            return dict([(k, d(p)) for k, p in paths.items()])
+
+    return d
+
+def file_noext(path: str):
+    """ Return a file path without its extension """
+    m = re.match(r'(.*)\.[^.]*$', path)
+    if m:
+        path = m.groups()[0]
+    return path
+
+class VprArgs:
+    """ Represents the argument list for VPR (Versatile Place and Route) """
+
+    arch_dir: str
+    arch_def: str
+    lookahead: str
+    rr_graph: str
+    place_delay: str
+    device_name: str
+    eblif: str
+    optional: list
+
+    def __init__(self, share: str, eblif, values: Namespace,
+                 sdc_file: 'str | None' = None,
+                 vpr_extra_opts: 'list | None' = None):
+        self.arch_dir = os.path.join(share, 'arch')
+        self.arch_def = values.arch_def
+        self.lookahead = values.rr_graph_lookahead_bin
+        self.rr_graph = values.rr_graph_real_bin
+        self.place_delay = values.vpr_place_delay
+        self.device_name = values.vpr_grid_layout_name
+        self.eblif = os.path.realpath(eblif)
+        if values.vpr_options is not None:
+            self.optional = options_dict_to_list(values.vpr_options)
+        else:
+            self.optional = []
+        if vpr_extra_opts is not None:
+            self.optional += vpr_extra_opts
+        if sdc_file is not None:
+            self.optional += ['--sdc_file', sdc_file]
+
+class SubprocessException(Exception):
+    return_code: int
+
+def sub(*args, env=None, cwd=None):
+    """ Execute a subprocess """
+
+    out = subprocess.run(args, capture_output=True, env=env, cwd=cwd)
+    if out.returncode != 0:
+        print(f'[ERROR]: {args[0]} non-zero return code.\n'
+              f'stderr:\n{out.stderr.decode()}\n\n'
+        )
+        exit(out.returncode)
+    return out.stdout
+
+def vpr(mode: str, vprargs: VprArgs, cwd=None):
+    """ Execute `vpr` """
+
+    modeargs = []
+    if mode == 'pack':
+        modeargs = ['--pack']
+    elif mode == 'place':
+        modeargs = ['--place']
+    elif mode == 'route':
+        modeargs = ['--route']
+
+    return sub(*(['vpr',
+                  vprargs.arch_def,
+                  vprargs.eblif,
+                  '--device', vprargs.device_name,
+                  '--read_rr_graph', vprargs.rr_graph,
+                  '--read_router_lookahead', vprargs.lookahead,
+                  '--read_placement_delay_lookup', vprargs.place_delay]
+                 + modeargs + vprargs.optional),
+               cwd=cwd)
+
+_vpr_specific_values = [
+    'arch_def',
+    'rr_graph_lookahead_bin',
+    'rr_graph_real_bin',
+    'vpr_place_delay',
+    'vpr_grid_layout_name',
+    'vpr_options?'
+]
+def vpr_specific_values():
+    global _vpr_specific_values
+    return _vpr_specific_values
+
+def options_dict_to_list(opt_dict: dict):
+    """
+    Convert a dictionary of named options for a CLI program to a list.
+    Example: { "option_name": "value" } -> [ "--option_name", "value" ]
+    """
+
+    opts = []
+    for key, val in opt_dict.items():
+        opts.append('--' + key)
+        if not(type(val) is list and val == []):
+            opts.append(str(val))
+    return opts
+
+def noisy_warnings(device):
+    """ Set the noisy warnings log path for the given device. """
+    os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings-' + device + '_pack.log'
+
+def my_path():
+    """ Get the directory of the currently executed script """
+    mypath = os.path.realpath(sys.argv[0])
+    return os.path.dirname(mypath)
+
+def save_vpr_log(filename, build_dir=''):
+    """ Save the VPR log (move the default output file to the desired path) """
+    shutil.move(os.path.join(build_dir, 'vpr_stdout.log'), filename)
+
+def fatal(code, message):
+    """
+    Print a message informing about an error that has occurred and terminate
+    the program with a given return code.
+    """
+
+    print(f'[FATAL ERROR]: {message}')
+    exit(code)
+
+class ResolutionEnv:
+    """
+    ResolutionEnv is used to hold onto mappings for variables used in the flow
+    and perform text substitutions using those variables.
+    Variables can be referred to in any "resolvable" string using the following
+    syntax: 'Some static text ${variable_name}'. The '${variable_name}' part
+    will be replaced by the value associated with the name 'variable_name', if
+    such a mapping exists.
+    """
+
+    values: dict
+
+    def __init__(self, values={}):
+        self.values = values
+
+    def __copy__(self):
+        return ResolutionEnv(self.values.copy())
+
+    def resolve(self, s, final=False):
+        """
+        Perform resolution on `s`.
+        `s` can be a `str`, a `dict` with arbitrary keys and resolvable values,
+        or a `list` of resolvable values.
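+        For example (illustrative): with values {'top': 'counter'}, resolving
+        'build/${top}.eblif' yields 'build/counter.eblif'.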
+
+        final=True - resolve any unknown variables to ''.
+        This is a hack and probably should be removed in the future.
+        """
+
+        if type(s) is str:
+            match_list = list(re.finditer(r'\$\{([^${}]*)\}', s))
+            # Assumption: re.finditer finds matches in a left-to-right order
+            match_list.reverse()
+            for match in match_list:
+                match_str = match.group(1)
+                match_str = match_str.replace('?', '')
+                v = self.values.get(match_str)
+                if not v:
+                    if final:
+                        v = ''
+                    else:
+                        continue
+                span = match.span()
+                if type(v) is str:
+                    s = s[:span[0]] + v + s[span[1]:]
+                elif type(v) is list: # Assume it's a list of strings
+                    ns = list([s[:span[0]] + ve + s[span[1]:] for ve in v])
+                    s = ns
+
+        elif type(s) is list:
+            s = list(map(self.resolve, s))
+        elif type(s) is dict:
+            s = dict([(k, self.resolve(v)) for k, v in s.items()])
+        return s
+
+    def add_values(self, values: dict):
+        """ Add mappings from `values` """
+
+        for k, v in values.items():
+            self.values[k] = self.resolve(v)
+
+verbosity_level = 0
+
+def sfprint(verbosity: int, *args):
+    """ Print with regard to the currently set verbosity level """
+
+    global verbosity_level
+    if verbosity <= verbosity_level:
+        print(*args)
+
+def set_verbosity_level(level: int):
+    global verbosity_level
+    verbosity_level = level
+
+def get_verbosity_level() -> int:
+    global verbosity_level
+    return verbosity_level
diff --git a/sfbuild/sf_common_modules/__init__.py b/sfbuild/sf_common_modules/__init__.py
new file mode 100644
index 000000000..041b0435b
--- /dev/null
+++ b/sfbuild/sf_common_modules/__init__.py
@@ -0,0 +1 @@
+# This is only to make pydoc recognize this directory as a package
diff --git a/sfbuild/sf_common_modules/fasm.py b/sfbuild/sf_common_modules/fasm.py
new file mode 100644
index 000000000..83d57196b
--- /dev/null
+++ b/sfbuild/sf_common_modules/fasm.py
@@ -0,0 +1,90 @@
+#!/usr/bin/python3
+
+# Symbiflow Stage Module
+
+# ----------------------------------------------------------------------------- #
+
+import os
+from sf_common import *
+from sf_module import *
+
+# ----------------------------------------------------------------------------- #
+
+def concat_fasm(fasm: str, fasm_extra: str, output: str):
+    fasm_data = None
+    fasm_extra_data = None
+    with open(fasm, 'r') as fasm_file, open(fasm_extra, 'r') as fasm_extra_file:
+        fasm_data = fasm_file.read()
+        fasm_extra_data = fasm_extra_file.read()
+    data = fasm_data + '\n' + fasm_extra_data
+
+    with open(output, 'w') as output_file:
+        output_file.write(data)
+
+def fasm_output_path(build_dir: str, top: str):
+    return f'{build_dir}/{top}.fasm'
+
+class FasmModule(Module):
+
+    def map_io(self, ctx: ModuleContext):
+        build_dir = os.path.dirname(ctx.takes.eblif)
+        return {
+            'fasm': fasm_output_path(build_dir, ctx.values.top)
+        }
+
+    def execute(self, ctx: ModuleContext):
+        build_dir = os.path.dirname(ctx.takes.eblif)
+
+        vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values)
+
+        optional = []
+        if ctx.values.pnr_corner is not None:
+            optional += ['--pnr_corner', ctx.values.pnr_corner]
+        if ctx.takes.sdc:
+            optional += ['--sdc', ctx.takes.sdc]
+
+        s = ['genfasm', vprargs.arch_def,
+             os.path.realpath(ctx.takes.eblif),
+             '--device', vprargs.device_name,
+             '--read_rr_graph', vprargs.rr_graph
+        ] + vprargs.optional + optional
+
+        if get_verbosity_level() >= 2:
+            yield 'Generating FASM...\n   ' + ' '.join(s)
+        else:
+            yield 'Generating FASM...'
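+
+        # genfasm writes `<top>.fasm` into the build directory on its own;
+        # the file is moved afterwards if the configured output path differs.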
+        sub(*s, cwd=build_dir)
+
+        default_fasm_output_name = fasm_output_path(build_dir, ctx.values.top)
+        if default_fasm_output_name != ctx.outputs.fasm:
+            shutil.move(default_fasm_output_name, ctx.outputs.fasm)
+
+        if ctx.takes.fasm_extra:
+            yield 'Appending extra FASM...'
+            concat_fasm(ctx.outputs.fasm, ctx.takes.fasm_extra, ctx.outputs.fasm)
+        else:
+            yield 'No extra FASM to append'
+
+    def __init__(self, _):
+        self.name = 'fasm'
+        self.no_of_phases = 2
+        self.takes = [
+            'eblif',
+            'net',
+            'place',
+            'route',
+            'fasm_extra?',
+            'sdc?'
+        ]
+        self.produces = [ 'fasm' ]
+        self.values = [
+            'device',
+            'top',
+            'pnr_corner?'
+        ] + vpr_specific_values()
+        self.prod_meta = {
+            'fasm': 'FPGA assembly file'
+        }
+
+ModuleClass = FasmModule
diff --git a/sfbuild/sf_common_modules/generic_script_wrapper.py b/sfbuild/sf_common_modules/generic_script_wrapper.py
new file mode 100644
index 000000000..b70846016
--- /dev/null
+++ b/sfbuild/sf_common_modules/generic_script_wrapper.py
@@ -0,0 +1,310 @@
+#!/usr/bin/python3
+
+# Symbiflow Stage Module
+
+"""
+This module is intended for wrapping simple scripts without rewriting them as
+an sfbuild module. This is mostly to maintain compatibility with workflows
+that do not use sfbuild and instead rely on legacy scripts.
+
+Accepted module parameters:
+* `stage_name` (string, optional): Name describing the stage
+* `script` (string, mandatory): Path to the script to be executed
+* `interpreter` (string, optional): Interpreter for the script
+* `cwd` (string, optional): Current Working Directory for the script
+* `outputs` (dict[string -> dict[string -> string]], mandatory):
+  A dict with output descriptions (dicts). Keys name output dependencies.
+  * `mode` (string, mandatory): "file" or "stdout". Describes how the output is
+    grabbed from the script.
+  * `file` (string, required if `mode` is "file"): Name of the file generated by
+    the script.
+  * `target` (string, required): Default path for the file of the generated
+    dependency. All values available during the map_io stage can be used. Each
+    input dependency also gets two extra values associated with it:
+    `:dependency_name[noext]`, which contains the path to the dependency with
+    its extension (anything after the last ".") removed, and
+    `:dependency_name[dir]`, which contains the directory path of the
+    dependency. This is useful for deriving an output name from the input.
+  * `meta` (string, optional): Description of the output dependency.
+* `inputs` (dict[string -> string | bool], mandatory):
+  A dict with input descriptions. The key is either the name of a named argument
+  or the position of an unnamed argument prefaced with "#" (e.g. "#1"). Positions
+  are indexed from 1, as it's a convention that the 0th argument is the path of
+  the executed program. Values are strings that can contain references to
+  variables to be resolved after the project flow configuration is loaded (that
+  means they can reference values and dependencies which are to be set by the
+  user). All of the module's inputs will be determined by the references used.
+  Thus dependency and value definitions are implicit. If the value of the
+  resolved string is empty and is associated with a named argument, the argument
+  in question will be skipped entirely. This allows using optional dependencies.
+  To use a named argument as a flag instead, set it to `true`.
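+
+A minimal, hypothetical configuration (illustrative names only, not taken from
+any real platform definition) wrapping a pin-placement script could look like:
+
+    "params": {
+        "stage_name": "ioplace",
+        "interpreter": "${python3}",
+        "script": "create_ioplace.py",
+        "outputs": {
+            "io_place": {
+                "mode": "stdout",
+                "target": "${:net[noext]}.ioplace"
+            }
+        },
+        "inputs": {
+            "net": "${:net}",
+            "#1": "${:eblif}"
+        }
+    }
+
+Here `net` becomes a `--net <path>` argument, `#1` a positional argument, and
+the script's stdout is captured as the `io_place` dependency.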
+""" + +# TODO: `environment` input kind + +# ----------------------------------------------------------------------------- # + +import os +import shutil +import re + +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +def _generate_stage_name(params): + stage_name = params.get('stage_name') + if stage_name is None: + stage_name = '' + return f'{stage_name}-generic' + +def _get_param(params, name: str): + param = params.get(name) + if not param: + raise Exception(f'generic module wrapper parameters ' + f'missing `{name}` field') + return param + +def _parse_param_def(param_def: str): + if param_def[0] == '#': + return 'positional', int(param_def[1:]) + elif param_def[0] == '$': + return 'environmental', param_def[1:] + return 'named', param_def + +_file_noext_deep = deep(file_noext) +_realdirpath_deep = deep(lambda p: os.path.realpath(os.path.dirname(p))) + +class InputReferences: + dependencies: 'set[str]' + values: 'set[str]' + + def merge(self, other): + self.dependencies.update(other.dependencies) + self.values.update(other.values) + + def __init__(self): + self.dependencies = set() + self.values = set() + +def _get_input_references(input: str) -> InputReferences: + refs = InputReferences() + + if type(input) is not str: + return refs + + matches = re.finditer('\$\{([^${}]*)\}', input) + for match in matches: + match_str = match.group(1) + if match_str[0] == ':': + if len(match_str) < 2: + raise Exception('Dependency name must be at least 1 character ' + 'long') + dep_name = re.match('([^\\[\\]]*)', match_str[1:]).group(1) + refs.dependencies.add(dep_name) + else: + refs.values.add(match_str) + + return refs + + +def _tailcall1(self, fun): + def newself(arg, self=self, fun=fun): + fun(arg) + self(arg) + return newself + +def _add_extra_values_to_env(ctx: ModuleContext): + takes = dict(vars(ctx.takes).items()) + for take_name, take_path in takes.items(): + if take_path is None: + continue + attr_name = f':{take_name}[noext]' + ctx.r_env.values[attr_name] = _file_noext_deep(take_path) + attr_name = f':{take_name}[dir]' + dirname = _realdirpath_deep(take_path) + ctx.r_env.values[attr_name] = dirname + +def _make_noop1(): + def noop(_): + return + return noop + +class GenericScriptWrapperModule(Module): + script_path: str + stdout_target: 'None | tuple[str, str]' + file_outputs: 'list[tuple[str, str, str]]' + interpreter: 'None | str' + cwd: 'None | str' + + def map_io(self, ctx: ModuleContext): + _add_extra_values_to_env(ctx) + + outputs = {} + for dep, _, out_path in self.file_outputs: + out_path_resolved = ctx.r_env.resolve(out_path, final=True) + outputs[dep] = out_path_resolved + + if self.stdout_target: + out_path_resolved = \ + ctx.r_env.resolve(self.stdout_target[1], final=True) + outputs[self.stdout_target[0]] = out_path_resolved + + return outputs + + def execute(self, ctx: ModuleContext): + _add_extra_values_to_env(ctx) + + cwd = ctx.r_env.resolve(self.cwd) + + sub_args = [ctx.r_env.resolve(self.script_path, final=True)] \ + + self.get_args(ctx) + if self.interpreter: + sub_args = [ctx.r_env.resolve(self.interpreter, final=True)] + sub_args + + sub_env = self.get_env(ctx) + + # XXX: This may produce incorrect string if arguments contains whitespace + # characters + cmd = ' '.join(sub_args) + + if get_verbosity_level() >= 2: + yield f'Running script...\n {cmd}' + else: + yield f'Running an externel script...' + + data = sub(*sub_args, cwd=cwd, env=sub_env) + + yield 'Writing outputs...' 
+        if self.stdout_target:
+            target = ctx.r_env.resolve(self.stdout_target[1], final=True)
+            with open(target, 'wb') as f:
+                f.write(data)
+
+        for _, file, target in self.file_outputs:
+            file = ctx.r_env.resolve(file, final=True)
+            target = ctx.r_env.resolve(target, final=True)
+            if target != file:
+                shutil.move(file, target)
+
+    def _init_outputs(self, output_defs: 'dict[str, dict[str, str]]'):
+        self.stdout_target = None
+        self.file_outputs = []
+
+        for dep_name, output_def in output_defs.items():
+            dname, _ = decompose_depname(dep_name)
+            self.produces.append(dep_name)
+            meta = output_def.get('meta')
+            if type(meta) is str:
+                self.prod_meta[dname] = meta
+
+            mode = output_def.get('mode')
+            if type(mode) is not str:
+                raise Exception(f'Output mode for `{dep_name}` is not specified')
+
+            target = output_def.get('target')
+            if type(target) is not str:
+                raise Exception('`target` field is not specified')
+
+            if mode == 'file':
+                file = output_def.get('file')
+                if type(file) is not str:
+                    raise Exception('Output file is not specified')
+                self.file_outputs.append((dname, file, target))
+            elif mode == 'stdout':
+                if self.stdout_target is not None:
+                    raise Exception('stdout output is already specified')
+                self.stdout_target = dname, target
+
+    # A very functional approach
+    def _init_inputs(self, input_defs):
+        positional_args = []
+        named_args = []
+        env_vars = {}
+        refs = InputReferences()
+
+        get_args = _make_noop1()
+        get_env = _make_noop1()
+
+        for arg_code, input in input_defs.items():
+            param_kind, param = _parse_param_def(arg_code)
+
+            push = None
+            push_env = None
+            if param_kind == 'named':
+                def push_named(val: 'str | bool | int', param=param):
+                    nonlocal named_args
+                    if type(val) is bool:
+                        named_args.append(f'--{param}')
+                    else:
+                        named_args += [f'--{param}', str(val)]
+                push = push_named
+            elif param_kind == 'environmental':
+                def push_environ(val: 'str | bool | int', param=param):
+                    nonlocal env_vars
+                    env_vars[param] = val
+                push_env = push_environ
+            else:
+                def push_positional(val: str, param=param):
+                    nonlocal positional_args
+                    positional_args.append((param, val))
+                push = push_positional
+
+            input_refs = _get_input_references(input)
+            refs.merge(input_refs)
+
+            if push is not None:
+                def push_q(ctx: ModuleContext, push=push, input=input):
+                    val = ctx.r_env.resolve(input, final=True)
+                    if val != '':
+                        push(val)
+                get_args = _tailcall1(get_args, push_q)
+            else:
+                def push_q(ctx: ModuleContext, push_env=push_env, input=input):
+                    val = ctx.r_env.resolve(input, final=True)
+                    if val != '':
+                        push_env(val)
+                get_env = _tailcall1(get_env, push_q)
+
+        def get_all_args(ctx: ModuleContext):
+            nonlocal get_args, positional_args, named_args
+
+            get_args(ctx)
+
+            positional_args.sort(key=lambda t: t[0])
+            pos = [ a for _, a in positional_args ]
+
+            return named_args + pos
+
+        def get_all_env(ctx: ModuleContext):
+            nonlocal get_env, env_vars
+            get_env(ctx)
+            if len(env_vars.items()) == 0:
+                return None
+            return env_vars
+
+        setattr(self, 'get_args', get_all_args)
+        setattr(self, 'get_env', get_all_env)
+
+        for dep in refs.dependencies:
+            self.takes.append(dep)
+        for val in refs.values:
+            self.values.append(val)
+
+    def __init__(self, params):
+        self.name = _generate_stage_name(params)
+        self.no_of_phases = 2
+        self.script_path = params.get('script')
+        self.interpreter = params.get('interpreter')
+        self.cwd = params.get('cwd')
+        self.takes = []
+        self.produces = []
+        self.values = []
+        self.prod_meta = {}
+
+        self._init_outputs(_get_param(params, 'outputs'))
+        self._init_inputs(_get_param(params, 'inputs'))
+
+ModuleClass = GenericScriptWrapperModule
\ No newline at end of file
diff --git a/sfbuild/sf_common_modules/io_rename.py b/sfbuild/sf_common_modules/io_rename.py
new file mode 100644
index 000000000..155450464
--- /dev/null
+++ b/sfbuild/sf_common_modules/io_rename.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python3
+
+# Symbiflow Stage Module
+
+"""
+Rename (i.e. change) dependencies and values of a module. This module wraps
+another module, whose name is specified in `params.module`, and changes the
+names of the dependencies and values it relies on. The parameters for the
+wrapped module can be specified through the `params.params` dict. There are
+three mappings for the names:
+* `params.rename_takes` - mapping for inputs ("takes")
+* `params.rename_produces` - mapping for outputs ("products")
+* `params.rename_values` - mapping for values
+Keys represent the names visible to the wrapped module and values represent
+the names visible to the modules outside.
+Not specifying a mapping for a given entry will leave it with its original
+name.
+
+---------------
+
+Accepted module parameters:
+* `module` (string, required)
+* `params` (dict[string -> any], optional)
+* `rename_takes` (dict[string -> string], optional)
+* `rename_produces` (dict[string -> string], optional)
+* `rename_values` (dict[string -> string], optional)
+
+"""
+
+# ----------------------------------------------------------------------------- #
+
+from sf_common import *
+from sf_module import *
+from sf_module_runner import get_module
+
+# ----------------------------------------------------------------------------- #
+
+def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
+    newd = {}
+    for k, v in d.items():
+        r = renames.get(k)
+        if r is not None:
+            newd[r] = v
+        else:
+            newd[k] = v
+    return newd
+
+def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
+    newn = SimpleNamespace()
+    for k, v in vars(d).items():
+        setattr(newn, k, v)
+    for k, r in renames.items():
+        if hasattr(newn, r):
+            v = getattr(newn, r)
+            delattr(newn, r)
+            setattr(newn, k, v)
+    return newn
+
+def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
+    newl = []
+    for e in l:
+        r = renames.get(e)
+        if r is not None:
+            _, q = decompose_depname(e)
+            newl.append(with_qualifier(r, q))
+        else:
+            newl.append(r if r is not None else e)
+    return newl
+
+def _generate_stage_name(name: str):
+    return f'{name}-io_renamed'
+
+def _or_empty_dict(d: 'dict | None'):
+    return d if d is not None else {}
+
+class IORenameModule(Module):
+    module: Module
+    rename_takes: 'dict[str, str]'
+    rename_produces: 'dict[str, str]'
+    rename_values: 'dict[str, str]'
+
+    def map_io(self, ctx: ModuleContext):
+        newctx = ctx.shallow_copy()
+        newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
+        newctx.values = _switchback_attrs(ctx.values, self.rename_values)
+        r = self.module.map_io(newctx)
+        return _switch_keys(r, self.rename_produces)
+
+    def execute(self, ctx: ModuleContext):
+        newctx = ctx.shallow_copy()
+        newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes)
+        newctx.values = _switchback_attrs(ctx.values, self.rename_values)
+        newctx.outputs = _switchback_attrs(ctx.produces, self.rename_produces)
+        return self.module.execute(newctx)
+
+    def __init__(self, params):
+        mod_path = resolve_modstr(params["module"])
+        module_class = get_module(mod_path)
+        module: Module = module_class(params.get("params"))
+
+        self.rename_takes = _or_empty_dict(params.get("rename_takes"))
+"""
+
+# ----------------------------------------------------------------------------- #
+
+import os
+from sf_common import *
+from sf_module import *
+
+# ----------------------------------------------------------------------------- #
+
+class MkDirsModule(Module):
+    deps_to_produce: 'dict[str, str]'
+
+    def map_io(self, ctx: ModuleContext):
+        return ctx.r_env.resolve(self.deps_to_produce)
+
+    def execute(self, ctx: ModuleContext):
+        outputs = vars(ctx.outputs)
+        for _, path in outputs.items():
+            yield f'Creating directory {path}...'
+            os.makedirs(path, exist_ok=True)
+
+    def __init__(self, params):
+        self.name = 'mkdirs'
+        self.no_of_phases = len(params) if params else 0
+        self.takes = []
+        self.produces = list(params.keys()) if params else []
+        self.values = []
+        self.deps_to_produce = params
+
+ModuleClass = MkDirsModule
diff --git a/sfbuild/sf_common_modules/pack.py b/sfbuild/sf_common_modules/pack.py
new file mode 100644
index 000000000..ab286867b
--- /dev/null
+++ b/sfbuild/sf_common_modules/pack.py
@@ -0,0 +1,70 @@
+#!/usr/bin/python3
+
+# Symbiflow Stage Module
+
+# ----------------------------------------------------------------------------- #
+
+import os
+import re
+import shutil
+from sf_common import *
+from sf_module import *
+
+# ----------------------------------------------------------------------------- #
+
+DEFAULT_TIMING_RPT = 'pre_pack.report_timing.setup.rpt'
+DEFAULT_UTIL_RPT = 'packing_pin_util.rpt'
+
+class PackModule(Module):
+    def map_io(self, ctx: ModuleContext):
+        p = file_noext(ctx.takes.eblif)
+        build_dir = os.path.dirname(p)
+
+        return {
+            'net': p + '.net',
+            'util_rpt': os.path.join(build_dir, DEFAULT_UTIL_RPT),
+            'timing_rpt': os.path.join(build_dir, DEFAULT_TIMING_RPT)
+        }
+
+    def execute(self, ctx: ModuleContext):
+        vpr_args = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
+                           sdc_file=ctx.takes.sdc)
+        build_dir = os.path.dirname(ctx.outputs.net)
+
+        noisy_warnings(ctx.values.device)
+
+        yield 'Packing with VPR...'
+        vpr('pack', vpr_args, cwd=build_dir)
+
+        og_log = os.path.join(build_dir, 'vpr_stdout.log')
+
+        yield 'Moving/deleting files...'
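+        # `pack_log` is an on-demand ('!') output: if the flow configuration
+        # maps it to a path, keep VPR's stdout log there; otherwise remove
+        # the log to keep the build directory clean.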
+        if ctx.outputs.pack_log:
+            shutil.move(og_log, ctx.outputs.pack_log)
+        else:
+            os.remove(og_log)
+
+        if ctx.outputs.timing_rpt:
+            shutil.move(os.path.join(build_dir, DEFAULT_TIMING_RPT),
+                        ctx.outputs.timing_rpt)
+        if ctx.outputs.util_rpt:
+            shutil.move(os.path.join(build_dir, DEFAULT_UTIL_RPT),
+                        ctx.outputs.util_rpt)
+
+    def __init__(self, _):
+        self.name = 'pack'
+        self.no_of_phases = 2
+        self.takes = [
+            'eblif',
+            'sdc?'
+        ]
+        self.produces = [
+            'net',
+            'util_rpt',
+            'timing_rpt',
+            'pack_log!'
+        ]
+        self.values = [
+            'device',
+        ] + vpr_specific_values()
+
+ModuleClass = PackModule
diff --git a/sfbuild/sf_common_modules/place.py b/sfbuild/sf_common_modules/place.py
new file mode 100644
index 000000000..7821a52ea
--- /dev/null
+++ b/sfbuild/sf_common_modules/place.py
@@ -0,0 +1,86 @@
+#!/usr/bin/python3
+
+# Symbiflow Stage Module
+
+# ----------------------------------------------------------------------------- #
+
+import os
+import re
+import shutil
+from sf_common import *
+from sf_module import *
+
+# ----------------------------------------------------------------------------- #
+
+def default_output_name(place_constraints):
+    m = re.match('(.*)\\.[^.]*$', place_constraints)
+    if m:
+        return m.groups()[0] + '.place'
+    return place_constraints + '.place'
+
+def place_constraints_file(ctx: ModuleContext):
+    dummy = False
+    p = ctx.takes.place_constraints
+    if not p:
+        p = ctx.takes.io_place
+    if not p:
+        dummy = True
+        p = file_noext(ctx.takes.eblif) + '.place'
+
+    return p, dummy
+
+class PlaceModule(Module):
+    def map_io(self, ctx: ModuleContext):
+        mapping = {}
+        p, _ = place_constraints_file(ctx)
+
+        mapping['place'] = default_output_name(p)
+        return mapping
+
+    def execute(self, ctx: ModuleContext):
+        place_constraints, dummy = place_constraints_file(ctx)
+        place_constraints = os.path.realpath(place_constraints)
+        if dummy:
+            # Create an empty constraints file so VPR always gets one.
+            with open(place_constraints, 'wb') as f:
+                f.write(b'')
+
+        build_dir = os.path.dirname(ctx.takes.eblif)
+
+        vpr_options = ['--fix_clusters', place_constraints]
+
+        yield 'Running VPR...'
+        vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values,
+                          sdc_file=ctx.takes.sdc, vpr_extra_opts=vpr_options)
+        vpr('place', vprargs, cwd=build_dir)
+
+        # VPR names the output on its own. If the user requested another name,
+        # the output file should be moved.
+        # TODO: This extends the set of names that would cause collisions.
+        # As for now (22-07-2021), no collision detection is being done, but
+        # when the problem gets tackled, we should keep in mind that VPR-based
+        # modules may produce some temporary files with names that differ from
+        # the ones in the flow configuration.
+        if ctx.is_output_explicit('place'):
+            output_file = default_output_name(place_constraints)
+            shutil.move(output_file, ctx.outputs.place)
+
+        yield 'Saving log...'
+        save_vpr_log('place.log', build_dir=build_dir)
+
+    def __init__(self, _):
+        self.name = 'place'
+        self.no_of_phases = 2
+        self.takes = [
+            'eblif',
+            'sdc?',
+            'place_constraints?',
+            'io_place?'
+        ]
+        self.produces = [ 'place' ]
+        self.values = [
+            'device',
+            'vpr_options?'
+ ] + vpr_specific_values() + +ModuleClass = PlaceModule diff --git a/sfbuild/sf_common_modules/place_constraints.py b/sfbuild/sf_common_modules/place_constraints.py new file mode 100644 index 000000000..cd3a26ff5 --- /dev/null +++ b/sfbuild/sf_common_modules/place_constraints.py @@ -0,0 +1,62 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +class PlaceConstraintsModule(Module): + def map_io(self, ctx: ModuleContext): + return { + 'place_constraints': file_noext(ctx.takes.net) + '.preplace' + } + + def execute(self, ctx: ModuleContext): + arch_dir = os.path.join(ctx.share, 'arch') + arch_def = os.path.join(arch_dir, ctx.values.device, 'arch.timing.xml') + + database = sub('prjxray-config').decode().replace('\n', '') + + yield 'Generating .place...' + + extra_opts: 'list[str]' + if ctx.values.extra_opts: + extra_opts = options_dict_to_list(ctx.values.extra_opts) + else: + extra_opts = [] + + data = sub(*(['python3', ctx.values.script, + '--net', ctx.takes.net, + '--arch', arch_def, + '--blif', ctx.takes.eblif, + '--input', ctx.takes.io_place, + '--db_root', database, + '--part', ctx.values.part_name] + + extra_opts)) + + yield 'Saving place constraint data...' + with open(ctx.outputs.place_constraints, 'wb') as f: + f.write(data) + + def __init__(self, _): + self.name = 'place_constraints' + self.no_of_phases = 2 + self.takes = [ + 'eblif', + 'net', + 'io_place' + ] + self.produces = [ 'place_constraints' ] + self.values = [ + 'device', + 'part_name', + 'script', + 'extra_opts?' + ] + +ModuleClass = PlaceConstraintsModule diff --git a/sfbuild/sf_common_modules/route.py b/sfbuild/sf_common_modules/route.py new file mode 100644 index 000000000..c9b440d2a --- /dev/null +++ b/sfbuild/sf_common_modules/route.py @@ -0,0 +1,57 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +import shutil +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +def route_place_file(eblif: str): + return file_noext(eblif) + '.route' + +class RouteModule(Module): + def map_io(self, ctx: ModuleContext): + return { + 'route': route_place_file(ctx.takes.eblif) + } + + def execute(self, ctx: ModuleContext): + build_dir = os.path.dirname(ctx.takes.eblif) + + vpr_options = [] + if ctx.values.vpr_options: + vpr_options = options_dict_to_list(ctx.values.vpr_options) + + + vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values, + sdc_file=ctx.takes.sdc) + + yield 'Routing with VPR...' + vpr('route', vprargs, cwd=build_dir) + + if ctx.is_output_explicit('route'): + shutil.move(route_place_file(ctx.takes.eblif), ctx.outputs.route) + + yield 'Saving log...' + save_vpr_log('route.log', build_dir=build_dir) + + def __init__(self, _): + self.name = 'route' + self.no_of_phases = 2 + self.takes = [ + 'eblif', + 'place', + 'sdc?' + ] + self.produces = [ 'route' ] + self.values = [ + 'device', + 'vpr_options?' 
+ ] + vpr_specific_values() + +ModuleClass = RouteModule diff --git a/sfbuild/sf_common_modules/synth.py b/sfbuild/sf_common_modules/synth.py new file mode 100755 index 000000000..8cac85214 --- /dev/null +++ b/sfbuild/sf_common_modules/synth.py @@ -0,0 +1,162 @@ +#!/usr/bin/python3 + +# Symbiflow Stage Module + +# ----------------------------------------------------------------------------- # + +import os +from sf_common import * +from sf_module import * + +# ----------------------------------------------------------------------------- # + +# Setup environmental variables for YOSYS TCL scripts +def yosys_setup_tcl_env(tcl_env_def): + env = {} + for key, value in tcl_env_def.items(): + if value is None: + continue + v = value + if type(value) is list: + v = ' '.join(value) + env[key] = v + return env + +def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None): + # Set up environment for TCL weirdness + optional = [] + if log: + optional += ['-l', log] + env = os.environ.copy() + env.update(tcl_env) + + tcl = f'tcl {tcl}' + + # Use append read_verilog commands to the scripts for more sophisticated + # input if arguments are specified. Omit direct input throught `yosys` command. + if read_verilog_args: + args_str = ' '.join(read_verilog_args) + for verilog in verilog_files: + tcl = f'read_verilog {args_str} {verilog}; {tcl}' + verilog_files = [] + + # Execute YOSYS command + return sub(*(['yosys', '-p', tcl] + optional + verilog_files), + env=env) + +def yosys_conv(tcl, tcl_env, synth_json): + # Set up environment for TCL weirdness + env = os.environ.copy() + env.update(tcl_env) + + # Execute YOSYS command + return sub('yosys', '-p', 'read_json ' + synth_json + '; tcl ' + tcl, + env=env) + +# ----------------------------------------------------------------------------- # + +class SynthModule(Module): + extra_products: 'list[str]' + + def map_io(self, ctx: ModuleContext): + mapping = {} + + top = ctx.values.top + if ctx.takes.build_dir: + top = os.path.join(ctx.takes.build_dir, top) + mapping['eblif'] = top + '.eblif' + mapping['fasm_extra'] = top + '_fasm_extra.fasm' + mapping['json'] = top + '.json' + mapping['synth_json'] = top + '_io.json' + + b_path = os.path.dirname(top) + + for extra in self.extra_products: + name, spec = decompose_depname(extra) + if spec == 'maybe': + raise ModuleRuntimeException( + f'Yosys synth extra products can\'t use \'maybe\ ' + f'(?) specifier. Product causing this error: `{extra}`.' + ) + elif spec == 'req': + mapping[name] = \ + os.path.join(b_path, + ctx.values.device + '_' + name + '.' + name) + + return mapping + + def execute(self, ctx: ModuleContext): + split_inouts = os.path.join(ctx.share, 'scripts/split_inouts.py') + synth_tcl = os.path.join(ctx.values.tcl_scripts, 'synth.tcl') + conv_tcl = os.path.join(ctx.values.tcl_scripts, 'conv.tcl') + + tcl_env = yosys_setup_tcl_env(ctx.values.yosys_tcl_env) \ + if ctx.values.yosys_tcl_env else {} + + if get_verbosity_level() >= 2: + yield f'Synthesizing sources: {ctx.takes.sources}...' + else: + yield f'Synthesizing sources...' + + yosys_synth(synth_tcl, tcl_env, ctx.takes.sources, + ctx.values.read_verilog_args, ctx.outputs.synth_log) + + yield f'Splitting in/outs...' + sub('python3', split_inouts, '-i', ctx.outputs.json, '-o', + ctx.outputs.synth_json) + + if not os.path.isfile(ctx.produces.fasm_extra): + with open(ctx.produces.fasm_extra, 'w') as f: + f.write('') + + yield f'Converting...' 
+ yosys_conv(conv_tcl, tcl_env, ctx.outputs.synth_json) + + def __init__(self, params): + self.name = 'synthesize' + self.no_of_phases = 3 + self.takes = [ + 'sources', + 'build_dir?' + ] + # Extra takes for use with TCL scripts + extra_takes = params.get('takes') + if extra_takes: + self.takes += extra_takes + + self.produces = [ + 'eblif', + 'fasm_extra', + 'json', + 'synth_json', + 'synth_log!' + ] + # Extra products for use with TCL scripts + extra_products = params.get('produces') + if extra_products: + self.produces += extra_products + self.extra_products = extra_products + else: + self.extra_products = [] + + self.values = [ + 'top', + 'device', + 'tcl_scripts', + 'yosys_tcl_env?', + 'read_verilog_args?' + ] + self.prod_meta = { + 'eblif': 'Extended BLIF hierarchical sequential designs file\n' + 'generated by YOSYS', + 'json': 'JSON file containing a design generated by YOSYS', + 'synth_log': 'YOSYS synthesis log', + 'fasm_extra': 'Extra FASM generated during sythesis stage. Needed in ' + 'some designs.\nIn case it\'s not necessary, the file ' + 'will be empty.' + } + extra_meta = params.get('prod_meta') + if extra_meta: + self.prod_meta.update(extra_meta) + +ModuleClass = SynthModule diff --git a/sfbuild/sf_flow_config.py b/sfbuild/sf_flow_config.py new file mode 100644 index 000000000..30f9ba4e3 --- /dev/null +++ b/sfbuild/sf_flow_config.py @@ -0,0 +1,360 @@ +import os +import json + +from sf_common import file_noext, ResolutionEnv, deep +from sf_stage import Stage +from copy import copy + +_realpath_deep = deep(os.path.realpath) + +def open_flow_cfg(path: str) -> dict: + flow_cfg_json: str + with open(path, 'r') as flow_cfg_file: + flow_cfg_json = flow_cfg_file.read() + return json.loads(flow_cfg_json) + +def save_flow_cfg(flow: dict, path: str): + flow_cfg_json = json.dumps(flow, indent=4) + with open(path, 'w') as flow_cfg_file: + flow_cfg_file.write(flow_cfg_json) + +def _get_lazy_dict(parent: dict, name: str): + d = parent.get(name) + if d is None: + d = {} + parent[name] = d + return d + +def _get_ov_dict(dname: str, flow: dict, + platform: 'str | None' = None, stage: 'str | None' = None): + d: dict + if platform: + platform_dict: dict = flow[platform] + if stage: + stage_dict: dict = _get_lazy_dict(platform_dict, stage) + d = _get_lazy_dict(stage_dict, dname) + else: + d = _get_lazy_dict(platform_dict, dname) + else: + d = _get_lazy_dict(flow, dname) + + return d + +def _get_dep_dict(flow: dict, + platform: 'str | None' = None, stage: 'str | None' = None): + return _get_ov_dict('dependencies', flow, platform, stage) + +def _get_vals_dict(flow: dict, + platform: 'str | None' = None, stage: 'str | None' = None): + return _get_ov_dict('values', flow, platform, stage) + +def _add_ov(ov_dict_getter, failstr_constr, flow_cfg: dict, name: str, + values: list, platform: 'str | None' = None, + stage: 'str | None' = None) -> bool: + d = ov_dict_getter(flow_cfg, platform, stage) + + deps = d.get(name) + if type(deps) is list: + deps += values + elif deps is None: + d[name] = values + else: + print(failstr_constr(name)) + return False + + return True + +def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr, + flow: dict, name: str, vals: list, + platform: 'str | None' = None, + stage: 'str | None' = None) -> bool: + values_to_remove = set(vals) + d = ov_dict_getter(flow, platform, stage) + + vallist: list = d.get(name) + if type(vallist) is list: + d[name] = [val for val in vallist if val not in values_to_remove] + elif type(vallist) is None: + 
print(notset_str_constr(name)) + return False + else: + print(notlist_str_constr(name)) + return False + + return True + + +def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr, + flow: dict, name: str, idcs: list, + platform: 'str | None' = None, + stage: 'str | None' = None) -> bool: + idcs.sort(reverse=True) + + if len(idcs) == 0: + print(f'Index list is emtpy!') + return False + + d = ov_dict_getter(flow, platform, stage) + vallist: list = d.get(name) + if type(vallist) is list: + if idcs[0] >= len(vallist) or idcs[len(idcs) - 1] < 0: + print(f'Index out of range (max: {len(vallist)}!') + return False + + for idx in idcs: + vallist.pop(idx) + elif vallist is None: + print(notset_str_constr(name)) + return False + else: + print(notlist_str_constr(name)) + return False + + return True + +def _get_ovs_raw(dict_name: str, flow_cfg, + platform: 'str | None', stage: 'str | None'): + vals = flow_cfg.get(dict_name) + if vals is None: + vals = {} + if platform is not None: + platform_vals= flow_cfg[platform].get(dict_name) + if platform_vals is not None: + vals.update(platform_vals) + if stage is not None: + stage_deps = flow_cfg[platform][stage].get(dict_name) + if stage_deps is not None: + vals.update(stage_deps) + + return vals + +def _remove_dependencies_by_values(flow: dict, name: str, deps: list, + platform: 'str | None' = None, + stage: 'str | None' = None) -> bool: + def notset_str_constr(dname): + return f'Dependency `{dname}` is not set. Nothing to remove.' + def notlist_str_constr(dname): + return f'Dependency `{dname}` is not a list! Use unsetd instead.' + return _rm_ov_by_values(_get_dep_dict, notset_str_constr, notlist_str_constr, + flow, name, deps, platform, stage) + +def _remove_dependencies_by_idx(flow: dict, name: str, idcs: list, + platform: 'str | None' = None, + stage: 'str | None' = None) -> bool: + def notset_str_constr(dname): + return f'Dependency `{dname}` is not set. Nothing to remove.' + def notlist_str_constr(dname): + return f'Dependency `{dname}` is not a list! Use unsetd instead.' + return _rm_ov_by_idx(_get_dep_dict, notset_str_constr, notlist_str_constr, + flow, name, idcs, platform, stage) + +def _remove_values_by_values(flow: dict, name: str, deps: list, + platform: 'str | None' = None, + stage: 'str | None' = None) -> bool: + def notset_str_constr(vname): + return f'Value `{vname}` is not set. Nothing to remove.' + def notlist_str_constr(vname): + return f'Value `{vname}` is not a list! Use unsetv instead.' + return _rm_ov_by_values(_get_vals_dict, notset_str_constr, notlist_str_constr, + flow, name, deps, platform, stage) + +def _remove_values_by_idx(flow: dict, name: str, idcs: list, + platform: 'str | None' = None, + stage: 'str | None' = None) -> bool: + def notset_str_constr(dname): + return f'Dependency `{dname}` is not set. Nothing to remove.' + def notlist_str_constr(dname): + return f'Dependency `{dname}` is not a list! Use unsetv instead.' 
+ return _rm_ov_by_idx(_get_vals_dict, notset_str_constr, notlist_str_constr, + flow, name, idcs, platform, stage) + +def unset_dependency(flow: dict, name: str, + platform: 'str | None', stage: 'str | None'): + d = _get_dep_dict(flow, platform, stage) + if d.get(name) is None: + print(f'Dependency `{name}` is not set!') + return False + d.pop(name) + return True + +def verify_platform_name(platform: str, mypath: str): + for plat_def_filename in os.listdir(os.path.join(mypath, 'platforms')): + platform_name = file_noext(plat_def_filename) + if platform == platform_name: + return True + return False + +def verify_stage(platform: str, stage: str, mypath: str): + # TODO: Verify stage + return True + +def _is_kword(w: str): + return \ + (w == 'dependencies') | (w == 'values') | \ + (w == 'default_platform') | (w == 'default_target') + +class FlowDefinition: + # stage name -> module path mapping + stages: 'dict[str, Stage]' + r_env: ResolutionEnv + + def __init__(self, flow_def: dict, r_env: ResolutionEnv): + self.flow_def = flow_def + self.r_env = r_env + self.stages = {} + + global_vals = flow_def.get('values') + if global_vals is not None: + self.r_env.add_values(global_vals) + + stages_d = flow_def['stages'] + modopts_d = flow_def.get('stage_options') + if modopts_d is None: + modopts_d = {} + + for stage_name, modstr in stages_d.items(): + opts = modopts_d.get(stage_name) + self.stages[stage_name] = Stage(stage_name, modstr, opts) + + def stage_names(self): + return self.stages.keys() + + def get_stage_r_env(self, stage_name: 'str') -> ResolutionEnv: + stage = self.stages[stage_name] + r_env = copy(self.r_env) + r_env.add_values(stage.value_overrides) + return r_env + +class ProjectFlowConfig: + flow_cfg: dict + # r_env: ResolutionEnv + path: str + # platform_r_envs: 'dict[str, ResolutionEnv]' + + def __init__(self, path: str): + self.flow_cfg = {} + self.path = copy(path) + # self.r_env = ResolutionEnv({}) + # self.platform_r_envs = {} + + def platforms(self): + for platform, _ in self.flow_cfg.items(): + if not _is_kword(platform): + yield platform + + def add_platform(self, device: str) -> bool: + d = self.flow_cfg.get(device) + if d: + print(f'Device {device} already exists') + return False + + self.flow_cfg[device] = {} + return True + + def set_default_platform(self, device: str) -> bool: + self.flow_cfg['default_platform'] = device + return True + + def set_default_target(self, platform: str, target: str) -> bool: + self.flow_cfg[platform]['default_target'] = target + return True + + def get_default_platform(self) -> 'str | None': + return self.flow_cfg.get('default_platform') + + def get_default_target(self, platform: str) -> 'str | None': + return self.flow_cfg[platform].get('default_target') + + def get_stage_r_env(self, platform: str, stage: str) -> ResolutionEnv: + r_env = self._cache_platform_r_env(platform) + + stage_cfg = self.flow_cfg[platform][stage] + stage_values = stage_cfg.get('values') + if stage_values: + r_env.add_values(stage_values) + + return r_env + + """ Get dependencies without value resolution applied """ + def get_dependencies_raw(self, platform: 'str | None' = None): + return _get_ovs_raw('dependencies', self.flow_cfg, platform, None) + + """ Get values without value resolution applied """ + def get_values_raw(self, platform: 'str | None' = None, + stage: 'str | None' = None): + return _get_ovs_raw('values', self.flow_cfg, platform, stage) + + def get_stage_value_overrides(self, platform: str, stage: str): + stage_cfg = self.flow_cfg[platform].get(stage) + 
if stage_cfg is None: + return {} + + stage_vals_ovds = stage_cfg.get('values') + if stage_vals_ovds is None: + return {} + return stage_vals_ovds + + def get_dependency_platform_overrides(self, platform: str): + platform_ovds = self.flow_cfg[platform].get('dependencies') + if platform_ovds is None: + return {} + return platform_ovds + + +class FlowConfig: + platform: str + r_env: ResolutionEnv + dependencies_explicit: 'dict[str, ]' + stages: 'dict[str, Stage]' + + def __init__(self, project_config: ProjectFlowConfig, + platform_def: FlowDefinition, platform: str): + self.r_env = platform_def.r_env + platform_vals = project_config.get_values_raw(platform) + self.r_env.add_values(platform_vals) + self.stages = platform_def.stages + self.platform = platform + + raw_project_deps = project_config.get_dependencies_raw(platform) + + self.dependencies_explicit = \ + _realpath_deep(self.r_env.resolve(raw_project_deps)) + + for stage_name, stage in platform_def.stages.items(): + project_val_ovds = \ + project_config.get_stage_value_overrides(platform, stage_name) + stage.value_overrides.update(project_val_ovds) + + def get_dependency_overrides(self): + return self.dependencies_explicit + + def get_r_env(self, stage_name: str) -> ResolutionEnv: + stage = self.stages[stage_name] + r_env = copy(self.r_env) + r_env.add_values(stage.value_overrides) + + return r_env + + def get_stage(self, stage_name: str) -> Stage: + return self.stages[stage_name] + +class FlowConfigException(Exception): + path: str + message: str + + def __init__(self, path: str, message: str): + self.path = path + self.message = message + + def __str__(self) -> str: + return f'Error in config `{self.path}: {self.message}' + +def open_project_flow_cfg(path: str) -> ProjectFlowConfig: + cfg = ProjectFlowConfig(path) + + flow_cfg_json: str + with open(path, 'r') as flow_cfg_file: + flow_cfg_json = flow_cfg_file.read() + cfg.flow_cfg = json.loads(flow_cfg_json) + + return cfg \ No newline at end of file diff --git a/sfbuild/sf_module/__init__.py b/sfbuild/sf_module/__init__.py new file mode 100644 index 000000000..6bb4efd50 --- /dev/null +++ b/sfbuild/sf_module/__init__.py @@ -0,0 +1,147 @@ +# Here are the things necessary to write a symbiflow Module + +import abc +from types import SimpleNamespace +from sf_common import * +from colorama import Fore, Style + +class Module: + """ + A `Module` is a wrapper for whatever tool is used in a flow. + Modules can request dependencies, values and are guranteed to have all the + required ones present when entering `exec` mode. + They also have to specify what dependencies they produce and create the files + for these dependencies. + """ + + no_of_phases: int + name: str + takes: 'list[str]' + produces: 'list[str]' + values: 'list[str]' + prod_meta: 'dict[str, str]' + + @abc.abstractmethod + def execute(self, ctx): + """ + Executes module. Use yield to print a message informing about current + execution phase. + `ctx` is `ModuleContext`. + """ + pass + + @abc.abstractmethod + def map_io(self, ctx) -> 'dict[str, ]': + """ + Returns paths for outputs derived from given inputs. + `ctx` is `ModuleContext`. + """ + pass + + def __init__(self, params: 'dict[str, ]'): + self.no_of_phases = 0 + self.current_phase = 0 + self.name = '' + self.prod_meta = {} + +class ModuleContext: + """ + A class for object holding mappings for dependencies and values as well as + other information needed during modules execution. 
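+
+    As an illustration, a module's `execute` implementation typically reads
+    `ctx.takes.eblif` for an input path, `ctx.values.device` for a requested
+    value and `ctx.outputs.net` for the path under which it should create an
+    output (attribute names here are examples; the available attributes come
+    from the module's `takes`/`values`/`produces` declarations).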
+    """
+
+    share: str                 # Absolute path to Symbiflow's share directory
+    bin: str                   # Absolute path to Symbiflow's bin directory
+    takes: SimpleNamespace     # Maps symbolic dependency names to relative
+                               # paths.
+    produces: SimpleNamespace  # Contains mappings for explicitly specified
+                               # dependencies. Useful mostly for checking for
+                               # on-demand optional outputs (such as logs)
+                               # with the `is_output_explicit` method.
+    outputs: SimpleNamespace   # Contains mappings for all available outputs.
+    values: SimpleNamespace    # Contains all available requested values.
+    r_env: ResolutionEnv       # `ResolutionEnv` object holding mappings
+                               # for the current scope.
+    module_name: str           # Name of the module.
+
+    def is_output_explicit(self, name: str):
+        """ True if the user explicitly specified the output's path. """
+        o = getattr(self.produces, name)
+        return o is not None
+
+    def _getreqmaybe(self, obj, deps: 'list[str]', deps_cfg: 'dict[str, ]'):
+        """
+        Add an attribute for a dependency or panic if a required dependency
+        has not been given to the module on its input.
+        """
+
+        for name in deps:
+            name, spec = decompose_depname(name)
+            value = deps_cfg.get(name)
+            if value is None and spec == 'req':
+                fatal(-1, f'Dependency `{name}` is required by module '
+                          f'`{self.module_name}` but wasn\'t provided')
+            setattr(obj, name, self.r_env.resolve(value))
+
+    # `config` should be the dictionary given as the module's input.
+    def __init__(self, module: Module, config: 'dict[str, ]',
+                 r_env: ResolutionEnv, share: str, bin: str):
+        self.module_name = module.name
+        self.takes = SimpleNamespace()
+        self.produces = SimpleNamespace()
+        self.values = SimpleNamespace()
+        self.outputs = SimpleNamespace()
+        self.r_env = r_env
+        self.share = share
+        self.bin = bin
+
+        self._getreqmaybe(self.takes, module.takes, config['takes'])
+        self._getreqmaybe(self.values, module.values, config['values'])
+
+        produces_resolved = self.r_env.resolve(config['produces'])
+        for name, value in produces_resolved.items():
+            setattr(self.produces, name, value)
+
+        outputs = module.map_io(self)
+        outputs.update(produces_resolved)
+
+        self._getreqmaybe(self.outputs, module.produces, outputs)
+
+    def shallow_copy(self):
+        cls = type(self)
+        mycopy = cls.__new__(cls)
+
+        mycopy.module_name = self.module_name
+        mycopy.takes = self.takes
+        mycopy.produces = self.produces
+        mycopy.values = self.values
+        mycopy.outputs = self.outputs
+        mycopy.r_env = self.r_env
+        mycopy.share = self.share
+        mycopy.bin = self.bin
+
+        return mycopy
+
+class ModuleRuntimeException(Exception):
+    info: str
+
+    def __init__(self, info: str):
+        self.info = info
+
+    def __str__(self):
+        return self.info
+
+def get_mod_metadata(module: Module):
+    """ Get descriptions for produced dependencies.
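+
+    For a module with `produces = ['eblif', 'pack_log!']`, the result could
+    look like this (descriptions shortened for illustration):
+
+        {'eblif': 'Extended BLIF file...', 'pack_log': ''}
+
+    Qualifier suffixes ('?', '!') are stripped from the keys and dependencies
+    without a description map to an empty string.
+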
""" + + meta = {} + has_meta = hasattr(module, 'prod_meta') + for prod in module.produces: + prod = prod.replace('?', '') + prod = prod.replace('!', '') + if not has_meta: + meta[prod] = '' + continue + prod_meta = module.prod_meta.get(prod) + meta[prod] = prod_meta if prod_meta else '' + return meta diff --git a/sfbuild/sf_module_inspector.py b/sfbuild/sf_module_inspector.py new file mode 100644 index 000000000..bc725e12f --- /dev/null +++ b/sfbuild/sf_module_inspector.py @@ -0,0 +1,38 @@ +from sf_module import Module +from sf_common import decompose_depname +from colorama import Style + +def _get_if_qualifier(deplist: 'list[str]', qualifier: str): + for dep_name in deplist: + name, q = decompose_depname(dep_name) + if q == qualifier: + yield f'● {Style.BRIGHT}{name}{Style.RESET_ALL}' + +def _list_if_qualifier(deplist: 'list[str]', qualifier: str, indent: int = 4): + indent_str = ''.join([' ' for _ in range(0, indent)]) + r = '' + + for line in _get_if_qualifier(deplist, qualifier): + r += indent_str + line + '\n' + + return r + +def get_module_info(module: Module) -> str: + r= '' + r += f'Module `{Style.BRIGHT}{module.name}{Style.RESET_ALL}`:\n' + r += 'Inputs:\n Required:\n Dependencies\n' + r += _list_if_qualifier(module.takes, 'req', indent=6) + r += ' Values:\n' + r += _list_if_qualifier(module.values, 'req', indent=6) + r += ' Optional:\n Dependencies:\n' + r += _list_if_qualifier(module.takes, 'maybe', indent=6) + r += ' Values:\n' + r += _list_if_qualifier(module.values, 'maybe', indent=6) + r += 'Outputs:\n Guaranteed:\n' + r += _list_if_qualifier(module.produces, 'req', indent=4) + r += ' On-demand:\n' + r += _list_if_qualifier(module.produces, 'demand', indent=4) + r += ' Not guaranteed:\n' + r += _list_if_qualifier(module.produces, 'maybe', indent= 4) + + return r diff --git a/sfbuild/sf_module_runner/__init__.py b/sfbuild/sf_module_runner/__init__.py new file mode 100644 index 000000000..23be3c492 --- /dev/null +++ b/sfbuild/sf_module_runner/__init__.py @@ -0,0 +1,110 @@ +""" Dynamically import and run sfbuild modules """ + +from contextlib import contextmanager +import importlib +import os +from sf_module import Module, ModuleContext, get_mod_metadata +from sf_common import ResolutionEnv, deep, sfprint +from colorama import Fore, Style + +_realpath_deep = deep(os.path.realpath) + +@contextmanager +def _add_to_sys_path(path: str): + import sys + old_syspath = sys.path + sys.path = [path] + sys.path + try: + yield + finally: + sys.path = old_syspath + +def import_module_from_path(path: str): + absolute_path = os.path.realpath(path) + with _add_to_sys_path(path): + spec = importlib.util.spec_from_file_location(absolute_path, absolute_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module + +# Once imported a module will be added to that dict to avaid re-importing it +preloaded_modules = {} + +def get_module(path: str): + global preloaded_modules + + cached = preloaded_modules.get(path) + if cached: + return cached.ModuleClass + + mod = import_module_from_path(path) + preloaded_modules[path] = mod + + # All sfbuild modules should expose a `ModuleClass` type/alias which is a + # class implementing a Module interface + return mod.ModuleClass + +class ModRunCtx: + share: str + bin: str + config: 'dict[str, ]' + + def __init__(self, share: str, bin: str, config: 'dict[str, ]'): + self.share = share + self.bin = bin + self.config = config + + def make_r_env(self): + return ResolutionEnv(self.config['values']) + +class 
ModuleFailException(Exception):
+    module: str
+    mode: str
+    e: Exception
+
+    def __init__(self, module: str, mode: str, e: Exception):
+        self.module = module
+        self.mode = mode
+        self.e = e
+
+    def __str__(self) -> str:
+        return f'ModuleFailException:\n  Module `{self.module}` failed ' \
+               f'MODE: \'{self.mode}\'\n\nException `{type(self.e)}`: {self.e}'
+
+def module_io(module: Module):
+    return {
+        'name': module.name,
+        'takes': module.takes,
+        'produces': module.produces,
+        'meta': get_mod_metadata(module)
+    }
+
+def module_map(module: Module, ctx: ModRunCtx):
+    try:
+        mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share,
+                                ctx.bin)
+    except Exception as e:
+        raise ModuleFailException(module.name, 'map', e)
+
+    return _realpath_deep(vars(mod_ctx.outputs))
+
+def module_exec(module: Module, ctx: ModRunCtx):
+    try:
+        mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share,
+                                ctx.bin)
+    except Exception as e:
+        raise ModuleFailException(module.name, 'exec', e)
+
+    sfprint(1, 'Executing module '
+               f'`{Style.BRIGHT + module.name + Style.RESET_ALL}`:')
+    current_phase = 1
+    try:
+        for phase_msg in module.execute(mod_ctx):
+            sfprint(1, f'    {Style.BRIGHT}[{current_phase}/{module.no_of_phases}]'
+                       f'{Style.RESET_ALL}: {phase_msg}')
+            current_phase += 1
+    except Exception as e:
+        raise ModuleFailException(module.name, 'exec', e)
+
+    sfprint(1, f'Module `{Style.BRIGHT + module.name + Style.RESET_ALL}` '
+               'has finished its work!')
\ No newline at end of file
diff --git a/sfbuild/sf_stage.py b/sfbuild/sf_stage.py
new file mode 100644
index 000000000..ec1baca90
--- /dev/null
+++ b/sfbuild/sf_stage.py
@@ -0,0 +1,78 @@
+from sf_common import decompose_depname, resolve_modstr
+from sf_module import Module
+from sf_module_runner import get_module, module_io
+
+class StageIO:
+    """
+    Stage dependency input/output.
+    TODO: Solve the inconsistency between usage of this class and usage of
+    `decompose_depname` with an unprocessed string.
+    """
+
+    name: str  # A symbolic name given to the dependency
+    spec: str  # Dependency qualifier ('req', 'maybe' or 'demand')
+
+    def __init__(self, encoded_name: str):
+        """
+        An encoded name features special characters that imply certain
+        qualifiers. Any name that ends with '?' is treated as having the
+        'maybe' qualifier. The '?' symbol is then dropped from the
+        dependency name.
+        """
+
+        self.name, self.spec = decompose_depname(encoded_name)
+
+    def __repr__(self) -> str:
+        return 'StageIO { name: \'' + self.name + '\', spec: ' + \
+               self.spec + '}'
+
+class Stage:
+    """
+    Represents a single stage in a flow, i.e. an instance of a module with a
+    local set of values.
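+
+    In a flow definition, a stage is declared by binding a stage name to a
+    module string in the "stages" dict, optionally with a matching entry in
+    "stage_options", e.g. (a hypothetical snippet; the `common:` prefix and
+    device value are made up):
+
+        "stages": { "pack": "common:pack" },
+        "stage_options": { "pack": { "values": { "device": "xc7a50t_test" } } }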
+    """
+
+    name: str                       # Name of the stage (module's name)
+    takes: 'list[StageIO]'          # List of symbolic names of dependencies
+                                    # used by the stage
+    produces: 'list[StageIO]'       # List of symbolic names of dependencies
+                                    # produced by the stage
+    value_overrides: 'dict[str, ]'  # Stage-specific values
+    module: Module
+    meta: 'dict[str, str]'          # Stage's metadata extracted from the
+                                    # module's output.
+
+    def __init__(self, name: str, modstr: str, mod_opts: 'dict[str, ] | None'):
+        if mod_opts is None:
+            mod_opts = {}
+
+        module_path = resolve_modstr(modstr)
+        ModuleClass = get_module(module_path)
+        self.module = ModuleClass(mod_opts.get('params'))
+
+        values = mod_opts.get('values')
+        if values is not None:
+            self.value_overrides = values
+        else:
+            self.value_overrides = {}
+
+        mod_io = module_io(self.module)
+        self.name = name
+
+        self.takes = []
+        for input in mod_io['takes']:
+            io = StageIO(input)
+            self.takes.append(io)
+
+        self.produces = []
+        for output in mod_io['produces']:
+            io = StageIO(output)
+            self.produces.append(io)
+
+        self.meta = mod_io['meta']
+
+    def __repr__(self) -> str:
+        return 'Stage \'' + self.name + '\' {' \
+               f' value_overrides: {self.value_overrides},' \
+               f' takes: {self.takes},' \
+               f' produces: {self.produces} ' + '}'
diff --git a/sfbuild/sf_ugly.py b/sfbuild/sf_ugly.py
new file mode 100644
index 000000000..8dd38a54b
--- /dev/null
+++ b/sfbuild/sf_ugly.py
@@ -0,0 +1,19 @@
+""" The "ugly" module is dedicated to some *ugly* workarounds """
+
+import os
+import sf_common
+
+def noisy_warnings():
+    """ Emit some noisy warnings """
+
+    os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log'
+    return 'noisy_warnings.log'
+
+def generate_values():
+    """ Generate initial values, available in configs """
+
+    return {
+        'prjxray_db': sf_common.sub('prjxray-config').decode().replace('\n', ''),
+        'python3': sf_common.sub('which', 'python3').decode().replace('\n', ''),
+        'noisyWarnings': noisy_warnings()
+    }
diff --git a/sfbuild/sfbuild b/sfbuild/sfbuild
new file mode 100644
index 000000000..1a2d46a4b
--- /dev/null
+++ b/sfbuild/sfbuild
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+MYDIR=`dirname "$0"`
+
+python3 "${MYDIR}/sfbuild.py" "$@"
\ No newline at end of file
diff --git a/sfbuild/sfbuild.py b/sfbuild/sfbuild.py
new file mode 100755
index 000000000..82768068c
--- /dev/null
+++ b/sfbuild/sfbuild.py
@@ -0,0 +1,641 @@
+#!/usr/bin/env python3
+
+"""
+sfbuild - Symbiflow Build System
+
+This tool allows for building FPGA targets (such as bitstreams) for any
+supported platform with just one simple command and a project file.
+
+The idea is that sfbuild wraps all the tools needed by different platforms in
+"modules", which define inputs/outputs and various parameters. This allows
+sfbuild to resolve dependencies for any target, provided that a "flow
+definition" file exists for that target. The flow definition file lists the
+modules available for the platform and may tweak some settings of those
+modules.
+
+A basic example of using sfbuild:
+
+$ sfbuild build --platform arty_35 -t bitstream
+
+This will make sfbuild attempt to create a bitstream for the arty_35 platform.
+flow.json is a flow configuration file, which should be created for a project
+that uses sfbuild. It contains project-specific definitions needed within the
+flow, such as the list of source code files.
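+
+A minimal flow.json could look roughly like this (the platform name, file
+names and values below are made up for illustration):
+
+{
+    "default_platform": "xc7a50t",
+    "dependencies": {
+        "sources": ["counter.v"]
+    },
+    "xc7a50t": {
+        "default_target": "bitstream",
+        "values": { "top": "top" }
+    }
+}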
+""" + +from argparse import Namespace +import os +import json +from typing import Iterable +from colorama import Fore, Style +from sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \ + sfprint +from sf_module import * +from sf_cache import SymbiCache +import sf_ugly +from sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \ + open_project_flow_cfg, verify_platform_name, \ + verify_stage +from sf_module_runner import * +from sf_module_inspector import get_module_info +from sf_stage import Stage +from sf_argparse import setup_argparser, get_cli_flow_config + +SYMBICACHEPATH = '.symbicache' + +mypath = os.path.realpath(os.sys.argv[0]) +mypath = os.path.dirname(mypath) +binpath = os.path.realpath(os.path.join(mypath, '..')) + +share_dir_path = os.path.realpath(os.path.join(mypath, '../../share/symbiflow')) + +class DependencyNotProducedException(Exception): + dep_name: str + provider: str + + def __init__(self, dep_name: str, provider: str): + self.dep_name = dep_name + self.provider = provider + + def __str__(self) -> str: + return f'Stage `{self.provider}` did not produce promised ' \ + f'dependency `{self.dep_name}`' + +def dep_value_str(dep: str): + return ':' + dep + +def platform_stages(platform_flow, r_env): + """ Iterates over all stages available in a given flow. """ + + stage_options = platform_flow.get('stage_options') + for stage_name, modulestr in platform_flow['stages'].items(): + mod_opts = stage_options.get(stage_name) if stage_options else None + yield Stage(stage_name, modulestr, mod_opts, r_env) + +def req_exists(r): + """ Checks whether a dependency exists on a drive. """ + + if type(r) is str: + if not os.path.isfile(r) and not os.path.islink(r) \ + and not os.path.isdir(r): + return False + elif type(r) is list: + return not (False in map(req_exists, r)) + else: + raise Exception(f'Requirements can be currently checked only for single ' + f'paths, or path lists (reason: {r})') + return True + +def map_outputs_to_stages(stages: 'list[Stage]'): + """ + Associates a stage with every possible output. + This is commonly refferef to as `os_map` (output-stage-map) through the code. + """ + + os_map: 'dict[str, Stage]' = {} # Output-Stage map + for stage in stages: + for output in stage.produces: + if not os_map.get(output.name): + os_map[output.name] = stage + elif os_map[output.name] != stage: + raise Exception(f'Dependency `{output.name}` is generated by ' + f'stage `{os_map[output.name].name}` and ' + f'`{stage.name}`. 
Dependencies can have only one ' + 'provider at most.') + return os_map + +def filter_existing_deps(deps: 'dict[str, ]', symbicache): + return [(n, p) for n, p in deps.items() \ + if req_exists(p)] # and not dep_differ(p, symbicache)] + +def get_stage_values_override(og_values: dict, stage: Stage): + values = og_values.copy() + values.update(stage.value_ovds) + return values + +def prepare_stage_io_input(stage: Stage): + return { 'params': stage.params } if stage.params is not None else {} + +def prepare_stage_input(stage: Stage, platform_name: str, values: dict, + dep_paths: 'dict[str, ]', config_paths: 'dict[str, ]'): + takes = {} + for take in stage.takes: + paths = dep_paths.get(take.name) + if paths: # Some takes may be not required + takes[take.name] = paths + + produces = {} + for prod in stage.produces: + if dep_paths.get(prod.name): + produces[prod.name] = dep_paths[prod.name] + elif config_paths.get(prod.name): + produces[prod.name] = config_paths[prod.name] + + stage_mod_cfg = { + 'takes': takes, + 'produces': produces, + 'values': values, + 'platform': platform_name, + } + + return stage_mod_cfg + +def update_dep_statuses(paths, consumer: str, symbicache: SymbiCache): + if type(paths) is str: + return symbicache.update(paths, consumer) + elif type(paths) is list: + for p in paths: + return update_dep_statuses(p, consumer, symbicache) + elif type(paths) is dict: + for _, p in paths.items(): + return update_dep_statuses(p, consumer, symbicache) + fatal(-1, 'WRONG PATHS TYPE') + +def dep_differ(paths, consumer: str, symbicache: SymbiCache): + """ + Check if a dependency differs from its last version, lack of dependency is + treated as "differs" + """ + + if type(paths) is str: + s = symbicache.get_status(paths, consumer) + if s == 'untracked': + symbicache.update(paths, consumer) + return symbicache.get_status(paths, consumer) != 'same' + elif type(paths) is list: + return True in [dep_differ(p, consumer, symbicache) for p in paths] + elif type(paths) is dict: + return True in [dep_differ(p, consumer, symbicache) \ + for _, p in paths.items()] + return False +def dep_will_differ(target: str, paths, consumer: str, + os_map: 'dict[str, Stage]', run_stages: 'set[str]', + symbicache: SymbiCache): + """ + Check if a dependency or any of the dependencies it depends on differ from + their last versions. + """ + + provider = os_map.get(target) + if provider: + return (provider.name in run_stages) or \ + dep_differ(paths, consumer, symbicache) + return dep_differ(paths, consumer, symbicache) + +def _print_unreachable_stage_message(provider: Stage, take: str): + sfprint(0, ' Stage ' + f'`{Style.BRIGHT + provider.name + Style.RESET_ALL}` is ' + 'unreachable due to unmet dependency ' + f'`{Style.BRIGHT + take.name + Style.RESET_ALL}`') + +def config_mod_runctx(stage: Stage, platform_name: str, values: 'dict[str, ]', + dep_paths: 'dict[str, str | list[str]]', + config_paths: 'dict[str, str | list[str]]'): + config = prepare_stage_input(stage, platform_name, values, + dep_paths, config_paths) + return ModRunCtx(share_dir_path, binpath, config) + +class Flow: + """ Describes a complete, configured flow, ready for execution. 
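+
+    A rough usage sketch, mirroring what `cmd_build` below does:
+
+        flow = Flow(target='bitstream', cfg=flow_cfg,
+                    symbicache=SymbiCache(SYMBICACHEPATH))
+        flow.print_resolved_dependencies(0)
+        flow.execute()
+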
""" + + # Dependendecy to build + target: str + # Values in global scope + cfg: FlowConfig + # dependency-producer map + os_map: 'dict[str, Stage]' + # Paths resolved for dependencies + dep_paths: 'dict[str, str | list[str]]' + # Explicit configs for dependency paths + # config_paths: 'dict[str, str | list[str]]' + # Stages that need to be run + run_stages: 'set[str]' + # Number of stages that relied on outdated version of a (checked) dependency + deps_rebuilds: 'dict[str, int]' + symbicache: 'SymbiCache | None' + flow_cfg: FlowConfig + + def __init__(self, target: str, cfg: FlowConfig, + symbicache: 'SymbiCache | None'): + self.target = target + self.os_map = map_outputs_to_stages(cfg.stages.values()) + + explicit_deps = cfg.get_dependency_overrides() + # print(explicit_deps) + + self.dep_paths = dict(filter_existing_deps(explicit_deps, symbicache)) + self.run_stages = set() + self.symbicache = symbicache + self.cfg = cfg + self.deps_rebuilds = {} + + self._resolve_dependencies(self.target, set()) + + def _dep_will_differ(self, dep: str, paths, consumer: str): + if not self.symbicache: # Handle --nocache mode + return True + return dep_will_differ(dep, paths, consumer, + self.os_map, self.run_stages, + self.symbicache) + + def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): + # Initialize the dependency status if necessary + if self.deps_rebuilds.get(dep) is None: + self.deps_rebuilds[dep] = 0 + # Check if an explicit dependency is already resolved + paths = self.dep_paths.get(dep) + if paths and not self.os_map.get(dep): + return + # Check if a stage can provide the required dependency + provider = self.os_map.get(dep) + if not provider or provider.name in stages_checked: + return + + # TODO: Check if the dependency is "on-demand" and force it in provider's + # config if it is. + + for take in provider.takes: + self._resolve_dependencies(take.name, stages_checked) + # If any of the required dependencies is unavailable, then the + # provider stage cannot be run + take_paths = self.dep_paths.get(take.name) + # Add input path to values (dirty hack) + provider.value_overrides[dep_value_str(take.name)] = take_paths + + if not take_paths and take.spec == 'req': + _print_unreachable_stage_message(provider, take) + return + + if self._dep_will_differ(take.name, take_paths, provider.name): + sfprint(2, f'{take.name} is causing rebuild for {provider.name}') + self.run_stages.add(provider.name) + self.deps_rebuilds[take.name] += 1 + + stage_values = self.cfg.get_r_env(provider.name).values + modrunctx = config_mod_runctx(provider, self.cfg.platform, + stage_values, self.dep_paths, + self.cfg.get_dependency_overrides()) + + outputs = module_map(provider.module, modrunctx) + + stages_checked.add(provider.name) + self.dep_paths.update(outputs) + + for _, out_paths in outputs.items(): + if (out_paths is not None) and not (req_exists(out_paths)): + self.run_stages.add(provider.name) + + # Verify module's outputs and add paths as values. + outs = outputs.keys() + # print(outs) + for o in provider.produces: + if o.name not in outs: + if o.spec == 'req' or (o.spec == 'demand' and \ + o.name in self.cfg.get_dependency_overrides().keys()): + fatal(-1, f'Module {provider.name} did not produce a mapping ' + f'for a required output `{o.name}`') + else: + # Remove an on-demand/optional output that is not produced + # from os_map. 
+ self.os_map.pop(o.name) + # Add a value for the output (dirty ack yet again) + o_path = outputs.get(o.name) + + if o_path is not None: + provider.value_overrides[dep_value_str(o.name)] = \ + outputs.get(o.name) + + + def print_resolved_dependencies(self, verbosity: int): + deps = list(self.deps_rebuilds.keys()) + deps.sort() + + for dep in deps: + status = Fore.RED + '[X]' + Fore.RESET + source = Fore.YELLOW + 'MISSING' + Fore.RESET + paths = self.dep_paths.get(dep) + + if paths: + exists = req_exists(paths) + provider = self.os_map.get(dep) + if provider and provider.name in self.run_stages: + if exists: + status = Fore.YELLOW + '[R]' + Fore.RESET + else: + status = Fore.YELLOW + '[S]' + Fore.RESET + source = f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} ' \ + f'-> {paths}' + elif exists: + if self.deps_rebuilds[dep] > 0: + status = Fore.GREEN + '[N]' + Fore.RESET + else: + status = Fore.GREEN + '[O]' + Fore.RESET + source = paths + elif self.os_map.get(dep): + status = Fore.RED + '[U]' + Fore.RESET + source = \ + f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} -> ???' + + sfprint(verbosity, f' {Style.BRIGHT + status} ' + f'{dep + Style.RESET_ALL}: {source}') + + def _build_dep(self, dep): + paths = self.dep_paths.get(dep) + provider = self.os_map.get(dep) + run = (provider.name in self.run_stages) if provider else False + if not paths: + sfprint(2, f'Dependency {dep} is unresolved.') + return False + + if req_exists(paths) and not run: + return True + else: + assert(provider) + + any_dep_differ = False if self.symbicache else True + for p_dep in provider.takes: + if not self._build_dep(p_dep.name): + assert (p_dep.spec != 'req') + continue + + if self.symbicache: + any_dep_differ |= \ + update_dep_statuses(self.dep_paths[p_dep.name], + provider.name, self.symbicache) + + # If dependencies remained the same, consider the dep as up-to date + # For example, when changing a comment in Verilog source code, + # the initial dependency resolution will report a need for complete + # rebuild, however, after the synthesis stage, the generated eblif + # will reamin the same, thus making it unnecessary to continue the + # rebuild process. 
+ if (not any_dep_differ) and req_exists(paths): + sfprint(2, f'Skipping rebuild of `' + f'{Style.BRIGHT + dep + Style.RESET_ALL}` because all ' + f'of it\'s dependencies remained unchanged') + return True + + stage_values = self.cfg.get_r_env(provider.name).values + modrunctx = config_mod_runctx(provider, self.cfg.platform, + stage_values, self.dep_paths, + self.cfg.get_dependency_overrides()) + module_exec(provider.module, modrunctx) + + self.run_stages.discard(provider.name) + + if not req_exists(paths): + raise DependencyNotProducedException(dep, provider.name) + + return True + + def execute(self): + self._build_dep(self.target) + if self.symbicache: + update_dep_statuses(self.dep_paths[self.target], '__target', + self.symbicache) + sfprint(0, f'Target `{Style.BRIGHT + self.target + Style.RESET_ALL}` ' + f'-> {self.dep_paths[self.target]}') + +def display_dep_info(stages: 'Iterable[Stage]'): + sfprint(0, 'Platform dependencies/targets:') + longest_out_name_len = 0 + for stage in stages: + for out in stage.produces: + l = len(out.name) + if l > longest_out_name_len: + longest_out_name_len = l + + desc_indent = longest_out_name_len + 7 + nl_indentstr = '\n' + for _ in range(0, desc_indent): + nl_indentstr += ' ' + + for stage in stages: + for out in stage.produces: + pname = Style.BRIGHT + out.name + Style.RESET_ALL + indent = '' + for _ in range(0, desc_indent - len(pname) + 3): + indent += ' ' + specstr = '???' + if out.spec == 'req': + specstr = f'{Fore.BLUE}guaranteed{Fore.RESET}' + elif out.spec == 'maybe': + specstr = f'{Fore.YELLOW}not guaranteed{Fore.RESET}' + elif out.spec == 'demand': + specstr = f'{Fore.RED}on-demand{Fore.RESET}' + pgen = f'{Style.DIM}stage: `{stage.name}`, '\ + f'spec: {specstr}{Style.RESET_ALL}' + pdesc = stage.meta[out.name].replace('\n', nl_indentstr) + sfprint(0, f' {Style.BRIGHT + out.name + Style.RESET_ALL}:' + f'{indent}{pdesc}{nl_indentstr}{pgen}') + +def display_stage_info(stage: Stage): + if stage is None: + sfprint(0, f'Stage does not exist') + sfbuild_fail() + return + + sfprint(0, f'Stage `{Style.BRIGHT}{stage.name}{Style.RESET_ALL}`:') + sfprint(0, f' Module: `{Style.BRIGHT}{stage.module.name}{Style.RESET_ALL}`') + sfprint(0, f' Module info:') + + mod_info = get_module_info(stage.module) + mod_info = '\n '.join(mod_info.split('\n')) + + sfprint(0, f' {mod_info}') + +sfbuild_done_str = Style.BRIGHT + Fore.GREEN + 'DONE' +sfbuild_silent = 0 + +def sfbuild_fail(): + global sfbuild_done_str + sfbuild_done_str = Style.BRIGHT + Fore.RED + 'FAILED' + +def sfbuild_done(): + sfprint(1, f'sfbuild: {sfbuild_done_str}' + f'{Style.RESET_ALL + Fore.RESET}') + exit(0) + +def setup_resolution_env(): + """ Sets up a ResolutionEnv with sfbuild's default built-ins. 
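+
+    Values such as `shareDir` and `binDir` are meant to be referenced from
+    strings in flow configs and expanded by the ResolutionEnv, e.g. a path
+    written as '${shareDir}/scripts/split_inouts.py' (assuming the `${...}`
+    reference syntax implemented by `ResolutionEnv` in `sf_common`).
+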
""" + + r_env = ResolutionEnv({ + 'shareDir': share_dir_path, + 'binDir': os.path.realpath(os.path.join(share_dir_path, '../../bin')) + }) + r_env.add_values(sf_ugly.generate_values()) + return r_env + +def open_project_flow_config(path: str) -> ProjectFlowConfig: + try: + flow_cfg = open_project_flow_cfg(path) + except FileNotFoundError as _: + fatal(-1, 'The provided flow configuration file does not exist') + return flow_cfg + +def verify_platform_stage_params(flow_cfg: FlowConfig, + platform: 'str | None' = None, + stage: 'str | None' = None): + if platform: + if not verify_platform_name(platform, mypath): + sfprint(0, f'Platform `{platform}`` is unsupported.') + return False + if args.platform not in flow_cfg.platforms(): + sfprint(0, f'Platform `{platform}`` is not in project.') + return False + + if stage: + if not verify_stage(platform, stage, mypath): + sfprint(0, f'Stage `{stage}` is invalid.') + sfbuild_fail() + return False + + return True + +def get_platform_name_for_part(part_name: str): + """ + Gets a name that identifies the platform setup required for a specific chip. + The reason for such distinction is that plenty of chips with different names + differ only in a type of package they use. + """ + + d: dict + with open(os.path.join(mypath, 'part_db/parts.json')) as f: + d = json.loads(f.read()) + return d.get(part_name.upper()) + +def cmd_build(args: Namespace): + """ sfbuild's `build` command implementation """ + + project_flow_cfg: ProjectFlowConfig = None + + + platform = args.platform + if platform is None: + if args.part: + platform = get_platform_name_for_part(args.part) + + if args.flow: + project_flow_cfg = open_project_flow_config(args.flow) + elif platform is not None: + project_flow_cfg = ProjectFlowConfig('.temp.flow.json') + project_flow_cfg.flow_cfg = get_cli_flow_config(args, platform) + if platform is None and project_flow_cfg is not None: + platform = project_flow_cfg.get_default_platform() + if platform is None: + fatal(-1, 'You have to specify a platform name or a part name or ' + 'configure a default platform.') + if platform is None or project_flow_cfg is None: + fatal(-1, 'No configuration was provided. 
Use `--flow`, `--platform` or ' + '`--part` to configure flow..') + + platform_path = os.path.join(mypath, 'platforms', platform + '.json') + platform_def = None + try: + with open(platform_path) as platform_file: + platform_def = platform_file.read() + except FileNotFoundError as _: + fatal(-1, f'The platform flow definition file {platform_path} for the platform ' + f'{platform} referenced in flow definition file {args.flow} ' + 'cannot be found.') + + r_env = setup_resolution_env() + + sfprint(2, 'Scanning modules...') + scan_modules(mypath) + + flow_definition_dict = json.loads(platform_def) + flow_def = FlowDefinition(flow_definition_dict, r_env) + flow_cfg = FlowConfig(project_flow_cfg, flow_def, platform) + + + if len(flow_cfg.stages) == 0: + fatal(-1, 'Platform flow does not define any stage') + + if args.info: + display_dep_info(flow_cfg.stages.values()) + sfbuild_done() + + if args.stageinfo: + display_stage_info(flow_cfg.stages.get(args.stageinfo[0])) + sfbuild_done() + + target = args.target + if target is None: + target = project_flow_cfg.get_default_target(platform) + if target is None: + fatal(-1, 'Please specify desired target using `--target` option ' + 'or configure a default target.') + + flow = Flow( + target=target, + cfg=flow_cfg, + symbicache=SymbiCache(SYMBICACHEPATH) if not args.nocache else None + ) + + dep_print_verbosity = 0 if args.pretend else 2 + sfprint(dep_print_verbosity, '\nProject status:') + flow.print_resolved_dependencies(dep_print_verbosity) + sfprint(dep_print_verbosity, '') + + if args.pretend: + sfbuild_done() + + try: + flow.execute() + except Exception as e: + sfprint(0, e) + sfbuild_fail() + + if flow.symbicache: + flow.symbicache.save() + +def cmd_show_dependencies(args: Namespace): + """ sfbuild's `showd` command implementation """ + + flow_cfg = open_project_flow_config(args.flow) + + if not verify_platform_stage_params(flow_cfg, args.platform): + sfbuild_fail() + return + + platform_overrides: 'set | None' = None + if args.platform is not None: + platform_overrides = \ + set(flow_cfg.get_dependency_platform_overrides(args.platform).keys()) + + display_list = [] + + raw_deps = flow_cfg.get_dependencies_raw(args.platform) + + for dep_name, dep_paths in raw_deps.items(): + prstr: str + if (platform_overrides is not None) and (dep_name in platform_overrides): + prstr = f'{Style.DIM}({args.platform}){Style.RESET_ALL} ' \ + f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' + else: + prstr = f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' + + display_list.append((dep_name, prstr)) + + display_list.sort(key = lambda p: p[0]) + + for _, prstr in display_list: + sfprint(0, prstr) + + set_verbosity_level(-1) + +if __name__ == '__main__': + parser = setup_argparser() + args = parser.parse_args() + + set_verbosity_level(args.verbose - (1 if args.silent else 0)) + + if args.command == 'build': + cmd_build(args) + sfbuild_done() + + if args.command == 'showd': + cmd_show_dependencies(args) + sfbuild_done() + + sfprint(0, 'Please use a command.\nUse `--help` flag to learn more.') + sfbuild_done() diff --git a/xc/xc7/toolchain_wrappers/__init__.py b/xc/xc7/toolchain_wrappers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/xc/xc7/toolchain_wrappers/symbiflow_common.py b/xc/xc7/toolchain_wrappers/symbiflow_common.py new file mode 100644 index 000000000..41f8b45a0 --- /dev/null +++ b/xc/xc7/toolchain_wrappers/symbiflow_common.py @@ -0,0 +1,99 @@ +import subprocess +import argparse +import os +import shutil + +class 
VprArgs:
    arch_dir: str
    arch_def: str
    lookahead: str
    rr_graph: str
    rr_graph_xml: str
    place_delay: str
    device_name: str
    eblif: str
    vpr_options: str
    optional: list[str]

    def __init__(self, mypath, args):
        self.arch_dir = \
            os.path.join(mypath, '../share/symbiflow/arch/', args.device)
        self.arch_dir = os.path.realpath(self.arch_dir)
        self.arch_def = os.path.join(self.arch_dir, 'arch.timing.xml')
        self.lookahead = \
            os.path.join(self.arch_dir,
                         'rr_graph_' + args.device + '.lookahead.bin')
        self.rr_graph = \
            os.path.join(self.arch_dir,
                         'rr_graph_' + args.device + '.rr_graph.real.bin')
        self.rr_graph_xml = \
            os.path.join(self.arch_dir,
                         'rr_graph_' + args.device + '.rr_graph.real.xml')
        self.place_delay = \
            os.path.join(self.arch_dir,
                         'rr_graph_' + args.device + '.place_delay.bin')
        self.device_name = args.device.replace('_', '-')
        self.eblif = args.eblif
        # The parser exposes this option as `--additional_vpr_options`;
        # default to an empty string so it can always be split into arguments.
        self.vpr_options = args.additional_vpr_options or ''
        self.optional = []
        if args.sdc:
            self.optional += ['--sdc_file', args.sdc]

    def export(self):
        os.environ['ARCH_DIR'] = self.arch_dir
        os.environ['ARCH_DEF'] = self.arch_def
        os.environ['LOOKAHEAD'] = self.lookahead
        os.environ['RR_GRAPH'] = self.rr_graph
        os.environ['RR_GRAPH_XML'] = self.rr_graph_xml
        os.environ['PLACE_DELAY'] = self.place_delay
        os.environ['DEVICE_NAME'] = self.device_name

def setup_vpr_arg_parser():
    # Single-value options take no `nargs=1`: that would wrap each value in a
    # one-element list, while the callers use them as plain strings.
    parser = argparse.ArgumentParser(description="Parse flags")
    parser.add_argument('-d', '--device', metavar='',
                        type=str, help='Device type (e.g. artix7)')
    parser.add_argument('-e', '--eblif', metavar='',
                        type=str, help='EBLIF filename')
    parser.add_argument('-p', '--pcf', metavar='',
                        type=str, help='PCF filename')
    parser.add_argument('-P', '--part', metavar='',
                        type=str, help='Part name')
    parser.add_argument('-s', '--sdc', metavar='',
                        type=str, help='SDC file')
    parser.add_argument('-a', '--additional_vpr_options', metavar='',
                        type=str, help='Additional VPR options')
    parser.add_argument('additional_vpr_args', nargs='*', metavar='',
                        type=str, help='Additional arguments for vpr command')
    return parser

# Execute a subprocess; exit with its return code on failure, return its stdout (bytes)
def sub(*args):
    out = subprocess.run(args, capture_output=True)
    if out.returncode != 0:
        exit(out.returncode)
    return out.stdout

# Execute `vpr`
def vpr(vprargs: VprArgs):
    return sub('vpr',
               vprargs.arch_def,
               vprargs.eblif,
               '--device', vprargs.device_name,
               *vprargs.vpr_options.split(),  # extra options as separate arguments
               '--read_rr_graph', vprargs.rr_graph,
               '--read_router_lookahead', vprargs.lookahead,
               '--read_placement_delay_lookup', vprargs.place_delay,
               *vprargs.optional)

# Point the noisy warnings log at a per-device file
def noisy_warnings(device):
    os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings-' + device + '_pack.log'

# Get the directory of the script being executed
def my_path():
    import sys  # `sys` is not among this module's top-level imports
    mypath = os.path.realpath(sys.argv[0])
    return os.path.dirname(mypath)

# Save VPR log
def save_vpr_log(filename):
    shutil.move('vpr_stdout.log', filename)
\ No newline at end of file
diff --git a/xc/xc7/toolchain_wrappers/symbiflow_place.py b/xc/xc7/toolchain_wrappers/symbiflow_place.py
new file mode 100644
index 000000000..e3a016d2b
--- /dev/null
+++ b/xc/xc7/toolchain_wrappers/symbiflow_place.py
@@ -0,0 +1,28 @@
#!/usr/bin/python3

import shutil
from symbiflow_common import *

mypath = my_path()
parser = setup_vpr_arg_parser()
parser.add_argument('-n', '--net', nargs='+', metavar='',
                    type=str, help='NET filename')
args = parser.parse_args()
vprargs = VprArgs(mypath, args)
# `VprArgs` defines no `__iadd__`; extra flags belong on the `optional` list.
vprargs.optional += ['--fix_clusters', 'constraints.place', '--place']
vprargs.export()

if not args.net:
    print('Please provide NET filename')
    exit(1)

noisy_warnings(args.device)

print('Generating constraints...\n')

# `args.net` is parsed with `nargs='+'`, so unpack it into separate arguments
sub('symbiflow_generate_constraints',
    args.eblif, *args.net, args.part, vprargs.arch_def, args.pcf)

vpr(vprargs)

save_vpr_log('place.log')
\ No newline at end of file
diff --git a/xc/xc7/toolchain_wrappers/symbiflow_route.py b/xc/xc7/toolchain_wrappers/symbiflow_route.py
new file mode 100644
index 000000000..099260cfd
--- /dev/null
+++ b/xc/xc7/toolchain_wrappers/symbiflow_route.py
@@ -0,0 +1,23 @@
#!/usr/bin/python3

import argparse
import subprocess
import os
import shutil
from symbiflow_common import *

mypath = my_path()
parser = setup_vpr_arg_parser()
args = parser.parse_args()

vprargs = VprArgs(mypath, args)
vprargs.export()

noisy_warnings(args.device)

# Append the flag as a list element; `+= '--route'` would extend the list
# with the string's individual characters.
vprargs.optional.append('--route')

print('Routing...')
vpr(vprargs)

save_vpr_log('route.log')
\ No newline at end of file
diff --git a/xc/xc7/toolchain_wrappers/symbiflow_synth.py b/xc/xc7/toolchain_wrappers/symbiflow_synth.py
new file mode 100755
index 000000000..ae9745721
--- /dev/null
+++ b/xc/xc7/toolchain_wrappers/symbiflow_synth.py
@@ -0,0 +1,84 @@
#!/usr/bin/python3

import sys
import os
import argparse
from symbiflow_common import *

def setup_arg_parser():
    # As in symbiflow_common, single-value options are parsed as plain strings.
    parser = argparse.ArgumentParser(description="Parse flags")
    parser.add_argument('-t', '--top', metavar='',
                        type=str, help='Top module name')
    parser.add_argument('-v', '--verilog', nargs='+', metavar='',
                        type=str, help='Verilog file list')
    parser.add_argument('-x', '--xdc', nargs='+', metavar='',
                        type=str, help='XDC file list')
    parser.add_argument('-d', '--device', metavar='',
                        type=str, help='Device type (e.g. artix7)')
    parser.add_argument('-p', '--part', metavar='',
                        type=str, help='Part name')
    return parser

mypath = os.path.realpath(sys.argv[0])
mypath = os.path.dirname(mypath)

share_dir_path = os.path.realpath(os.path.join(mypath, '../share/symbiflow'))
techmap_path = os.path.join(share_dir_path, 'techmaps/xc7_vpr/techmap')
utils_path = os.path.join(share_dir_path, 'scripts')
synth_tcl_path = os.path.join(utils_path, 'xc7/synth.tcl')
conv_tcl_path = os.path.join(utils_path, 'xc7/conv.tcl')
split_inouts = os.path.join(utils_path, 'split_inouts.py')

os.environ['SHARE_DIR_PATH'] = share_dir_path
os.environ['TECHMAP_PATH'] = techmap_path
os.environ['UTILS_PATH'] = utils_path

parser = setup_arg_parser()

args = parser.parse_args()

# Use `.get`: indexing os.environ raises KeyError when the variable is unset.
if not os.environ.get('DATABASE_DIR'):
    # `sub` takes the command as separate arguments and returns bytes.
    os.environ['DATABASE_DIR'] = sub('prjxray-config').decode().strip()
database_dir = os.environ['DATABASE_DIR']

# TODO: is this cross-platform???
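# Regarding the TODO above: a cross-platform alternative (a suggestion,
# assuming Python >= 3.3) would be the standard library instead of shelling
# out to `which`, which is unavailable on Windows:
#
#     import shutil
#     python3 = shutil.which('python3') or sys.executable
#
# `sys.executable` is the interpreter running this script, which makes it a
# reasonable fallback when no `python3` is found on the PATH.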
if not os.environ.get('PYTHON3'):
    os.environ['PYTHON3'] = sub('which', 'python3').decode().strip()

# Missing required arguments are errors, so exit with a non-zero status.
if not args.verilog:
    print('Please provide at least one Verilog file\n')
    exit(1)
if not args.top:
    print('Top module must be specified\n')
    exit(1)
if not args.device:
    print('Device parameter required\n')
    exit(1)
if not args.part:
    print('Part parameter required\n')
    exit(1)

out_json = args.top + '.json'
synth_json = args.top + '_io.json'
log = args.top + '_synth.log'

os.environ['TOP'] = args.top
os.environ['OUT_JSON'] = out_json
os.environ['OUT_SDC'] = args.top + '.sdc'
os.environ['SYNTH_JSON'] = synth_json
os.environ['OUT_SYNTH_V'] = args.top + '_synth.v'
os.environ['OUT_EBLIF'] = args.top + '.eblif'
os.environ['PART_JSON'] = \
    os.path.join(database_dir, args.device, args.part, 'part.json')
os.environ['OUT_FASM_EXTRA'] = args.top + '_fasm_extra.fasm'

if args.xdc:
    os.environ['INPUT_XDC_FILES'] = ' '.join(args.xdc)

print('Synthesizing...\n')

# `sub` passes arguments straight to the process, so no shell quoting is
# needed; pass the log file name (not the literal string 'log') and each
# Verilog file as a separate argument.
sub('yosys', '-p', f'tcl {synth_tcl_path}', '-l', log, *args.verilog)
sub('python3', split_inouts, '-i', out_json, '-o', synth_json)
sub('yosys', '-p', f'read_json {synth_json}; tcl {conv_tcl_path}')
diff --git a/xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py b/xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py
new file mode 100644
index 000000000..373088fd9
--- /dev/null
+++ b/xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py
@@ -0,0 +1,42 @@
#!/usr/bin/python3

import shutil
from symbiflow_common import *

mypath = my_path()
parser = setup_vpr_arg_parser()
args = parser.parse_args()
vprargs = VprArgs(mypath, args)

# Strip the extension to get the top-level name (the original regex-based
# truncation via `match.pos` always returned position 0).
top = os.path.splitext(vprargs.eblif)[0]

fasm_extra = top + '_fasm_extra.fasm'

noisy_warnings(args.device)

sub('genfasm',
    vprargs.arch_def,
    vprargs.eblif,
    '--device', vprargs.device_name,
    *vprargs.vpr_options.split(),
    '--read_rr_graph', vprargs.rr_graph)

print(f'FASM extra: {fasm_extra}\n')

# Concatenate top.fasm with the extra FASM file if necessary
if os.path.isfile(fasm_extra):
    print('writing final fasm')
    # 'r+' opens for both reading and writing ('r+<' is not a valid mode)
    with open(top + '.fasm', 'r+') as top_file, open(fasm_extra) as extra_file:
        cat = top_file.read()
        cat += '\n'
        cat += extra_file.read()
        top_file.seek(0)
        top_file.write(cat)
        top_file.truncate()

save_vpr_log('fasm.log')
From 4cbdd76fe763205263122b0e50da56353e8a3617 Mon Sep 17 00:00:00 2001
From: Unai Martinez-Corral
Date: Sun, 27 Feb 2022 14:18:02 +0100
Subject: [PATCH 03/33] mv sfbuild f4pga

Signed-off-by: Unai Martinez-Corral
---
 {sfbuild => f4pga}/CMakeLists.txt | 0
 {sfbuild => f4pga}/__init__.py | 0
 {sfbuild => f4pga}/docs/DevNotes.md | 0
 {sfbuild => f4pga}/docs/GettingStarted.md | 0
 {sfbuild => f4pga}/docs/Module.md | 0
 {sfbuild => f4pga}/docs/browse_pydoc.sh | 0
 {sfbuild => f4pga}/docs/common targets and variables.md | 0
 {sfbuild => f4pga}/docs/modules/common/generic_script_wrapper.md | 0
 {sfbuild => f4pga}/docs/modules/common/io_rename.md | 0
 {sfbuild => f4pga}/docs/modules/common/mkdirs.md | 0
 {sfbuild => f4pga}/docs/modules/common/synth.md | 0
 {sfbuild => f4pga}/part_db/parts.json | 0
 {sfbuild => f4pga}/platforms/ql-eos-s3.json | 0
 {sfbuild => f4pga}/platforms/ql-k4n8_fast.json | 0
 {sfbuild => f4pga}/platforms/ql-k4n8_slow.json | 0
 {sfbuild => f4pga}/platforms/xc7a100t.json | 0
 {sfbuild => f4pga}/platforms/xc7a200t.json | 0
 {sfbuild => f4pga}/platforms/xc7a50t.json | 0
 {sfbuild => 
f4pga}/sf_argparse.py | 0 {sfbuild => f4pga}/sf_cache.py | 0 {sfbuild => f4pga}/sf_common/__init__.py | 0 {sfbuild => f4pga}/sf_common_modules/__init__.py | 0 {sfbuild => f4pga}/sf_common_modules/fasm.py | 0 {sfbuild => f4pga}/sf_common_modules/generic_script_wrapper.py | 0 {sfbuild => f4pga}/sf_common_modules/io_rename.py | 0 {sfbuild => f4pga}/sf_common_modules/mkdirs.py | 0 {sfbuild => f4pga}/sf_common_modules/pack.py | 0 {sfbuild => f4pga}/sf_common_modules/place.py | 0 {sfbuild => f4pga}/sf_common_modules/place_constraints.py | 0 {sfbuild => f4pga}/sf_common_modules/route.py | 0 {sfbuild => f4pga}/sf_common_modules/synth.py | 0 {sfbuild => f4pga}/sf_flow_config.py | 0 {sfbuild => f4pga}/sf_module/__init__.py | 0 {sfbuild => f4pga}/sf_module_inspector.py | 0 {sfbuild => f4pga}/sf_module_runner/__init__.py | 0 {sfbuild => f4pga}/sf_stage.py | 0 {sfbuild => f4pga}/sf_ugly.py | 0 {sfbuild => f4pga}/sfbuild | 0 {sfbuild => f4pga}/sfbuild.py | 0 39 files changed, 0 insertions(+), 0 deletions(-) rename {sfbuild => f4pga}/CMakeLists.txt (100%) rename {sfbuild => f4pga}/__init__.py (100%) rename {sfbuild => f4pga}/docs/DevNotes.md (100%) rename {sfbuild => f4pga}/docs/GettingStarted.md (100%) rename {sfbuild => f4pga}/docs/Module.md (100%) rename {sfbuild => f4pga}/docs/browse_pydoc.sh (100%) rename {sfbuild => f4pga}/docs/common targets and variables.md (100%) rename {sfbuild => f4pga}/docs/modules/common/generic_script_wrapper.md (100%) rename {sfbuild => f4pga}/docs/modules/common/io_rename.md (100%) rename {sfbuild => f4pga}/docs/modules/common/mkdirs.md (100%) rename {sfbuild => f4pga}/docs/modules/common/synth.md (100%) rename {sfbuild => f4pga}/part_db/parts.json (100%) rename {sfbuild => f4pga}/platforms/ql-eos-s3.json (100%) rename {sfbuild => f4pga}/platforms/ql-k4n8_fast.json (100%) rename {sfbuild => f4pga}/platforms/ql-k4n8_slow.json (100%) rename {sfbuild => f4pga}/platforms/xc7a100t.json (100%) rename {sfbuild => f4pga}/platforms/xc7a200t.json (100%) rename {sfbuild => f4pga}/platforms/xc7a50t.json (100%) rename {sfbuild => f4pga}/sf_argparse.py (100%) rename {sfbuild => f4pga}/sf_cache.py (100%) rename {sfbuild => f4pga}/sf_common/__init__.py (100%) rename {sfbuild => f4pga}/sf_common_modules/__init__.py (100%) rename {sfbuild => f4pga}/sf_common_modules/fasm.py (100%) rename {sfbuild => f4pga}/sf_common_modules/generic_script_wrapper.py (100%) rename {sfbuild => f4pga}/sf_common_modules/io_rename.py (100%) rename {sfbuild => f4pga}/sf_common_modules/mkdirs.py (100%) rename {sfbuild => f4pga}/sf_common_modules/pack.py (100%) rename {sfbuild => f4pga}/sf_common_modules/place.py (100%) rename {sfbuild => f4pga}/sf_common_modules/place_constraints.py (100%) rename {sfbuild => f4pga}/sf_common_modules/route.py (100%) rename {sfbuild => f4pga}/sf_common_modules/synth.py (100%) rename {sfbuild => f4pga}/sf_flow_config.py (100%) rename {sfbuild => f4pga}/sf_module/__init__.py (100%) rename {sfbuild => f4pga}/sf_module_inspector.py (100%) rename {sfbuild => f4pga}/sf_module_runner/__init__.py (100%) rename {sfbuild => f4pga}/sf_stage.py (100%) rename {sfbuild => f4pga}/sf_ugly.py (100%) rename {sfbuild => f4pga}/sfbuild (100%) rename {sfbuild => f4pga}/sfbuild.py (100%) diff --git a/sfbuild/CMakeLists.txt b/f4pga/CMakeLists.txt similarity index 100% rename from sfbuild/CMakeLists.txt rename to f4pga/CMakeLists.txt diff --git a/sfbuild/__init__.py b/f4pga/__init__.py similarity index 100% rename from sfbuild/__init__.py rename to f4pga/__init__.py diff --git a/sfbuild/docs/DevNotes.md 
b/f4pga/docs/DevNotes.md similarity index 100% rename from sfbuild/docs/DevNotes.md rename to f4pga/docs/DevNotes.md diff --git a/sfbuild/docs/GettingStarted.md b/f4pga/docs/GettingStarted.md similarity index 100% rename from sfbuild/docs/GettingStarted.md rename to f4pga/docs/GettingStarted.md diff --git a/sfbuild/docs/Module.md b/f4pga/docs/Module.md similarity index 100% rename from sfbuild/docs/Module.md rename to f4pga/docs/Module.md diff --git a/sfbuild/docs/browse_pydoc.sh b/f4pga/docs/browse_pydoc.sh similarity index 100% rename from sfbuild/docs/browse_pydoc.sh rename to f4pga/docs/browse_pydoc.sh diff --git a/sfbuild/docs/common targets and variables.md b/f4pga/docs/common targets and variables.md similarity index 100% rename from sfbuild/docs/common targets and variables.md rename to f4pga/docs/common targets and variables.md diff --git a/sfbuild/docs/modules/common/generic_script_wrapper.md b/f4pga/docs/modules/common/generic_script_wrapper.md similarity index 100% rename from sfbuild/docs/modules/common/generic_script_wrapper.md rename to f4pga/docs/modules/common/generic_script_wrapper.md diff --git a/sfbuild/docs/modules/common/io_rename.md b/f4pga/docs/modules/common/io_rename.md similarity index 100% rename from sfbuild/docs/modules/common/io_rename.md rename to f4pga/docs/modules/common/io_rename.md diff --git a/sfbuild/docs/modules/common/mkdirs.md b/f4pga/docs/modules/common/mkdirs.md similarity index 100% rename from sfbuild/docs/modules/common/mkdirs.md rename to f4pga/docs/modules/common/mkdirs.md diff --git a/sfbuild/docs/modules/common/synth.md b/f4pga/docs/modules/common/synth.md similarity index 100% rename from sfbuild/docs/modules/common/synth.md rename to f4pga/docs/modules/common/synth.md diff --git a/sfbuild/part_db/parts.json b/f4pga/part_db/parts.json similarity index 100% rename from sfbuild/part_db/parts.json rename to f4pga/part_db/parts.json diff --git a/sfbuild/platforms/ql-eos-s3.json b/f4pga/platforms/ql-eos-s3.json similarity index 100% rename from sfbuild/platforms/ql-eos-s3.json rename to f4pga/platforms/ql-eos-s3.json diff --git a/sfbuild/platforms/ql-k4n8_fast.json b/f4pga/platforms/ql-k4n8_fast.json similarity index 100% rename from sfbuild/platforms/ql-k4n8_fast.json rename to f4pga/platforms/ql-k4n8_fast.json diff --git a/sfbuild/platforms/ql-k4n8_slow.json b/f4pga/platforms/ql-k4n8_slow.json similarity index 100% rename from sfbuild/platforms/ql-k4n8_slow.json rename to f4pga/platforms/ql-k4n8_slow.json diff --git a/sfbuild/platforms/xc7a100t.json b/f4pga/platforms/xc7a100t.json similarity index 100% rename from sfbuild/platforms/xc7a100t.json rename to f4pga/platforms/xc7a100t.json diff --git a/sfbuild/platforms/xc7a200t.json b/f4pga/platforms/xc7a200t.json similarity index 100% rename from sfbuild/platforms/xc7a200t.json rename to f4pga/platforms/xc7a200t.json diff --git a/sfbuild/platforms/xc7a50t.json b/f4pga/platforms/xc7a50t.json similarity index 100% rename from sfbuild/platforms/xc7a50t.json rename to f4pga/platforms/xc7a50t.json diff --git a/sfbuild/sf_argparse.py b/f4pga/sf_argparse.py similarity index 100% rename from sfbuild/sf_argparse.py rename to f4pga/sf_argparse.py diff --git a/sfbuild/sf_cache.py b/f4pga/sf_cache.py similarity index 100% rename from sfbuild/sf_cache.py rename to f4pga/sf_cache.py diff --git a/sfbuild/sf_common/__init__.py b/f4pga/sf_common/__init__.py similarity index 100% rename from sfbuild/sf_common/__init__.py rename to f4pga/sf_common/__init__.py diff --git a/sfbuild/sf_common_modules/__init__.py 
b/f4pga/sf_common_modules/__init__.py similarity index 100% rename from sfbuild/sf_common_modules/__init__.py rename to f4pga/sf_common_modules/__init__.py diff --git a/sfbuild/sf_common_modules/fasm.py b/f4pga/sf_common_modules/fasm.py similarity index 100% rename from sfbuild/sf_common_modules/fasm.py rename to f4pga/sf_common_modules/fasm.py diff --git a/sfbuild/sf_common_modules/generic_script_wrapper.py b/f4pga/sf_common_modules/generic_script_wrapper.py similarity index 100% rename from sfbuild/sf_common_modules/generic_script_wrapper.py rename to f4pga/sf_common_modules/generic_script_wrapper.py diff --git a/sfbuild/sf_common_modules/io_rename.py b/f4pga/sf_common_modules/io_rename.py similarity index 100% rename from sfbuild/sf_common_modules/io_rename.py rename to f4pga/sf_common_modules/io_rename.py diff --git a/sfbuild/sf_common_modules/mkdirs.py b/f4pga/sf_common_modules/mkdirs.py similarity index 100% rename from sfbuild/sf_common_modules/mkdirs.py rename to f4pga/sf_common_modules/mkdirs.py diff --git a/sfbuild/sf_common_modules/pack.py b/f4pga/sf_common_modules/pack.py similarity index 100% rename from sfbuild/sf_common_modules/pack.py rename to f4pga/sf_common_modules/pack.py diff --git a/sfbuild/sf_common_modules/place.py b/f4pga/sf_common_modules/place.py similarity index 100% rename from sfbuild/sf_common_modules/place.py rename to f4pga/sf_common_modules/place.py diff --git a/sfbuild/sf_common_modules/place_constraints.py b/f4pga/sf_common_modules/place_constraints.py similarity index 100% rename from sfbuild/sf_common_modules/place_constraints.py rename to f4pga/sf_common_modules/place_constraints.py diff --git a/sfbuild/sf_common_modules/route.py b/f4pga/sf_common_modules/route.py similarity index 100% rename from sfbuild/sf_common_modules/route.py rename to f4pga/sf_common_modules/route.py diff --git a/sfbuild/sf_common_modules/synth.py b/f4pga/sf_common_modules/synth.py similarity index 100% rename from sfbuild/sf_common_modules/synth.py rename to f4pga/sf_common_modules/synth.py diff --git a/sfbuild/sf_flow_config.py b/f4pga/sf_flow_config.py similarity index 100% rename from sfbuild/sf_flow_config.py rename to f4pga/sf_flow_config.py diff --git a/sfbuild/sf_module/__init__.py b/f4pga/sf_module/__init__.py similarity index 100% rename from sfbuild/sf_module/__init__.py rename to f4pga/sf_module/__init__.py diff --git a/sfbuild/sf_module_inspector.py b/f4pga/sf_module_inspector.py similarity index 100% rename from sfbuild/sf_module_inspector.py rename to f4pga/sf_module_inspector.py diff --git a/sfbuild/sf_module_runner/__init__.py b/f4pga/sf_module_runner/__init__.py similarity index 100% rename from sfbuild/sf_module_runner/__init__.py rename to f4pga/sf_module_runner/__init__.py diff --git a/sfbuild/sf_stage.py b/f4pga/sf_stage.py similarity index 100% rename from sfbuild/sf_stage.py rename to f4pga/sf_stage.py diff --git a/sfbuild/sf_ugly.py b/f4pga/sf_ugly.py similarity index 100% rename from sfbuild/sf_ugly.py rename to f4pga/sf_ugly.py diff --git a/sfbuild/sfbuild b/f4pga/sfbuild similarity index 100% rename from sfbuild/sfbuild rename to f4pga/sfbuild diff --git a/sfbuild/sfbuild.py b/f4pga/sfbuild.py similarity index 100% rename from sfbuild/sfbuild.py rename to f4pga/sfbuild.py From 60e7c8505e0c3973dd874f04a829f1c03af56cf7 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Sun, 27 Feb 2022 14:26:29 +0100 Subject: [PATCH 04/33] mv f4pga/docs docs/f4pga Signed-off-by: Unai Martinez-Corral --- docs/conf.py | 6 +++++- 
.../f4pga/CommonTargetsAndVariables.md | 8 ++++---- {f4pga/docs => docs/f4pga}/DevNotes.md | 0 {f4pga/docs => docs/f4pga}/GettingStarted.md | 0 {f4pga/docs => docs/f4pga}/Module.md | 0 {f4pga/docs => docs/f4pga}/browse_pydoc.sh | 4 ++-- .../f4pga}/common/generic_script_wrapper.md | 14 ++++---------- docs/f4pga/common/index.rst | 9 +++++++++ .../modules => docs/f4pga}/common/io_rename.md | 14 ++++---------- .../docs/modules => docs/f4pga}/common/mkdirs.md | 8 ++------ .../docs/modules => docs/f4pga}/common/synth.md | 12 +++--------- docs/index.rst | 16 ++++++++++++++-- docs/requirements.txt | 1 + 13 files changed, 48 insertions(+), 44 deletions(-) rename f4pga/docs/common targets and variables.md => docs/f4pga/CommonTargetsAndVariables.md (90%) rename {f4pga/docs => docs/f4pga}/DevNotes.md (100%) rename {f4pga/docs => docs/f4pga}/GettingStarted.md (100%) rename {f4pga/docs => docs/f4pga}/Module.md (100%) rename {f4pga/docs => docs/f4pga}/browse_pydoc.sh (50%) rename {f4pga/docs/modules => docs/f4pga}/common/generic_script_wrapper.md (93%) create mode 100644 docs/f4pga/common/index.rst rename {f4pga/docs/modules => docs/f4pga}/common/io_rename.md (86%) rename {f4pga/docs/modules => docs/f4pga}/common/mkdirs.md (65%) rename {f4pga/docs/modules => docs/f4pga}/common/synth.md (92%) diff --git a/docs/conf.py b/docs/conf.py index 281dd71da..1fb9030d7 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -47,6 +47,7 @@ 'sphinx.ext.intersphinx', 'sphinx_verilog_domain', 'sphinxcontrib.bibtex', + 'recommonmark' ] bibtex_default_style = 'plain' @@ -56,7 +57,10 @@ templates_path = ['_templates'] -source_suffix = ['.rst', '.md'] +source_suffix = { + '.rst': 'restructuredtext', + '.md': 'markdown' +} master_doc = 'index' diff --git a/f4pga/docs/common targets and variables.md b/docs/f4pga/CommonTargetsAndVariables.md similarity index 90% rename from f4pga/docs/common targets and variables.md rename to docs/f4pga/CommonTargetsAndVariables.md index 52d68af70..3e24b60dd 100644 --- a/f4pga/docs/common targets and variables.md +++ b/docs/f4pga/CommonTargetsAndVariables.md @@ -3,7 +3,7 @@ Targets and values are named with some conventions. 
Below are lists of the target and value names along with their meanings:

-### Common targets that need to be provided by the user:
+## Common targets that need to be provided by the user:

 | Target name | list | Description |
 |-------------|:----:|-------------|
@@ -12,7 +12,7 @@ Below are lists of the target and value names along with their meanings:
 | `pcf` | no | Physical Constraints File |

-### Commonly requested targets (available in most flows):
+## Commonly requested targets (available in most flows):

 | Target name | list | Description |
 |-------------|:----:|-------------|
@@ -23,7 +23,7 @@ Below are lists of the target and value names along with their meanings:
 | `build_dir` | no | A directory to put the output files in |

-### Built-in values
+## Built-in values

 | Value name | type | Description |
 |------------|------|-------------|
@@ -32,7 +32,7 @@ Below are lists of the target and value names along with their meanings:
 | `prjxray_db` | `string` | Path to Project X-Ray database |

-### Values commonly used in flow definitions:
+## Values commonly used in flow definitions:

 | Value name | type | Description |
 |------------|------|-------------|
diff --git a/f4pga/docs/DevNotes.md b/docs/f4pga/DevNotes.md
similarity index 100%
rename from f4pga/docs/DevNotes.md
rename to docs/f4pga/DevNotes.md
diff --git a/f4pga/docs/GettingStarted.md b/docs/f4pga/GettingStarted.md
similarity index 100%
rename from f4pga/docs/GettingStarted.md
rename to docs/f4pga/GettingStarted.md
diff --git a/f4pga/docs/Module.md b/docs/f4pga/Module.md
similarity index 100%
rename from f4pga/docs/Module.md
rename to docs/f4pga/Module.md
diff --git a/f4pga/docs/browse_pydoc.sh b/docs/f4pga/browse_pydoc.sh
similarity index 50%
rename from f4pga/docs/browse_pydoc.sh
rename to docs/f4pga/browse_pydoc.sh
index d3cae9fb4..d82dd669c 100755
--- a/f4pga/docs/browse_pydoc.sh
+++ b/docs/f4pga/browse_pydoc.sh
@@ -1,7 +1,7 @@
 #!/bin/sh

 MY_DIR=`dirname $0`
-SFBUILD_DIR=${MY_DIR}/..
+SFBUILD_DIR=${MY_DIR}/../../f4pga
 SFBUILD_PY=${SFBUILD_DIR}/sfbuild.py

-PYTHONPATH=${SFBUILD_DIR} pydoc -b
\ No newline at end of file
+PYTHONPATH=${SFBUILD_DIR} pydoc -b
diff --git a/f4pga/docs/modules/common/generic_script_wrapper.md b/docs/f4pga/common/generic_script_wrapper.md
similarity index 93%
rename from f4pga/docs/modules/common/generic_script_wrapper.md
rename to docs/f4pga/common/generic_script_wrapper.md
index c6fa7c0c4..32c2a5085 100644
--- a/f4pga/docs/modules/common/generic_script_wrapper.md
+++ b/docs/f4pga/common/generic_script_wrapper.md
@@ -1,15 +1,9 @@
-# sfbuild module "generic_script_wrapper"
-
-##### _Category: Common_
-
--------------------------------
+# generic_script_wrapper

 This module provides a way to integrate an external command into an sfbuild flow.
 Its inputs and outputs are fully defined by the author of the flow definition.

-## Setup
-
-### 1. Parameters
+## Parameters

 Parameters are everything when it comes to this module:

@@ -29,7 +23,7 @@ Parameters are everything when it comes to this module:
   dependency also gets two extra values associated with it:
   `:dependency_name[noext]`, which contains the path to the dependency with
   anything after the last "." (the extension) removed, and `:dependency_name[dir]`,
   which contains the directory path of the dependency.
This is useful for deriving an output name from the input.
 * `meta` (string, optional): Description of the output dependency.
 * `inputs` (dict[string -> string | bool], mandatory):
@@ -44,4 +38,4 @@ Parameters are everything when it comes to this module:
   are implicit. If the value of the resolved string is empty and is associated with
   a named argument, the argument in question will be skipped entirely. This allows
   using optional dependencies. To use a named argument as a flag instead, set it to
-  `true`.
\ No newline at end of file
+  `true`.
diff --git a/docs/f4pga/common/index.rst b/docs/f4pga/common/index.rst
new file mode 100644
index 000000000..348dcc8df
--- /dev/null
+++ b/docs/f4pga/common/index.rst
@@ -0,0 +1,9 @@
+Modules
+#######
+
+.. toctree::
+
+   generic_script_wrapper
+   io_rename
+   mkdirs
+   synth
diff --git a/f4pga/docs/modules/common/io_rename.md b/docs/f4pga/common/io_rename.md
similarity index 86%
rename from f4pga/docs/modules/common/io_rename.md
rename to docs/f4pga/common/io_rename.md
index 7c29ff5ff..4e994d594 100644
--- a/f4pga/docs/modules/common/io_rename.md
+++ b/docs/f4pga/common/io_rename.md
@@ -1,15 +1,9 @@
-# sfbuild module "io_rename"
-
-##### _Category: Common_
-
--------------------------------
+# io_rename

 This module provides a way to rename (i.e. change) dependencies and values of an
 instance of a different module. It wraps another module, whose name is specified in
 `params.module`, and changes the names of the dependencies and values it relies on.

-## Setup
-
-### 1. Parameters
+## Parameters

 * `module` (string, required) - name of the wrapped module
 * `params` (dict[string -> any], optional): parameters passed to the wrapped
@@ -22,6 +16,6 @@ In the three mapping dicts, keys represent the names visible to the wrapped modu
 and values represent the names visible to the modules outside.
 Not specifying a mapping for a given entry will leave it with its original name.

-### 2. Values
+## Values

-All values specified for this module will be accessible by the wrapped module.
\ No newline at end of file
+All values specified for this module will be accessible by the wrapped module.
diff --git a/f4pga/docs/modules/common/mkdirs.md b/docs/f4pga/common/mkdirs.md
similarity index 65%
rename from f4pga/docs/modules/common/mkdirs.md
rename to docs/f4pga/common/mkdirs.md
index 160bdc75b..5e778b808 100644
--- a/f4pga/docs/modules/common/mkdirs.md
+++ b/docs/f4pga/common/mkdirs.md
@@ -1,13 +1,9 @@
-# sfbuild module "io_rename"
-
-##### _Category: Common_
-
--------------------------------
+# mkdirs

 This module creates directories specified by the author of the flow definition
 as its targets.

-### Parameters
+## Parameters

 Each key serves as a name of a directory to be created, while the value is the path
 for that directory.
\ No newline at end of file
diff --git a/f4pga/docs/modules/common/synth.md b/docs/f4pga/common/synth.md
similarity index 92%
rename from f4pga/docs/modules/common/synth.md
rename to docs/f4pga/common/synth.md
index 394f26e6a..5478816c0 100644
--- a/f4pga/docs/modules/common/synth.md
+++ b/docs/f4pga/common/synth.md
@@ -1,8 +1,4 @@
-# sfbuild module "synth"
-
-##### _Category: Common_
-
--------------------------------
+# synth

 The _synth_ module is meant to be used to execute YOSYS synthesis.
@@ -16,8 +12,6 @@ The module should guarantee the following outputs:
 For detailed information about these targets, please refer to
 `docs/common targets and variables.md`

-## Setup
-
 Which files are generated, and how, depends on the TCL scripts executed
 within YOSYS, and the scripts vary depending on the target platform. Due to this
 design choice it is required for the author of the flow definition to parametrize
@@ -26,13 +20,13 @@ will be generated upon a successful YOSYS run.

 The setup of the synth module follows these specifications:

-### 1. Module parameters:
+## Parameters:

 The `params` section of a stage configuration may contain a `produces` list.
 The list should specify additional targets that will be generated
 (`?` qualifier is allowed).

-### 2. Values:
+## Values:

 The `synth` module requires the following values:
diff --git a/docs/index.rst b/docs/index.rst
index bcedf9716..21b7f0329 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -10,6 +10,7 @@ The project aims to design tools that are highly extendable and multiplatform.
    :align: center

+
 .. toctree::
   :caption: About F4PGA

@@ -38,8 +39,19 @@ The project aims to design tools that are highly extendable and multiplatform.
 .. toctree::
   :caption: Development

-  development/building-docs
-  development/venv
+  contributing/building-docs
+  contributing/venv
+
+
+.. toctree::
+  :caption: pyF4PGA Reference
+  :maxdepth: 2
+
+  f4pga/GettingStarted
+  f4pga/CommonTargetsAndVariables
+  f4pga/Module
+  f4pga/common/index
+  f4pga/DevNotes

 .. toctree::
diff --git a/docs/requirements.txt b/docs/requirements.txt
index a7505cb78..aa87fdf31 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,3 +1,4 @@
+recommonmark
 sphinx>=4.5.0
 sphinxcontrib-bibtex
 https://github.com/SymbiFlow/sphinx_symbiflow_theme/archive/chips.zip#sphinx-symbiflow-theme
From 1b304a42713e85c6762b55af4cfdd0efc6b5d43e Mon Sep 17 00:00:00 2001
From: Unai Martinez-Corral
Date: Wed, 9 Mar 2022 02:59:06 +0100
Subject: [PATCH 05/33] docs: use MyST instead of recommonmark

Signed-off-by: Unai Martinez-Corral
---
 docs/conf.py | 2 +-
 docs/requirements.txt | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 1fb9030d7..e07d8d4b2 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -47,7 +47,7 @@
     'sphinx.ext.intersphinx',
     'sphinx_verilog_domain',
     'sphinxcontrib.bibtex',
-    'recommonmark'
+    'myst_parser'
 ]

 bibtex_default_style = 'plain'
diff --git a/docs/requirements.txt b/docs/requirements.txt
index aa87fdf31..49893c24b 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,4 +1,4 @@
-recommonmark
+myst-parser
 sphinx>=4.5.0
 sphinxcontrib-bibtex
 https://github.com/SymbiFlow/sphinx_symbiflow_theme/archive/chips.zip#sphinx-symbiflow-theme
From 2ba01e787a6389b37231b4e70045a125de667351 Mon Sep 17 00:00:00 2001
From: Unai Martinez-Corral
Date: Sun, 27 Feb 2022 15:43:11 +0100
Subject: [PATCH 06/33] docs/f4pga: reorganise, add scope and references

Signed-off-by: Unai Martinez-Corral
---
 docs/f4pga/CommonTargetsAndVariables.md | 44 -------------
 docs/f4pga/{GettingStarted.md => Usage.md} | 61 ++++++++++++++---
 docs/f4pga/common/index.rst | 9 ---
 docs/f4pga/index.rst | 27 ++++++++
 .../generic_script_wrapper.md | 0
 docs/f4pga/{Module.md => modules/index.md} | 47 ++++++++------
 docs/f4pga/{common => modules}/io_rename.md | 0
 docs/f4pga/{common => modules}/mkdirs.md | 0
 docs/f4pga/{common => modules}/synth.md | 0
 docs/index.rst | 9 ++-
 10 files changed, 112 insertions(+), 85 deletions(-)
 delete mode 100644 
docs/f4pga/CommonTargetsAndVariables.md rename docs/f4pga/{GettingStarted.md => Usage.md} (86%) delete mode 100644 docs/f4pga/common/index.rst create mode 100644 docs/f4pga/index.rst rename docs/f4pga/{common => modules}/generic_script_wrapper.md (100%) rename docs/f4pga/{Module.md => modules/index.md} (95%) rename docs/f4pga/{common => modules}/io_rename.md (100%) rename docs/f4pga/{common => modules}/mkdirs.md (100%) rename docs/f4pga/{common => modules}/synth.md (100%) diff --git a/docs/f4pga/CommonTargetsAndVariables.md b/docs/f4pga/CommonTargetsAndVariables.md deleted file mode 100644 index 3e24b60dd..000000000 --- a/docs/f4pga/CommonTargetsAndVariables.md +++ /dev/null @@ -1,44 +0,0 @@ -# sfbuild's common targets and values - -Targets and values are named with some conventions. -Below are lists of the target and value names along with their meanings" - -## Common targets that need to be provided by the user: - -| Target name | list | Description | -|-------------|:----:|-------------| -| `sources` | yes | Verilog sources | -| `sdc` | no | Synopsys Design Constraints | -| `xdc` | yes | Xilinx Design Constraints (available only for Xilinx platforms) | -| `pcf` | no | Physical Constraints File | - -## Commonly requested targets (available in most flows): - -| Target name | list | Description | -|-------------|:----:|-------------| -| `eblif` | no | Extended blif file | -| `bitstream` | no | Bitstream | -| `net` | no | Netlist | -| `fasm` | no | Final FPGA Assembly | -| `fasm_extra` | no | Additional FPGA assembly that may be generated during synthesis | -| `build_dir` | no | A directory to put the output files in | - -## Built-in values - -| Value name | type | Description | -|------------|------|-------------| -| `shareDir` | `string` | Path to symbiflow's installation "share" directory | -| `python3` | `string` | Path to Python 3 executable | -| `noisyWarnings` | `string` | Path to noisy warnings log (should be deprecated) | -| `prjxray_db` | `string` | Path to Project X-Ray database | - -## Values commonly used in flow definitions: - -| Value name | type | Description | -|------------|------|-------------| -| `top` | `string` | Top module name | -| `build_dir` | `string` | Path to build directory (should be optional) | -| `device` | `string` | Name of the device | -| `vpr_options` | `dict[string -> string \| number]` | Named ptions passed to VPR. No `--` prefix included. | -| `part_name` | `string` | Name of the chip used. The distinction between `device` and `part_name` is ambiguous at the moment and should be addressed in the future. | -| `arch_def` | `string` | Path to an XML file containing architecture definition. | diff --git a/docs/f4pga/GettingStarted.md b/docs/f4pga/Usage.md similarity index 86% rename from docs/f4pga/GettingStarted.md rename to docs/f4pga/Usage.md index e20620a59..d0e0ec19a 100644 --- a/docs/f4pga/GettingStarted.md +++ b/docs/f4pga/Usage.md @@ -1,4 +1,4 @@ -# sfbuild +# Usage ## Getting started @@ -28,8 +28,6 @@ flow completes. Look for a line like this one on stdout.: Target `bitstream` -> build/arty_35/top.bit ``` -------------------------------------------------------------------------------------- - ## Fundamental concepts If you want to create a new sfbuild project, it's highly recommended that you @@ -66,7 +64,7 @@ as well as provide information about files required and produced by the tool. ### Dependecies A **dependency** is any file, directory or a list of such that a **module** takes as -its input or produces on its output. 
+its input or produces on its output. Modules specify their dependencies by using symbolic names instead of file paths. The files they produce are also given symbolic names and paths which are either set @@ -142,7 +140,7 @@ here and there. Typically **projects's flow configuration** will be used to resolve dependencies for _HDL source code_ and _device constraints_. -## Using sfbuild to build a target +## Build a target To build a **target** "`target_name`", use the following command: ``` @@ -188,7 +186,7 @@ not exist, `mkdirs` will create it and provide as `build_dir` dependency. building a bitstream for *x7a50t* would look like that: With this flow configuration, you can build a bitstream for arty_35 using the -following command: +following command: ``` $ python3 /path/to/sfbuild.py flow.json -p x7a50t -t bitstream @@ -282,7 +280,7 @@ to the box. dependency was necessary or not. * **O** - dependency present, unchanged. This dependency is already built and is confirmed to stay unchanged during flow execution. - * **N** - dependency present, new/changed. This dependency is already present on + * **N** - dependency present, new/changed. This dependency is already present on the persistent storage, but it was either missing earlier, or its content changed from the last time. (WARNING: it won't continue to be reported as "**N**" after a successful build of @@ -290,7 +288,7 @@ to the box. should be fixed in the future.) * **S** - depenendency not present, resolved. This dependency is not currently available on the persistent storage, however it will be produced within - flow's execution. + flow's execution. * **R** - depenendency present, resolved, requires rebuild. This dependency is currently available on the persistent storage, however it has to be rebuilt due to the changes in the project. @@ -312,4 +310,49 @@ colon: In the example above file `counter.v` has been modified and is now marked as "**N**". This couses a bunch of other dependencies to be reqbuilt ("**R**"). -`build_dir` and `xdc` were already present, so they are marked as "**O**". \ No newline at end of file +`build_dir` and `xdc` were already present, so they are marked as "**O**". + +## Common targets and values + +Targets and values are named with some conventions. 
+Below are lists of the target and value names along with their meanings:
+
+### Need to be provided by the user
+
+| Target name | list | Description |
+|-------------|:----:|-------------|
+| `sources` | yes | Verilog sources |
+| `sdc` | no | Synopsys Design Constraints |
+| `xdc` | yes | Xilinx Design Constraints (available only for Xilinx platforms) |
+| `pcf` | no | Physical Constraints File |
+
+### Available in most flows
+
+| Target name | list | Description |
+|-------------|:----:|-------------|
+| `eblif` | no | Extended blif file |
+| `bitstream` | no | Bitstream |
+| `net` | no | Netlist |
+| `fasm` | no | Final FPGA Assembly |
+| `fasm_extra` | no | Additional FPGA assembly that may be generated during synthesis |
+| `build_dir` | no | A directory to put the output files in |
+
+### Built-in values
+
+| Value name | type | Description |
+|------------|------|-------------|
+| `shareDir` | `string` | Path to symbiflow's installation "share" directory |
+| `python3` | `string` | Path to Python 3 executable |
+| `noisyWarnings` | `string` | Path to noisy warnings log (should be deprecated) |
+| `prjxray_db` | `string` | Path to Project X-Ray database |
+
+### Used in flow definitions
+
+| Value name | type | Description |
+|------------|------|-------------|
+| `top` | `string` | Top module name |
+| `build_dir` | `string` | Path to build directory (should be optional) |
+| `device` | `string` | Name of the device |
+| `vpr_options` | `dict[string -> string \| number]` | Named options passed to VPR. No `--` prefix included. |
+| `part_name` | `string` | Name of the chip used. The distinction between `device` and `part_name` is ambiguous at the moment and should be addressed in the future. |
+| `arch_def` | `string` | Path to an XML file containing architecture definition. |
diff --git a/docs/f4pga/common/index.rst b/docs/f4pga/common/index.rst
deleted file mode 100644
index 348dcc8df..000000000
--- a/docs/f4pga/common/index.rst
+++ /dev/null
@@ -1,9 +0,0 @@
-Modules
-#######
-
-.. toctree::
-
-   generic_script_wrapper
-   io_rename
-   mkdirs
-   synth
diff --git a/docs/f4pga/index.rst b/docs/f4pga/index.rst
new file mode 100644
index 000000000..baf587d4d
--- /dev/null
+++ b/docs/f4pga/index.rst
@@ -0,0 +1,27 @@
+Overview
+########
+
+Python F4PGA is a package containing multiple modules to facilitate the usage of all the tools integrated in the F4PGA
+ecosystem, and beyond.
+The scope of Python F4PGA is threefold:
+
+* Provide a fine-grained *pythonic* interface to the tools and utilities available as either command-line interfaces
+  (CLIs) or application programming interfaces (APIs) (either web or through shared libraries).
+* Provide a CLI entrypoint covering whole flows for end-users to produce bitstreams from HDL and/or software sources.
+* Provide a CLI entrypoint for developers contributing to bitstream documentation and testing (continuous integration).
+
+.. ATTENTION::
+  This is work-in-progress to adapt and organize the existing shell/bash based plumbing from multiple F4PGA repositories.
+  Therefore, it's still a *pre-alpha* and the codebase, commands and flows are subject to change.
+  It is strongly suggested not to rely on Python F4PGA until this note is updated/removed.
+ +References +========== + +* :gh:`chipsalliance/fpga-tool-perf#390@issuecomment-1023487178 ` +* :ghsharp:`2225` +* :ghsharp:`2371` +* :ghsharp:`2455` +* `F4PGA GSoC 2022 project ideas: Generalization of wrapper scripts for installed F4PGA toolchain and making them OS agnostic `__ +* :gh:`FuseSoc ` | :gh:`Edalize ` +* `Electronic Design Automation Abstraction (EDA²) `__ diff --git a/docs/f4pga/common/generic_script_wrapper.md b/docs/f4pga/modules/generic_script_wrapper.md similarity index 100% rename from docs/f4pga/common/generic_script_wrapper.md rename to docs/f4pga/modules/generic_script_wrapper.md diff --git a/docs/f4pga/Module.md b/docs/f4pga/modules/index.md similarity index 95% rename from docs/f4pga/Module.md rename to docs/f4pga/modules/index.md index c11e9337b..10e172e5b 100644 --- a/docs/f4pga/Module.md +++ b/docs/f4pga/modules/index.md @@ -1,10 +1,12 @@ -# sfbuild modules interface +# Modules + +## Interface This document contains all the information needed to configure modules for your _**sfbuild**_ project as well as some info about the API used to write modules. -## Configuration interface: +### Configuration interface: Modules are configured through an internal API by _**sfbuild**_. The basic requirement for a module script is to expose a class with `Module` @@ -23,13 +25,13 @@ A _module configuration_ is a structure with the following fields: either singular strings or lists of strings. * `produces` = a dictionary that contains keys which are names of the dependencies produced by the module. The values are requested filenames for the - files generated by the module. They can be either singular strings or lists of + files generated by the module. They can be either singular strings or lists of strings. * `values` - a dictionary that contains other values used to configure the module. The keys are value's names and the values can have any type. * `platform` - Platform's name. This is a string. -## Platform-level configuration +### Platform-level configuration In case of **platform's flow definition**, a `values` dictionary can be defined globally and the values defined there will be passed to every module's config. @@ -38,7 +40,7 @@ Those values can be overriden per-module through `module_options` dictionary. Parameters used during module's contruction can also be defined in `module_options` as `params` (those are not a part of _module configuration_, instead they are used -during the actual construction of a module instance, before it declares any of its +during the actual construction of a module instance, before it declares any of its input/outputs etc.) Defining dictionaries for `takes` and `produces` is disallowed within @@ -47,7 +49,7 @@ Defining dictionaries for `takes` and `produces` is disallowed within For a detailed look on the concepts described here, please have a look at `sfbuild/platforms/xc7a50t` -## Project-level configuration +### Project-level configuration Similarly to **platform's flow definition**, `values` dict can be provided. The values provided there will overwrite the values from @@ -57,7 +59,7 @@ Unlike **platform's flow definition**, **project's flow configuration** may cont `dependencies` dict. This dictionary would be used to map saymbolic dependency names to actual paths. Most dependencies can have their paths resolved implicitly without the need to provide explicit paths, which is a mechanism that is described -in a later section of this document. However some dependencies must be provided +in a later section of this document. 
However some dependencies must be provided explicitly, e.g. paths to the project's
Verilog source files. It should be noted that depending on the flow definition and
the dependency in question, the path does not necessarily have to point to an
already existing file. If the dependency is a
@@ -75,11 +77,11 @@
Each of those entries may contain `dependencies`, `values` fields which
will overload the `dependencies` and `values` defined in a global scope of
**project's flow configuration**. Any other field under those platform entries
is treated as a _stage-specific-configuration_. The key is a name of a stage within
-a flow for the specified platform and the values are dicts which may contain 
+a flow for the specified platform and the values are dicts which may contain
`dependencies` and `values` fields that overload `dependencies` and `values`
respectively, locally for the stage.

-## Internal environmental variables
+### Internal environmental variables

It's very useful to be able to refer to some data within
**platform's flow definition** and **project's flow configuration** to
@@ -129,13 +131,13 @@
Be careful when using this kind of resolution, as its computational and memory
complexity grows exponentially with regard to the number of list variables being
referenced, which is a rather obvious fact, but it's still worth mentioning.

-The variables that can be referenced within a definition/configuration fall into 3 
+The variables that can be referenced within a definition/configuration fall into 3
categories:

* **value references** - anything declared as a `value` can be accessed by its
  name
* **dependency references** - any dependency path can be referenced using the name
-  of the dependency prefaced with a ':' prefix. Eg.: `${:eblif}` will resolve 
+  of the dependency prefaced with a ':' prefix. E.g.: `${:eblif}` will resolve
  to the path of the `eblif` dependency. Make sure that the dependency can be
  actually resolved when you are using this kind of reference. For example you
  can't use a reference to the `eblif` dependency in a module which does not
@@ -149,7 +151,7 @@ categories:
  * `python3` - path to Python 3 interpreter.
  * `noisyWarnings` - (this one should probably get removed)

-## `Module` class
+### `Module` class

Each module is represented as a class derived from the `Module` class.

@@ -165,14 +167,14 @@
Each module script should expose the class by defining its name/type alias as
`ModuleClass`. sfbuild tries to access a `ModuleClass` attribute within a package
when instantiating a module.

-## Module's execution modes
+### Module's execution modes

A module has essentially two execution modes:

* _mapping_ mode
* _exec_ mode

-### _mapping_ mode
+#### _mapping_ mode

In _mapping_ mode the module is provided with an incomplete configuration which
includes:
@@ -184,7 +186,7 @@ includes:

The module has to provide a dictionary that will give every output dependency
that is not _on-demand_ a default path. This is basically a promise that when
executed in _exec_ mode, the module will produce files for these paths.
-Typically such paths would be derived from a path of one of it's input dependencies. 
+Typically such paths would be derived from a path of one of its input dependencies.

This mechanism allows the user to avoid specifying an explicit path for each
intermediate target.

@@ -192,7 +194,7 @@ It should be noted that variables referring to the output
dependencies can't be accessed at this stage for the obvious reason as their
values are yet to be evaluated.
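To make the two modes concrete, below is a minimal sketch of a module covering the
_mapping_ mode described above and the _exec_ mode described next. Only the
`Module` base class, the `__init__` fields and the `ModuleClass` alias come from
this document; the method names (`map_io`, `execute`) and the shape of the `ctx`
object are illustrative assumptions, not the actual API.

```python
import shutil

# Hypothetical minimal module: copies the `eblif` dependency. The `map_io` and
# `execute` names and the `ctx` layout are assumptions for illustration only.
class EblifCopy(Module):
    def __init__(self, params):
        self.takes = ['eblif']            # input dependencies
        self.produces = ['eblif_copy']    # outputs promised by this module
        self.values = []                  # named values read by this module
        self.prod_meta = {'eblif_copy': 'A verbatim copy of the EBLIF file'}

    def map_io(self, ctx):
        # _mapping_ mode: promise an output path derived from an input path.
        return {'eblif_copy': ctx.takes['eblif'] + '.copy'}

    def execute(self, ctx):
        # _exec_ mode: actually produce the promised file.
        shutil.copy(ctx.takes['eblif'], ctx.outputs['eblif_copy'])

# sfbuild accesses the `ModuleClass` attribute when instantiating a module.
ModuleClass = EblifCopy
```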
-### _exec_ mode +#### _exec_ mode In _exec_ mode the module does the actual work. @@ -210,7 +212,7 @@ The configuration passed into this mode is full and it includes: When the module finishes executing in _exec_ mode, all of the dependencies described in `outputs` should be present. -## Module initialization/instantiation +### Module initialization/instantiation In the the `__init__` method of module's class, the following fields should be set: @@ -222,7 +224,7 @@ set: * `prod_meta` - A dictionary which maps product names to descriptions of these products. -### Qualifiers/decorators +#### Qualifiers/decorators By default the presence of all the dependencies and values is mandatory (In case of `produces` that means that the module always has to produce the listed @@ -242,3 +244,12 @@ ways: Currently it's impossible to combine both '`!`' and '`?`' together. This limitation does not have any reason behind it other than the way the qualifier system is implemented at the moment. It might be removed in the future. + +## Common modules + +```{toctree} +generic_script_wrapper +io_rename +mkdirs +synth +``` diff --git a/docs/f4pga/common/io_rename.md b/docs/f4pga/modules/io_rename.md similarity index 100% rename from docs/f4pga/common/io_rename.md rename to docs/f4pga/modules/io_rename.md diff --git a/docs/f4pga/common/mkdirs.md b/docs/f4pga/modules/mkdirs.md similarity index 100% rename from docs/f4pga/common/mkdirs.md rename to docs/f4pga/modules/mkdirs.md diff --git a/docs/f4pga/common/synth.md b/docs/f4pga/modules/synth.md similarity index 100% rename from docs/f4pga/common/synth.md rename to docs/f4pga/modules/synth.md diff --git a/docs/index.rst b/docs/index.rst index 21b7f0329..f952b5179 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -44,13 +44,12 @@ The project aims to design tools that are highly extendable and multiplatform. .. toctree:: - :caption: pyF4PGA Reference + :caption: Python utils :maxdepth: 2 - f4pga/GettingStarted - f4pga/CommonTargetsAndVariables - f4pga/Module - f4pga/common/index + f4pga/index + f4pga/Usage + f4pga/modules/index f4pga/DevNotes From f00ff8924dc9f754ae64230cc61430768a1d30f6 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Sun, 13 Mar 2022 07:53:18 +0100 Subject: [PATCH 07/33] docs: s/sfbuild/f4pga/ Signed-off-by: Unai Martinez-Corral --- docs/conf.py | 4 + docs/f4pga/Usage.md | 301 +++++++++++++++++------------------- docs/f4pga/modules/index.md | 16 +- 3 files changed, 157 insertions(+), 164 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index e07d8d4b2..15ba3d464 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -53,6 +53,10 @@ bibtex_default_style = 'plain' bibtex_bibfiles = ['refs.bib'] +myst_enable_extensions = [ + "colon_fence", +] + numfig = True templates_path = ['_templates'] diff --git a/docs/f4pga/Usage.md b/docs/f4pga/Usage.md index d0e0ec19a..66a8d55b5 100644 --- a/docs/f4pga/Usage.md +++ b/docs/f4pga/Usage.md @@ -2,154 +2,134 @@ ## Getting started -To use _**sfbuild**_ you need a working python 3 installation which should be icluded -as a part of conda virtual environment set up during symbiflow installation. -_**sfbuild**_ installs along _**Symbiflow**_ with any version of toolchain. However, -only _XC7_ architectures are supported currently and _Quicklogic_ support is a work -in progress. _**sfbuild**_'s installation directory is `bin/sfbuild`, under your -_**Symbiflow**_ installation directory. `sfbuild.py` is the script that you should -run to use _**sfbuild**_. 
- -To get started with a project that already uses sfbuild, go to the project's -directory and run the following line to build a bitstream: -``` -$ python3 /path/to/sfbuild.py flow.json -p platform_name -t bitstream +To use `f4pga` you need a working Python 3 installation which should be included as a part of the conda virtual +environment set up during F4PGA installation. +`f4pga` is installed together with F4PGA, regardless of the version of the toolchain. +However, only _XC7_ architectures are supported currently and _Quicklogic_ support is a work in progress. + +To get started with a project that already uses `f4pga`, go to the project's directory and run the following line to +generate a bitstream: + +```bash +$ f4pga flow.json -p platform_name -t bitstream ``` Substitute `platform_name` by the name of the target platform (eg. `x7a50t`). -`flow.json` should be a **project's flow configuration** file included with the -project. If you are unsure if you got the right file, you can check an example of -the contents of such file shown in the "_Using sfbuild to build a target_" section. +`flow.json` should be a *project flow configuration* file included with the project. +If you are unsure if you got the right file, you can check an example of the contents of such file shown in the +*Build a target* section below. -The location of the file containing bitstream will be indicated by sfbuild after the -flow completes. Look for a line like this one on stdout.: +The location of the bitstream will be indicated by `f4pga` after the flow completes. +Look for a line like this one on stdout: -``` +```bash Target `bitstream` -> build/arty_35/top.bit ``` ## Fundamental concepts -If you want to create a new sfbuild project, it's highly recommended that you -read this section first. +If you want to create a new project, it's highly recommended that you read this section first. -### sfbuild +### f4pga -_**sfbuild**_ is a modular build system designed to handle various -_Verilog-to-bitsream_ flows for FPGAs. It works by wrapping the necessary tools -in python scripts, which are called **sfbuild modules**. The modules are then -referenced in a **platform's flow definition** files along configurations specific -for given platform. These files for come included as a part of _**sfbuild**_ for the -following platforms: +`f4pga` is a modular build system designed to handle various _Verilog-to-bitsream_ flows for FPGAs. +It works by wrapping the necessary tools in Python, which are called *f4pga modules*. +Modules are then referenced in *platform flow definition* files, together with configuration specific for a given +platform. +Flow definition files for the following platforms are included as a part of `f4pga`: * x7a50t * x7a100t * x7a200t (_soon_) -You can also write your own **platform's flow definition** file if you want to bring -support to a different device. +You can also write your own *platform flow definition* file if you want to bring support for a different device. -Each project that uses _**sfbuild**_ to perform any flow should include a _.json_ -file describing the project. The purpose of that file is to configure inputs -for the flow and possibly override configuration values if necessary. +Each project that uses `f4pga` to perform any flow should include a _.json_ file describing the project. +The purpose of that file is to configure inputs for the flow and override configuration values if necessary. 
### Modules

-A **module** (also referred to as **sfbuild module** in sistuations where there might
-be confusion between Python's _modules_ and sfbuild's _modules_) is a python scripts
-that wraps a tool used within **Symbilfow's** ecosystem. The main purpouse of this
-wrapper is to provide a unified interface for sfbuild to use and configure the tool
+A *module* (also referred to as *f4pga module* in situations where there might be confusion between arbitrary Python
+_modules_ and f4pga _modules_) is a Python script that wraps a tool used within the F4PGA ecosystem.
+The main purpose of the wrappers is to provide a unified interface for `f4pga` to use and to configure the tool,
 as well as provide information about files required and produced by the tool.

### Dependencies

-A **dependency** is any file, directory or a list of such that a **module** takes as
-its input or produces on its output.
+A *dependency* is any file, directory or a list of such that a *module* takes as its input or produces on its output.

 Modules specify their dependencies by using symbolic names instead of file paths.
-The files they produce are also given symbolic names and paths which are either set
-through **project's flow configuration** file or derived from the paths of the
-dependencies taken by the module.
+The files they produce are also given symbolic names and paths which are either set through the *project flow configuration*
+file or derived from the paths of the dependencies taken by the module.

### Target

-**Target** is a dependency that the user has asked sfbuild to produce.
+A *target* is a dependency that the user has asked F4PGA to produce.

### Flow

-A **flow** is set of **modules** executed in a right order to produce a **target**.
+A *flow* is a set of *modules* executed in the right order to produce a *target*.

### .symbicache

-All **dependencies** are tracked by a modification tracking system which stores hashes
-of the files (directories get always `'0'` hash) in `.symbicache` file in the root of
-the project. When _**sfbuild**_ constructs a **flow**, it will try to omit execution
-of modules which would receive the same data on their input. There's a strong
-_assumption_ there that a **module**'s output remains unchanged if the input
-doconfiguring esn't
-change, ie. **modules** are deterministic.
+All *dependencies* are tracked by a modification tracking system which stores hashes of the files
+(directories always get a `'0'` hash) in the `.symbicache` file in the root of the project.
+When F4PGA constructs a *flow*, it will try to omit execution of modules which would receive the same data on their
+input.
+There is a strong _assumption_ there that a *module*'s output remains unchanged if the input configuration doesn't
+change, i.e. *modules* are deterministic.

### Resolution

-A **dependency** is said to be **resolved** if it meets one of the following
-critereia:
+A *dependency* is said to be *resolved* if it meets one of the following criteria:

 * it exists on persistent storage and its hash matches the one stored in .symbicache
-* there exists such **flow** that all of the dependieces of its modules are
-  **resolved** and it produces the **dependency** in question.
+* there exists such a *flow* that all of the dependencies of its modules are *resolved* and it produces the *dependency* in
+  question.

### Platform's flow definition

-**Platform's flow definition** is a piece of data describing a space of flows for a
-given platform, serialized into a _JSON_.
+*Platform flow definition* is a piece of data describing a space of flows for a given platform, serialized into a _JSON_.
-It's stored in a file that's named after the device's name under `sfbuild/platforms`.
+*Platform flow definition* is a piece of data describing a space of flows for a given platform, serialized into a
+_JSON_ file.
+It's stored in a file named after the device under `f4pga/platforms`.
 
-**Platform's flow definition** contains a list of modules available for constructing
-flows and defines a set of values which the modules can reference. In case of some
-modules it may also define a set of parameters used during their construction.
-`mkdirs` module uses that to allow production of of multiple directories as separate
-dependencies. This however is an experimental feature which possibly will be
-removed in favor of having multiple instances of the same module with renameable
-ouputs.
+A *platform flow definition* contains a list of modules available for constructing flows and defines a set of values
+which the modules can reference.
+For some modules it may also define a set of parameters used during their construction.
+The `mkdirs` module uses this to produce multiple directories as separate dependencies.
+This, however, is an experimental feature which may be removed in favor of having multiple instances of the same
+module with renamable outputs.
 
-Not all **dependencies** have to be **resolved** at this stage, a **platform's flow
-definition** for example won't be able to provide a list of source files needed in a
-**flow**.
+Not all *dependencies* have to be *resolved* at this stage; a *platform flow definition*, for example, won't be able
+to provide a list of source files needed in a *flow*.
 
 ### Project's flow configuration
 
-Similarly to **platform's flow definition**, **Projects's flow configuration** is a
-_JSON_ that is used to configure **modules**. There are however a couple differences
-here and there.
+Similarly to the *platform flow definition*, the *project flow configuration* is a _JSON_ file used to configure
+*modules*. There are, however, a couple of differences here and there.
 
-* The most obvious one is that this file is unique for a project and
  is provided by the user of _**sfbuild**_.
+* The most obvious one is that this file is unique to a project and is provided by the user of `f4pga`.
 
-* The other difference is that it doesn't list **modules** available for the
-  platform.
+* The other difference is that it doesn't list *modules* available for the platform.
 
-* All the values provided in **projects's flow configuration** will override those
-  provided in **platform's flow definition**.
+* All the values provided in the *project flow configuration* will override those provided in the *platform flow
+  definition*.
 
 * It can contain sections with configurations for different platforms.
 
-* Unlike **platform's flow definition** it can give explicit paths to dependencies.
+* Unlike the *platform flow definition*, it can give explicit paths to dependencies.
 
-* At this stage all mandatory **dependencies** should be resolved.
+* At this stage all mandatory *dependencies* should be resolved.
 
-Typically **projects's flow configuration** will be used to resolve dependencies
-for _HDL source code_ and _device constraints_.
+Typically the *project flow configuration* will be used to resolve dependencies for _HDL source code_ and
+_device constraints_.
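+
+As a sketch of the idea, a platform-specific section uses the same `values` and `dependencies` structure as the
+global scope and simply overrides it when that platform is selected (the names below mirror the example in the next
+section and are only illustrative):
+
+```json
+{
+    "values": { "top": "top" },
+    "dependencies": { "sources": ["counter.v"] },
+    "xc7a50t": {
+        "default_target": "bitstream",
+        "values": { "part": "xc7a35tcpg236-1" },
+        "dependencies": { "build_dir": "build/arty_35" }
+    }
+}
+```
+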
 ## Build a target
 
-To build a **target** "`target_name`", use the following command:
-```
-$ python3 /path/to/sfbuild.py flow.json -p platform_device_name -t target_name
+To build a *target* `target_name`, use the following command:
+
+```bash
+$ f4pga flow.json -p platform_device_name -t target_name
 ```
 
-where `flow.json` is a path to **projects's flow configuration**
+where `flow.json` is the path to the *project flow configuration*.
 
-For example, let's consider the following
-**projects's flow configuration (flow.json)**:
+For example, let's consider the following *project flow configuration* (`flow.json`):
 
 ```json
 {
@@ -168,52 +148,49 @@ For example, let's consider the following
 }
 ```
 
-It specifies list of paths to Verilog source files as "`sources`" dependency.
-Similarily it also provides an "`XDC`" file with constrains. ("`xdc`" dependency)
+It specifies a list of paths to Verilog source files as the `sources` dependency.
+Similarly, it also provides an `XDC` file with constraints (the `xdc` dependency).
 
-It also names a path for synthesis and logs ("`synth_log`", "`pack_log`").
-These two are optional on-demand outputs, meaning they won't be produces unless
-their paths are explicitely set.
+It also names paths for the synthesis and packing logs (`synth_log`, `pack_log`).
+These two are optional on-demand outputs, meaning they won't be produced unless their paths are explicitly set.
 
-"`top`" value is set to in order to specify the name of top Verilog module, which
-is required during synthesis.
+The `top` value is set in order to specify the name of the top Verilog module, which is required during synthesis.
 
-"`build_dir`" is an optional helper dependency. When available, modules will put
-their outputs into that directory. It's also an _on-demand_ output of `mkdirs`
-module in _xc7a50t_ flow definition, which means that if specified directory does
-not exist, `mkdirs` will create it and provide as `build_dir` dependency.
-
-building a bitstream for *x7a50t* would look like that:
+`build_dir` is an optional helper dependency.
+When available, modules will put their outputs into that directory.
+It's also an _on-demand_ output of the `mkdirs` module in the _xc7a50t_ flow definition, which means that if the
+specified directory does not exist, `mkdirs` will create it and provide it as the `build_dir` dependency.
 
 With this flow configuration, you can build a bitstream for arty_35 using the following command:
 
 ```
-$ python3 /path/to/sfbuild.py flow.json -p x7a50t -t bitstream
+$ f4pga flow.json -p x7a50t -t bitstream
 ```
 
 ### Pretend mode
 
-You can also add a `--pretend` (`-P`) option if you just want to see the results of
-dependency resolution for a specified target without building it. This is useful
-when you just want to know what files will be generated and where wilh they be
-stored.
+You can also add a `--pretend` (`-P`) option if you just want to see the results of dependency resolution for a
+specified target without building it.
+This is useful when you just want to know what files will be generated and where they will be stored.
 
 ### Info mode
 
 Modules have the ability to include descriptions of the dependencies they produce.
-Running _**sfbuild**_ with `--info` (`-i`) flag allows youn to see descriptions of
-these dependencies. This option doesn't require a target to be specified, but you
-still have to provuide a flow configuration and platform name.
+Running `f4pga` with the `--info` (`-i`) flag allows you to see descriptions of these dependencies.
+This option doesn't require a target to be specified, but you still have to provide a flow configuration and platform
+name.
 
-This is still an experimental option, most targets currently lack descriptions
-and no information whether the output is _on-demand_ is currently displayed.
+This is still an experimental option; most targets currently lack descriptions and no information on whether an output
+is _on-demand_ is currently displayed.
 
 Example:
+
+```bash
+$ f4pga flow.json -p x7a50t -i
 ```
-$ python3 /path/to/sfbuild.py flow.json -p x7a50t -i
-```
+
 ```
 Platform dependencies/targets:
   build_dir:
@@ -231,9 +208,11 @@ Platform dependencies/targets:
           module: `synth`
 ```
 
-_This is only a snippet of the entire output_
+:::{important}
+This is only a snippet of the entire output.
+:::
 
-### Summary of all available sfbuild options
+### Summary of all available options
 
 | long       | short | arguments              | description                                      |
 |------------|:-----:|------------------------|-------------------------------------------------|
@@ -244,11 +223,12 @@
 
 ### Dependency resolution display
 
-sfbuild displays some information about dependencies when requesting a target.
+F4PGA displays some information about dependencies when requesting a target.
 
 Here's an example of a possible output when trying to build `bitstream` target:
+
 ```
-sfbuild: Symbiflow Build System
+F4PGA Build System
 
 Scanning modules...
 
 Project status:
 
@@ -267,55 +247,62 @@ Project status:
     [N] sources: ['counter.v']
     [O] xdc: ['arty.xdc']
 
-sfbuild: DONE
+F4PGA: DONE
 ```
 
-The letters in the boxes describe the status of a dependency which's name is next
-to the box.
-
- * **X** - dependency unresolved. This isn't always a bad sign. Some dependencies
-   are not required to, such as "`pcf`".
- * **U** - dependency unreachable. The dependency has a module that could produce
-   it, but the module's dependencies are unresolved. This doesn't say whether the
-   dependency was necessary or not.
- * **O** - dependency present, unchanged. This dependency is already built and is
-   confirmed to stay unchanged during flow execution.
- * **N** - dependency present, new/changed. This dependency is already present on
-   the persistent storage, but it was either missing earlier, or
-   its content changed from the last time.
-   (WARNING: it won't continue to be reported as "**N**" after a successful build of
-   any target. This may lead to some false "**O**"s in some complex scenarios. This
-   should be fixed in the future.)
- * **S** - depenendency not present, resolved. This dependency is not
-   currently available on the persistent storage, however it will be produced within
-   flow's execution.
- * **R** - depenendency present, resolved, requires rebuild. This dependency is
-   currently available on the persistent storage, however it has to be rebuilt due
-   to the changes in the project.
-
-Additional info about a dependency will be displayed next to its name after a
-colon:
-
-* In case of dependencies that are to be built (**S**/**R**), there's a name of a
-  module that will produce this dependency, followed by "`->`" and a path or list of
-  paths to file(s)/directory(ies) that will be produced as this dependency.
- 
-* In case of dependencies which do not require execution of any modules, only
-  a path or list of paths to file(s)/directory(ies) that will be displayed
-
-* In case of unresolved dependencies (**X**), which are never produced by any
-  module, a text sying "`MISSING`" will be displayed
-* In case of unreachable dependencies, a name of such module that could produce
-  them will be displayed followed by "`-> ???`".
-
-In the example above file `counter.v` has been modified and is now marked as
-"**N**". This couses a bunch of other dependencies to be reqbuilt ("**R**").
+The letters in the boxes describe the status of a dependency whose name is next to the box.
+
+ * **X** - dependency unresolved.
+   This isn't always a bad sign. Some dependencies are not required, such as `pcf`.
+
+ * **U** - dependency unreachable.
+   The dependency has a module that could produce it, but the module's dependencies are unresolved.
+   This doesn't say whether the dependency was necessary or not.
+
+ * **O** - dependency present, unchanged.
+   This dependency is already built and is confirmed to stay unchanged during flow execution.
+
+ * **N** - dependency present, new/changed.
+   This dependency is already present on the persistent storage, but it was either missing earlier, or its content
+   changed from the last time.
+
+   :::{warning}
+   It won't continue to be reported as "**N**" after a successful build of any target.
+   This may lead to some false "**O**"s in some complex scenarios.
+   This should be fixed in the future.
+   :::
+
+ * **S** - dependency not present, resolved.
+   This dependency is not currently available on the persistent storage, however it will be produced within the
+   flow's execution.
+
+ * **R** - dependency present, resolved, requires rebuild.
+   This dependency is currently available on the persistent storage, however it has to be rebuilt due to the changes in
+   the project.
+
+Additional info about a dependency will be displayed next to its name after a colon:
+
+* In case of dependencies that are to be built (**S**/**R**), there's the name of the module that will produce this
+  dependency, followed by `->` and a path or list of paths to file(s)/directory(ies) that will be produced as this
+  dependency.
+
+* In case of dependencies which do not require execution of any modules, only a path or list of paths to
+  file(s)/directory(ies) will be displayed.
+
+* In case of unresolved dependencies (**X**), which are never produced by any module, the text "`MISSING`" will be
+  displayed.
+
+* In case of unreachable dependencies, the name of a module that could produce them will be displayed, followed by
+  `-> ???`.
+
+In the example above, the file `counter.v` has been modified and is now marked as "**N**".
+This causes a number of other dependencies to be rebuilt ("**R**").
 
 `build_dir` and `xdc` were already present, so they are marked as "**O**".
 
 ## Common targets and values
 
 Targets and values are named with some conventions.
-Below are lists of the target and value names along with their meanings"
+Below are lists of the target and value names along with their meanings.
 
 ### Need to be provided by the user

diff --git a/docs/f4pga/modules/index.md b/docs/f4pga/modules/index.md
index 10e172e5b..dd0c0e1f4 100644
--- a/docs/f4pga/modules/index.md
+++ b/docs/f4pga/modules/index.md
@@ -20,15 +20,17 @@
 data.
 And they contain snippets of _module configurations_
 
 A _module configuration_ is a structure with the following fields:
-* `takes` - a dictionary that contains keys which are names of the dependencies
-  used by the module. The values are paths to those dependencies. They can be
-  either singular strings or lists of strings.
-* `produces` = a dictionary that contains keys which are names of the
-  dependencies produced by the module. The values are requested filenames for the
-  files generated by the module. They can be either singular strings or lists of
-  strings.
+* `takes` - a dictionary that contains keys which are names of the dependencies used by the module.
+  The values are paths to those dependencies.
+  They can be either singular strings or lists of strings.
+
+* `produces` - a dictionary that contains keys which are names of the dependencies produced by the module.
+  The values are requested filenames for the files generated by the module.
+  They can be either singular strings or lists of strings.
+
 * `values` - a dictionary that contains other values used to configure the module.
   The keys are value's names and the values can have any type.
+
 * `platform` - Platform's name. This is a string.
 
 ### Platform-level configuration

From 3ab6f2b10d81977afb6f11f983fd56a9f6092949 Mon Sep 17 00:00:00 2001
From: Unai Martinez-Corral
Date: Mon, 28 Feb 2022 17:01:50 +0100
Subject: [PATCH 08/33] f4pga: add requirements.txt

Signed-off-by: Unai Martinez-Corral

---
 f4pga/requirements.txt | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 f4pga/requirements.txt

diff --git a/f4pga/requirements.txt b/f4pga/requirements.txt
new file mode 100644
index 000000000..3fcfb51b2
--- /dev/null
+++ b/f4pga/requirements.txt
@@ -0,0 +1 @@
+colorama

From 898eab8232bc0b4bb4f820fb68f28e3594b0a715 Mon Sep 17 00:00:00 2001
From: Unai Martinez-Corral
Date: Tue, 1 Mar 2022 22:52:01 +0100
Subject: [PATCH 09/33] f4pga/module_runner: import importlib.util explicitly

Signed-off-by: Unai Martinez-Corral

---
 f4pga/sf_module_runner/__init__.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/f4pga/sf_module_runner/__init__.py b/f4pga/sf_module_runner/__init__.py
index 23be3c492..64eb37b8d 100644
--- a/f4pga/sf_module_runner/__init__.py
+++ b/f4pga/sf_module_runner/__init__.py
@@ -2,6 +2,7 @@
 
 from contextlib import contextmanager
 import importlib
+import importlib.util
 import os
 from sf_module import Module, ModuleContext, get_mod_metadata
 from sf_common import ResolutionEnv, deep, sfprint
@@ -36,7 +37,7 @@ def get_module(path: str):
     cached = preloaded_modules.get(path)
     if cached:
         return cached.ModuleClass
-    
+
     mod = import_module_from_path(path)
     preloaded_modules[path] = mod
 
@@ -53,7 +54,7 @@ def __init__(self, share: str, bin: str, config: 'dict[str, ]'):
         self.share = share
         self.bin = bin
         self.config = config
-    
+
     def make_r_env(self):
         return ResolutionEnv(self.config['values'])
 
@@ -66,7 +67,7 @@ def __init__(self, module: str, mode: str, e: Exception):
         self.module = module
         self.mode = mode
         self.e = e
-    
+
     def __str__(self) -> str:
         return f'ModuleFailException:\n  Module `{self.module}` failed ' \
                f'MODE: \'{self.mode}\'\n\nException `{type(self.e)}`: {self.e}'
@@ -81,11 +82,11 @@ def module_io(module: Module):
 
 def module_map(module: Module, ctx: ModRunCtx):
     try:
-        mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share, 
+        mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share,
                                 ctx.bin)
     except Exception as e:
         raise ModuleFailException(module.name, 'map', e)
-    
+
return _realpath_deep(vars(mod_ctx.outputs)) def module_exec(module: Module, ctx: ModRunCtx): @@ -94,7 +95,7 @@ def module_exec(module: Module, ctx: ModRunCtx): ctx.bin) except Exception as e: raise ModuleFailException(module.name, 'exec', e) - + sfprint(1, 'Executing module ' f'`{Style.BRIGHT + module.name + Style.RESET_ALL}`:') current_phase = 1 From 72913daac241844479e40152592e222b646656e9 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Wed, 2 Mar 2022 01:48:00 +0100 Subject: [PATCH 10/33] f4pga/sfbuild: get INSTALL_DIR from environ Signed-off-by: Unai Martinez-Corral --- f4pga/sfbuild.py | 73 ++++++++++++++++++++++++------------------------ 1 file changed, 37 insertions(+), 36 deletions(-) diff --git a/f4pga/sfbuild.py b/f4pga/sfbuild.py index 82768068c..df6447170 100755 --- a/f4pga/sfbuild.py +++ b/f4pga/sfbuild.py @@ -23,6 +23,7 @@ from argparse import Namespace import os +from os import environ import json from typing import Iterable from colorama import Fore, Style @@ -45,7 +46,7 @@ mypath = os.path.dirname(mypath) binpath = os.path.realpath(os.path.join(mypath, '..')) -share_dir_path = os.path.realpath(os.path.join(mypath, '../../share/symbiflow')) +share_dir_path = os.path.realpath(f"{environ.get('INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow") class DependencyNotProducedException(Exception): dep_name: str @@ -54,7 +55,7 @@ class DependencyNotProducedException(Exception): def __init__(self, dep_name: str, provider: str): self.dep_name = dep_name self.provider = provider - + def __str__(self) -> str: return f'Stage `{self.provider}` did not produce promised ' \ f'dependency `{self.dep_name}`' @@ -69,10 +70,10 @@ def platform_stages(platform_flow, r_env): for stage_name, modulestr in platform_flow['stages'].items(): mod_opts = stage_options.get(stage_name) if stage_options else None yield Stage(stage_name, modulestr, mod_opts, r_env) - + def req_exists(r): """ Checks whether a dependency exists on a drive. 
""" - + if type(r) is str: if not os.path.isfile(r) and not os.path.islink(r) \ and not os.path.isdir(r): @@ -121,21 +122,21 @@ def prepare_stage_input(stage: Stage, platform_name: str, values: dict, paths = dep_paths.get(take.name) if paths: # Some takes may be not required takes[take.name] = paths - + produces = {} for prod in stage.produces: if dep_paths.get(prod.name): produces[prod.name] = dep_paths[prod.name] elif config_paths.get(prod.name): produces[prod.name] = config_paths[prod.name] - + stage_mod_cfg = { 'takes': takes, 'produces': produces, 'values': values, 'platform': platform_name, } - + return stage_mod_cfg def update_dep_statuses(paths, consumer: str, symbicache: SymbiCache): @@ -154,7 +155,7 @@ def dep_differ(paths, consumer: str, symbicache: SymbiCache): Check if a dependency differs from its last version, lack of dependency is treated as "differs" """ - + if type(paths) is str: s = symbicache.get_status(paths, consumer) if s == 'untracked': @@ -198,7 +199,7 @@ class Flow: # Dependendecy to build target: str - # Values in global scope + # Values in global scope cfg: FlowConfig # dependency-producer map os_map: 'dict[str, Stage]' @@ -228,14 +229,14 @@ def __init__(self, target: str, cfg: FlowConfig, self.deps_rebuilds = {} self._resolve_dependencies(self.target, set()) - + def _dep_will_differ(self, dep: str, paths, consumer: str): if not self.symbicache: # Handle --nocache mode return True return dep_will_differ(dep, paths, consumer, self.os_map, self.run_stages, self.symbicache) - + def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): # Initialize the dependency status if necessary if self.deps_rebuilds.get(dep) is None: @@ -251,7 +252,7 @@ def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): # TODO: Check if the dependency is "on-demand" and force it in provider's # config if it is. - + for take in provider.takes: self._resolve_dependencies(take.name, stages_checked) # If any of the required dependencies is unavailable, then the @@ -263,17 +264,17 @@ def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): if not take_paths and take.spec == 'req': _print_unreachable_stage_message(provider, take) return - + if self._dep_will_differ(take.name, take_paths, provider.name): sfprint(2, f'{take.name} is causing rebuild for {provider.name}') self.run_stages.add(provider.name) self.deps_rebuilds[take.name] += 1 - + stage_values = self.cfg.get_r_env(provider.name).values modrunctx = config_mod_runctx(provider, self.cfg.platform, stage_values, self.dep_paths, self.cfg.get_dependency_overrides()) - + outputs = module_map(provider.module, modrunctx) stages_checked.add(provider.name) @@ -282,7 +283,7 @@ def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): for _, out_paths in outputs.items(): if (out_paths is not None) and not (req_exists(out_paths)): self.run_stages.add(provider.name) - + # Verify module's outputs and add paths as values. outs = outputs.keys() # print(outs) @@ -303,7 +304,7 @@ def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): provider.value_overrides[dep_value_str(o.name)] = \ outputs.get(o.name) - + def print_resolved_dependencies(self, verbosity: int): deps = list(self.deps_rebuilds.keys()) deps.sort() @@ -333,7 +334,7 @@ def print_resolved_dependencies(self, verbosity: int): status = Fore.RED + '[U]' + Fore.RESET source = \ f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} -> ???' 
- + sfprint(verbosity, f' {Style.BRIGHT + status} ' f'{dep + Style.RESET_ALL}: {source}') @@ -344,12 +345,12 @@ def _build_dep(self, dep): if not paths: sfprint(2, f'Dependency {dep} is unresolved.') return False - + if req_exists(paths) and not run: return True else: assert(provider) - + any_dep_differ = False if self.symbicache else True for p_dep in provider.takes: if not self._build_dep(p_dep.name): @@ -360,7 +361,7 @@ def _build_dep(self, dep): any_dep_differ |= \ update_dep_statuses(self.dep_paths[p_dep.name], provider.name, self.symbicache) - + # If dependencies remained the same, consider the dep as up-to date # For example, when changing a comment in Verilog source code, # the initial dependency resolution will report a need for complete @@ -372,7 +373,7 @@ def _build_dep(self, dep): f'{Style.BRIGHT + dep + Style.RESET_ALL}` because all ' f'of it\'s dependencies remained unchanged') return True - + stage_values = self.cfg.get_r_env(provider.name).values modrunctx = config_mod_runctx(provider, self.cfg.platform, stage_values, self.dep_paths, @@ -380,12 +381,12 @@ def _build_dep(self, dep): module_exec(provider.module, modrunctx) self.run_stages.discard(provider.name) - + if not req_exists(paths): raise DependencyNotProducedException(dep, provider.name) - + return True - + def execute(self): self._build_dep(self.target) if self.symbicache: @@ -402,7 +403,7 @@ def display_dep_info(stages: 'Iterable[Stage]'): l = len(out.name) if l > longest_out_name_len: longest_out_name_len = l - + desc_indent = longest_out_name_len + 7 nl_indentstr = '\n' for _ in range(0, desc_indent): @@ -418,9 +419,9 @@ def display_dep_info(stages: 'Iterable[Stage]'): if out.spec == 'req': specstr = f'{Fore.BLUE}guaranteed{Fore.RESET}' elif out.spec == 'maybe': - specstr = f'{Fore.YELLOW}not guaranteed{Fore.RESET}' + specstr = f'{Fore.YELLOW}not guaranteed{Fore.RESET}' elif out.spec == 'demand': - specstr = f'{Fore.RED}on-demand{Fore.RESET}' + specstr = f'{Fore.RED}on-demand{Fore.RESET}' pgen = f'{Style.DIM}stage: `{stage.name}`, '\ f'spec: {specstr}{Style.RESET_ALL}' pdesc = stage.meta[out.name].replace('\n', nl_indentstr) @@ -432,7 +433,7 @@ def display_stage_info(stage: Stage): sfprint(0, f'Stage does not exist') sfbuild_fail() return - + sfprint(0, f'Stage `{Style.BRIGHT}{stage.name}{Style.RESET_ALL}`:') sfprint(0, f' Module: `{Style.BRIGHT}{stage.module.name}{Style.RESET_ALL}`') sfprint(0, f' Module info:') @@ -481,13 +482,13 @@ def verify_platform_stage_params(flow_cfg: FlowConfig, if args.platform not in flow_cfg.platforms(): sfprint(0, f'Platform `{platform}`` is not in project.') return False - + if stage: if not verify_stage(platform, stage, mypath): sfprint(0, f'Stage `{stage}` is invalid.') sfbuild_fail() return False - + return True def get_platform_name_for_part(part_name: str): @@ -506,7 +507,7 @@ def cmd_build(args: Namespace): """ sfbuild's `build` command implementation """ project_flow_cfg: ProjectFlowConfig = None - + platform = args.platform if platform is None: @@ -596,7 +597,7 @@ def cmd_show_dependencies(args: Namespace): if not verify_platform_stage_params(flow_cfg, args.platform): sfbuild_fail() return - + platform_overrides: 'set | None' = None if args.platform is not None: platform_overrides = \ @@ -613,14 +614,14 @@ def cmd_show_dependencies(args: Namespace): f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' else: prstr = f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' - + display_list.append((dep_name, prstr)) - + display_list.sort(key = lambda p: p[0]) for _, 
prstr in display_list: sfprint(0, prstr) - + set_verbosity_level(-1) if __name__ == '__main__': From ba3a9d925e3e526d71a3809114aedf7c0d68288d Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Wed, 2 Mar 2022 10:11:09 +0100 Subject: [PATCH 11/33] f4pga/platforms/xc7a50t: override the path to arty.xdc Signed-off-by: Unai Martinez-Corral --- f4pga/platforms/xc7a50t.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/f4pga/platforms/xc7a50t.json b/f4pga/platforms/xc7a50t.json index b18f732b4..5444f874c 100644 --- a/f4pga/platforms/xc7a50t.json +++ b/f4pga/platforms/xc7a50t.json @@ -81,7 +81,7 @@ "OUT_EBLIF": "${:eblif}", "PYTHON3": "${python3}", "UTILS_PATH": "${shareDir}/scripts", - "INPUT_XDC_FILES": "${:xdc}" + "INPUT_XDC_FILES": "xc7/counter_test/arty.xdc" } } }, From eeac9239d7e463c65ccf9cb4da0453af5bd31f42 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Mon, 28 Feb 2022 17:06:17 +0100 Subject: [PATCH 12/33] f4pga: rename single file modules Signed-off-by: Unai Martinez-Corral --- f4pga/{sf_common/__init__.py => sf_common.py} | 0 f4pga/{sf_module/__init__.py => sf_module.py} | 0 f4pga/{sf_module_runner/__init__.py => sf_module_runner.py} | 0 3 files changed, 0 insertions(+), 0 deletions(-) rename f4pga/{sf_common/__init__.py => sf_common.py} (100%) rename f4pga/{sf_module/__init__.py => sf_module.py} (100%) rename f4pga/{sf_module_runner/__init__.py => sf_module_runner.py} (100%) diff --git a/f4pga/sf_common/__init__.py b/f4pga/sf_common.py similarity index 100% rename from f4pga/sf_common/__init__.py rename to f4pga/sf_common.py diff --git a/f4pga/sf_module/__init__.py b/f4pga/sf_module.py similarity index 100% rename from f4pga/sf_module/__init__.py rename to f4pga/sf_module.py diff --git a/f4pga/sf_module_runner/__init__.py b/f4pga/sf_module_runner.py similarity index 100% rename from f4pga/sf_module_runner/__init__.py rename to f4pga/sf_module_runner.py From 849d3efd48841706f6510f8ba27b0720c9466134 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Sun, 27 Feb 2022 15:27:49 +0100 Subject: [PATCH 13/33] ci: add GitHub Actions workflow 'pyFPGA' Signed-off-by: Unai Martinez-Corral --- .github/workflows/pyF4PGA.yml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/pyF4PGA.yml diff --git a/.github/workflows/pyF4PGA.yml b/.github/workflows/pyF4PGA.yml new file mode 100644 index 000000000..657dea9b7 --- /dev/null +++ b/.github/workflows/pyF4PGA.yml @@ -0,0 +1,26 @@ +name: py4FPGA + +on: + push: + pull_request: + +jobs: + + Run-tests: + runs-on: ubuntu-latest + strategy: + fail-fast: false + + steps: + + - uses: actions/checkout@v2 +# with: +# submodules: recursive + + - name: Test py4FPGA + run: | + pip3 install -r f4pga/requirements.txt + + PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py + + PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py -h From f58637f3d43a3fec0224b7c290bd574e4cf679f3 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Tue, 1 Mar 2022 04:07:15 +0100 Subject: [PATCH 14/33] ci: test f4pga|sfbuild Signed-off-by: Unai Martinez-Corral --- .github/workflows/pyF4PGA.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/pyF4PGA.yml b/.github/workflows/pyF4PGA.yml index 657dea9b7..f2dc4f26e 100644 --- a/.github/workflows/pyF4PGA.yml +++ b/.github/workflows/pyF4PGA.yml @@ -24,3 +24,8 @@ jobs: PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py -h + + - name: Test py4FPGA build + run: | + make env + 
PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py build --platform xc7a50t -t bitstream From c13d6fc6ebf70dc6dbde69d3691fa0d10a7a7bc9 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Tue, 1 Mar 2022 20:23:47 +0100 Subject: [PATCH 15/33] ci: add sftest.json Authored-By: Krzysztof Boronski Signed-off-by: Unai Martinez-Corral --- .github/sftest.json | 43 +++++++++++++++++++++++++++++++++++ .github/workflows/pyF4PGA.yml | 2 +- 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 .github/sftest.json diff --git a/.github/sftest.json b/.github/sftest.json new file mode 100644 index 000000000..3870ace7b --- /dev/null +++ b/.github/sftest.json @@ -0,0 +1,43 @@ +{ + "default_platform": "xc7a50t", + "values": { + "top": "top" + }, + "dependencies": { + "sources": [ + "counter.v" + ], + "synth_log": "synth.log", + "pack_log": "pack.log" + }, + "xc7a200t": { + "default_target": "bitstream", + "dependencies": { + "xdc": [ + "arty200.xdc" + ], + "build_dir": "build/arty_200" + } + }, + "xc7a100t": { + "default_target": "bitstream", + "dependencies": { + "xdc": [ + "arty.xdc" + ], + "build_dir": "build/arty100" + } + }, + "xc7a50t": { + "default_target": "bitstream", + "dependencies": { + "build_dir": "build/arty_35", + "xdc": [ + "arty.xdc" + ] + }, + "values": { + "part": "xc7a35tcpg236-1" + } + } +} diff --git a/.github/workflows/pyF4PGA.yml b/.github/workflows/pyF4PGA.yml index f2dc4f26e..c3bccf1fa 100644 --- a/.github/workflows/pyF4PGA.yml +++ b/.github/workflows/pyF4PGA.yml @@ -28,4 +28,4 @@ jobs: - name: Test py4FPGA build run: | make env - PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py build --platform xc7a50t -t bitstream + PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py build --flow .github/sftest.json -t bitstream From 22003c2f49cac10d7f3c90d1a1507bcb05657cda Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Tue, 1 Mar 2022 20:37:47 +0100 Subject: [PATCH 16/33] ci: make examples env Signed-off-by: Unai Martinez-Corral --- .github/sftest.json | 2 +- .github/workflows/pyF4PGA.yml | 33 +++++++++++++++++++++++++++------ 2 files changed, 28 insertions(+), 7 deletions(-) diff --git a/.github/sftest.json b/.github/sftest.json index 3870ace7b..6beeb0218 100644 --- a/.github/sftest.json +++ b/.github/sftest.json @@ -5,7 +5,7 @@ }, "dependencies": { "sources": [ - "counter.v" + "xc7/counter_test/counter.v" ], "synth_log": "synth.log", "pack_log": "pack.log" diff --git a/.github/workflows/pyF4PGA.yml b/.github/workflows/pyF4PGA.yml index c3bccf1fa..95e23a437 100644 --- a/.github/workflows/pyF4PGA.yml +++ b/.github/workflows/pyF4PGA.yml @@ -17,15 +17,36 @@ jobs: # with: # submodules: recursive - - name: Test py4FPGA + - name: Prepare environment run: | - pip3 install -r f4pga/requirements.txt + sudo apt update -y + sudo apt install -y git wget xz-utils - PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py + git clone --recurse-submodules https://github.com/chipsalliance/f4pga-examples + cd f4pga-examples - PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py -h + wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O conda_installer.sh + + export INSTALL_DIR=/opt/f4pga + + bash conda_installer.sh -u -b -p $INSTALL_DIR/xc7/conda + source "$INSTALL_DIR/xc7/conda/etc/profile.d/conda.sh" + conda env create -f xc7/environment.yml + + mkdir -p $INSTALL_DIR/xc7/install + wget -qO- https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/foss-fpga-tools/symbiflow-arch-defs/continuous/install/535/20220128-000432/symbiflow-arch-defs-install-5fa5e715.tar.xz | 
tar -xJC $INSTALL_DIR/xc7/install + wget -qO- https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/foss-fpga-tools/symbiflow-arch-defs/continuous/install/535/20220128-000432/symbiflow-arch-defs-xc7a50t_test-5fa5e715.tar.xz | tar -xJC $INSTALL_DIR/xc7/install - name: Test py4FPGA build run: | - make env - PYTHONPATH=$(pwd)/f4pga python3 f4pga/sfbuild.py build --flow .github/sftest.json -t bitstream + cd f4pga-examples + + export INSTALL_DIR=/opt/f4pga + export PATH="$INSTALL_DIR/xc7/install/bin:$PATH"; + source "$INSTALL_DIR/xc7/conda/etc/profile.d/conda.sh" + + conda activate xc7 + + pip install -r ../f4pga/requirements.txt + + PYTHONPATH=$(pwd)/../f4pga python3 ../f4pga/sfbuild.py build --flow ../.github/sftest.json -t bitstream From 920d1d5ec302bd5958ec87c753e0c45d74d84776 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Tue, 1 Mar 2022 02:52:59 +0100 Subject: [PATCH 17/33] f4pga: test pyF4PGA in CI Signed-off-by: Unai Martinez-Corral --- .github/workflows/pyF4PGA.yml | 17 +- f4pga/CMakeLists.txt | 146 ------------------ f4pga/__init__.py | 1 - f4pga/setup.py | 31 +++- f4pga/sf_common.py | 14 +- f4pga/sf_common_modules/fasm.py | 16 +- .../generic_script_wrapper.py | 40 ++--- f4pga/sf_common_modules/io_rename.py | 12 +- f4pga/sf_common_modules/mkdirs.py | 8 +- f4pga/sf_common_modules/pack.py | 10 +- f4pga/sf_common_modules/place.py | 16 +- f4pga/sf_common_modules/place_constraints.py | 10 +- f4pga/sf_common_modules/route.py | 12 +- f4pga/sf_common_modules/synth.py | 22 +-- f4pga/sf_flow_config.py | 60 +++---- f4pga/sf_module.py | 18 +-- f4pga/sf_module_inspector.py | 4 +- f4pga/sf_module_runner.py | 4 +- f4pga/sf_stage.py | 22 +-- f4pga/sf_ugly.py | 8 +- f4pga/sfbuild | 5 - f4pga/sfbuild.py | 29 ++-- 22 files changed, 196 insertions(+), 309 deletions(-) delete mode 100644 f4pga/CMakeLists.txt delete mode 100644 f4pga/sfbuild diff --git a/.github/workflows/pyF4PGA.yml b/.github/workflows/pyF4PGA.yml index 95e23a437..b206f9c0c 100644 --- a/.github/workflows/pyF4PGA.yml +++ b/.github/workflows/pyF4PGA.yml @@ -6,7 +6,8 @@ on: jobs: - Run-tests: + + Test-pip: runs-on: ubuntu-latest strategy: fail-fast: false @@ -39,14 +40,20 @@ jobs: - name: Test py4FPGA build run: | - cd f4pga-examples - export INSTALL_DIR=/opt/f4pga export PATH="$INSTALL_DIR/xc7/install/bin:$PATH"; source "$INSTALL_DIR/xc7/conda/etc/profile.d/conda.sh" conda activate xc7 - pip install -r ../f4pga/requirements.txt + cd f4pga + pip install --use-feature=in-tree-build . + cd .. 
- PYTHONPATH=$(pwd)/../f4pga python3 ../f4pga/sfbuild.py build --flow ../.github/sftest.json -t bitstream + cd f4pga-examples + f4pga build --flow ../.github/sftest.json -t bitstream + + - name: Test py4FPGA (PYTHONPATH) + run: | + PYTHONPATH=$(pwd) python3 f4pga/sfbuild.py + PYTHONPATH=$(pwd) python3 f4pga/sfbuild.py -h diff --git a/f4pga/CMakeLists.txt b/f4pga/CMakeLists.txt deleted file mode 100644 index 2f2a9a8de..000000000 --- a/f4pga/CMakeLists.txt +++ /dev/null @@ -1,146 +0,0 @@ -# Installs sfbuild - experimental Symbiflow Build System - -function(INSTALL_DIR) - # Create directory during installation phase - set(options) - set(one_value_args INSTALL_DIRECTORY) - set(multi_value_args) - cmake_parse_arguments( - INSTALL_DIR - "${options}" - "${one_value_args}" - "${multi_value_args}" - ${ARGN} - ) - - set(make_dir_code "file(MAKE_DIRECTORY ${INSTALL_DIR_INSTALL_DIRECTORY})") - install(CODE ${make_dir_code}) - -endfunction() - -function(INSTALL_DIR_CONTENT) - # Install files from ROOT_DIRECTORY/FILES_DIRECTORY directory into a FILES_DIRECTORY subdirectory of INSTALL_DIRECTORY - set(options) - set(one_value_args - ROOT_DIRECTORY - FILES_DIRECTORY - DESTINATION) - set(multi_value_args - FILES - INSTALL_OPTS) - cmake_parse_arguments( - INSTALL_DIR_CONTENT - "${options}" - "${one_value_args}" - "${multi_value_args}" - ${ARGN} - ) - - if(NOT DEFINED INSTALL_DIR_CONTENT_ROOT_DIRECTORY) - set(INSTALL_DIR_CONTENT_ROOT_DIRECTORY .) - endif() - if(NOT DEFINED INSTALL_DIR_CONTENT_FILES_DIRECTORY) - set(INSTALL_DIR_CONTENT_FILES_DIRECTORY .) - endif() - - set(file_paths) - foreach(file ${INSTALL_DIR_CONTENT_FILES}) - list(APPEND file_paths ${INSTALL_DIR_CONTENT_ROOT_DIRECTORY}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY}/${file}) - endforeach() - - install(FILES ${file_paths} - DESTINATION ${INSTALL_DIR_CONTENT_DESTINATION}/${INSTALL_DIR_CONTENT_FILES_DIRECTORY} - ${INSTALL_DIR_CONTENT_INSTALL_OPTS}) - -endfunction() - - -find_package(Python3 COMPONENTS Interpreter REQUIRED) - -get_target_property_required(VPR env VPR) -get_target_property_required(GENFASM env GENFASM) - -set(SFBUILD_SUPPORTED_PLATFORMS - ql-eos-s3 - xc7a50t - xc7a100t - xc7a200t - ql-k4n8_fast - ql-k4n8_slow) - -# Create required directories -foreach(DIR_PATH ${SFBUILD_DIRECTORIES}) - install_dir(INSTALL_DIRECTORY ${CMAKE_INSTALL_PREFIX}/bin/${DIR_PATH}) -endforeach() - -# Install sfbuild -install_dir_content( - FILES - __init__.py - sf_argparse.py - sf_cache.py - sf_flow_config.py - sf_module_inspector.py - sf_stage.py - sf_ugly.py - sfbuild.py - sfbuild - DESTINATION bin/sfbuild - INSTALL_OPTS - PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) -install_dir_content( - FILES __init__.py - FILES_DIRECTORY sf_common - DESTINATION bin/sfbuild - INSTALL_OPTS - PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) -install_dir_content( - FILES __init__.py - FILES_DIRECTORY sf_module - DESTINATION bin/sfbuild - INSTALL_OPTS - PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) -install_dir_content( - FILES __init__.py - FILES_DIRECTORY sf_module_runner - DESTINATION bin/sfbuild - INSTALL_OPTS - PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) -# Install common modules -install_dir_content( - FILES - fasm.py - generic_script_wrapper.py - io_rename.py - mkdirs.py - pack.py - place_constraints.py - place.py - route.py - synth.py - FILES_DIRECTORY 
sf_common_modules - DESTINATION bin/sfbuild - INSTALL_OPTS - PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE) -# Install platform flow definitions -set(sfbuild_supported_platform_defs) -foreach(SFBUILD_PLATFORM ${SFBUILD_SUPPORTED_PLATFORMS}) - set(sfbuild_platform_def "${SFBUILD_PLATFORM}.json") - list(APPEND sfbuild_supported_platform_defs ${sfbuild_platform_def}) -endforeach() -install_dir_content( - FILES ${sfbuild_supported_platform_defs} - FILES_DIRECTORY platforms - DESTINATION bin/sfbuild - INSTALL_OPTS - PERMISSIONS WORLD_EXECUTE WORLD_READ OWNER_WRITE OWNER_READ GROUP_READ) - -# Install part_db -install_dir_content( - FILES - parts.json - FILES_DIRECTORY part_db - DESTINATION bin/sfbuild - INSTALL_OPTS - PERMISSIONS WORLD_READ OWNER_WRITE OWNER_READ GROUP_READ -) diff --git a/f4pga/__init__.py b/f4pga/__init__.py index ad48ec86c..e69de29bb 100644 --- a/f4pga/__init__.py +++ b/f4pga/__init__.py @@ -1 +0,0 @@ -import sfbuild \ No newline at end of file diff --git a/f4pga/setup.py b/f4pga/setup.py index bb6de36b3..279e26932 100644 --- a/f4pga/setup.py +++ b/f4pga/setup.py @@ -19,6 +19,7 @@ # SPDX-License-Identifier: Apache-2.0 from pathlib import Path +from typing import List from setuptools import setup as setuptools_setup @@ -27,6 +28,28 @@ packagePath = Path(__file__).resolve().parent +requirementsFile = packagePath / "requirements.txt" + + +# Read requirements file and add them to package dependency list +def get_requirements(file: Path) -> List[str]: + requirements = [] + with file.open("r") as fh: + for line in fh.read().splitlines(): + if line.startswith("#") or line == "": + continue + elif line.startswith("-r"): + # Remove the first word/argument (-r) + filename = " ".join(line.split(" ")[1:]) + requirements += get_requirements(file.parent / filename) + elif line.startswith("https"): + # Convert 'URL#NAME' to 'NAME @ URL' + splitItems = line.split("#") + requirements.append("{} @ {}".format(splitItems[1], splitItems[0])) + else: + requirements.append(line) + return requirements + sf = "symbiflow" shwrappers = "f4pga.wrappers.sh.__init__" @@ -54,15 +77,21 @@ description="F4PGA.", url="https://github.com/chipsalliance/f4pga", packages=[ + "f4pga", + "f4pga.sf_common_modules", "f4pga.wrappers.sh", ], package_dir={"f4pga": "."}, package_data={ + 'f4pga': ['platforms/*.json'], 'f4pga.wrappers.sh': ['xc7/*.f4pga.sh', 'quicklogic/*.f4pga.sh'] }, classifiers=[], python_requires='>=3.6', + install_requires=list(set(get_requirements(requirementsFile))), entry_points={ - "console_scripts": wrapper_entrypoints + "console_scripts": [ + "f4pga = f4pga.sfbuild:main", + ] + wrapper_entrypoints }, ) diff --git a/f4pga/sf_common.py b/f4pga/sf_common.py index 9bac5519d..efb1e88ae 100644 --- a/f4pga/sf_common.py +++ b/f4pga/sf_common.py @@ -27,7 +27,7 @@ def with_qualifier(name: str, q: str) -> str: _sfbuild_module_collection_name_to_path = {} def scan_modules(mypath: str): global _sfbuild_module_collection_name_to_path - + sfbuild_home = mypath sfbuild_home_dirs = os.listdir(sfbuild_home) sfbuild_module_dirs = \ @@ -66,7 +66,7 @@ def d(paths, *args, **kwargs): return [d(p) for p in paths]; elif type(paths) is dict: return dict([(k, d(p)) for k, p in paths.items()]) - + return d def file_noext(path: str): @@ -99,7 +99,7 @@ def __init__(self, share: str, eblif, values: Namespace, self.device_name = values.vpr_grid_layout_name self.eblif = os.path.realpath(eblif) if values.vpr_options is not None: - self.optional = 
options_dict_to_list(values.vpr_options) + self.optional = options_dict_to_list(values.vpr_options) else: self.optional = [] if vpr_extra_opts is not None: @@ -159,7 +159,7 @@ def options_dict_to_list(opt_dict: dict): Converts a dictionary of named options for CLI program to a list. Example: { "option_name": "value" } -> [ "--option_name", "value" ] """ - + opts = [] for key, val in opt_dict.items(): opts.append('--' + key) @@ -186,7 +186,7 @@ def fatal(code, message): with a given return code. """ - print(f'[FATAL ERROR]: {message}') + raise(Exception(f'[FATAL ERROR]: {message}')) exit(code) class ResolutionEnv: @@ -202,7 +202,7 @@ class ResolutionEnv: def __init__(self, values={}): self.values = values - + def __copy__(self): return ResolutionEnv(self.values.copy()) @@ -251,7 +251,7 @@ def add_values(self, values: dict): def sfprint(verbosity: int, *args): """ Print with regards to currently set verbosity level """ - + global verbosity_level if verbosity <= verbosity_level: print(*args) diff --git a/f4pga/sf_common_modules/fasm.py b/f4pga/sf_common_modules/fasm.py index 83d57196b..7446f954e 100644 --- a/f4pga/sf_common_modules/fasm.py +++ b/f4pga/sf_common_modules/fasm.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # @@ -31,10 +31,10 @@ def map_io(self, ctx: ModuleContext): return { 'fasm': fasm_output_path(build_dir, ctx.values.top) } - - def execute(self, ctx: ModuleContext): + + def execute(self, ctx: ModuleContext): build_dir = os.path.dirname(ctx.takes.eblif) - + vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values) optional = [] @@ -48,12 +48,12 @@ def execute(self, ctx: ModuleContext): '--device', vprargs.device_name, '--read_rr_graph', vprargs.rr_graph ] + vprargs.optional - + if get_verbosity_level() >= 2: yield 'Generating FASM...\n ' + ' '.join(s) else: yield 'Generating FASM...' - + sub(*s, cwd=build_dir) default_fasm_output_name = fasm_output_path(build_dir, ctx.values.top) @@ -65,7 +65,7 @@ def execute(self, ctx: ModuleContext): concat_fasm(ctx.outputs.fasm, ctx.takes.fasm_extra, ctx.outputs.fasm) else: yield 'No extra FASM to append' - + def __init__(self, _): self.name = 'fasm' self.no_of_phases = 2 diff --git a/f4pga/sf_common_modules/generic_script_wrapper.py b/f4pga/sf_common_modules/generic_script_wrapper.py index b70846016..632c9ecf3 100644 --- a/f4pga/sf_common_modules/generic_script_wrapper.py +++ b/f4pga/sf_common_modules/generic_script_wrapper.py @@ -4,7 +4,7 @@ """ This module is intended for wrapping simple scripts without rewriting them as -an sfbuild module. This is mostly to maintain compatibility with workflows +an sfbuild module. This is mostly to maintain compatibility with workflows that do not use sfbuild and instead rely on legacy scripts. Accepted module parameters: @@ -24,7 +24,7 @@ dependency alsogets two extra values associated with it: `:dependency_name[noext]`, which contains the path to the dependency the extension with anything after last "." removed and `:dependency_name[dir]` which - contains directory paths of the dependency. This is useful for deriving an output + contains directory paths of the dependency. This is useful for deriving an output name from the input. * `meta` (string, optional): Description of the output dependency. 
* `inputs` (dict[string -> string | bool], mandatory): @@ -49,8 +49,8 @@ import shutil import re -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # @@ -106,7 +106,7 @@ def _get_input_references(input: str) -> InputReferences: refs.dependencies.add(dep_name) else: refs.values.add(match_str) - + return refs @@ -146,14 +146,14 @@ def map_io(self, ctx: ModuleContext): for dep, _, out_path in self.file_outputs: out_path_resolved = ctx.r_env.resolve(out_path, final=True) outputs[dep] = out_path_resolved - + if self.stdout_target: out_path_resolved = \ ctx.r_env.resolve(self.stdout_target[1], final=True) outputs[self.stdout_target[0]] = out_path_resolved - + return outputs - + def execute(self, ctx: ModuleContext): _add_extra_values_to_env(ctx) @@ -163,9 +163,9 @@ def execute(self, ctx: ModuleContext): + self.get_args(ctx) if self.interpreter: sub_args = [ctx.r_env.resolve(self.interpreter, final=True)] + sub_args - + sub_env = self.get_env(ctx) - + # XXX: This may produce incorrect string if arguments contains whitespace # characters cmd = ' '.join(sub_args) @@ -174,7 +174,7 @@ def execute(self, ctx: ModuleContext): yield f'Running script...\n {cmd}' else: yield f'Running an externel script...' - + data = sub(*sub_args, cwd=cwd, env=sub_env) yield 'Writing outputs...' @@ -182,7 +182,7 @@ def execute(self, ctx: ModuleContext): target = ctx.r_env.resolve(self.stdout_target[1], final=True) with open(target, 'wb') as f: f.write(data) - + for _, file, target in self.file_outputs: file = ctx.r_env.resolve(file, final=True) target = ctx.r_env.resolve(target, final=True) @@ -199,15 +199,15 @@ def _init_outputs(self, output_defs: 'dict[str, dict[str, str]]'): meta = output_def.get('meta') if meta is str: self.prod_meta[dname] = meta - + mode = output_def.get('mode') if type(mode) is not str: raise Exception(f'Output mode for `{dep_name}` is not specified') - + target = output_def.get('target') if type(target) is not str: raise Exception('`target` field is not specified') - + if mode == 'file': file = output_def.get('file') if type(file) is not str: @@ -217,7 +217,7 @@ def _init_outputs(self, output_defs: 'dict[str, dict[str, str]]'): if self.stdout_target is not None: raise Exception('stdout output is already specified') self.stdout_target = dname, target - + # A very functional approach def _init_inputs(self, input_defs): positional_args = [] @@ -267,7 +267,7 @@ def push_q(ctx: ModuleContext, push_env=push_env, input=input): if val != '': push_env(val) get_env = _tailcall1(get_env, push_q) - + def get_all_args(ctx: ModuleContext): nonlocal get_args, positional_args, named_args @@ -277,14 +277,14 @@ def get_all_args(ctx: ModuleContext): pos = [ a for _, a in positional_args] return named_args + pos - + def get_all_env(ctx: ModuleContext): nonlocal get_env, env_vars get_env(ctx) if len(env_vars.items()) == 0: return None return env_vars - + setattr(self, 'get_args', get_all_args) setattr(self, 'get_env', get_all_env) @@ -292,7 +292,7 @@ def get_all_env(ctx: ModuleContext): self.takes.append(dep) for val in refs.values: self.values.append(val) - + def __init__(self, params): self.name = _generate_stage_name(params) self.no_of_phases = 2 diff --git a/f4pga/sf_common_modules/io_rename.py b/f4pga/sf_common_modules/io_rename.py index 155450464..f14c0be60 100644 --- a/f4pga/sf_common_modules/io_rename.py +++ b/f4pga/sf_common_modules/io_rename.py @@ -27,9 +27,9 @@ # 
----------------------------------------------------------------------------- # -from sf_common import * -from sf_module import * -from sf_module_runner import get_module +from f4pga.sf_common import * +from f4pga.sf_module import * +from f4pga.sf_module_runner import get_module # ----------------------------------------------------------------------------- # @@ -64,7 +64,7 @@ def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]': else: newl.append(r if r is not None else e) return newl - + def _generate_stage_name(name: str): return f'{name}-io_renamed' @@ -83,7 +83,7 @@ def map_io(self, ctx: ModuleContext): newctx.values = _switchback_attrs(ctx.values, self.rename_values) r = self.module.map_io(newctx) return _switch_keys(r, self.rename_produces) - + def execute(self, ctx: ModuleContext): newctx = ctx.shallow_copy() newctx.takes = _switchback_attrs(ctx.takes, self.rename_takes) @@ -91,7 +91,7 @@ def execute(self, ctx: ModuleContext): newctx.outputs = _switchback_attrs(ctx.produces, self.rename_produces) print(newctx.takes) return self.module.execute(newctx) - + def __init__(self, params): mod_path = resolve_modstr(params["module"]) module_class = get_module(mod_path) diff --git a/f4pga/sf_common_modules/mkdirs.py b/f4pga/sf_common_modules/mkdirs.py index 855e6f8a7..065549fcc 100644 --- a/f4pga/sf_common_modules/mkdirs.py +++ b/f4pga/sf_common_modules/mkdirs.py @@ -12,8 +12,8 @@ # ----------------------------------------------------------------------------- # import os -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # @@ -22,13 +22,13 @@ class MkDirsModule(Module): def map_io(self, ctx: ModuleContext): return ctx.r_env.resolve(self.deps_to_produce) - + def execute(self, ctx: ModuleContext): outputs = vars(ctx.outputs) for _, path in outputs.items(): yield f'Creating directory {path}...' 
os.makedirs(path, exist_ok=True) - + def __init__(self, params): self.name = 'mkdirs' self.no_of_phases = len(params) if params else 0 diff --git a/f4pga/sf_common_modules/pack.py b/f4pga/sf_common_modules/pack.py index ab286867b..816cbfd38 100644 --- a/f4pga/sf_common_modules/pack.py +++ b/f4pga/sf_common_modules/pack.py @@ -6,8 +6,8 @@ import os import re -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # @@ -24,7 +24,7 @@ def map_io(self, ctx: ModuleContext): 'util_rpt': os.path.join(build_dir, DEFAULT_UTIL_RPT), 'timing_rpt': os.path.join(build_dir, DEFAULT_TIMING_RPT) } - + def execute(self, ctx: ModuleContext): vpr_args = VprArgs(ctx.share, ctx.takes.eblif, ctx.values, sdc_file=ctx.takes.sdc) @@ -42,14 +42,14 @@ def execute(self, ctx: ModuleContext): shutil.move(og_log, ctx.outputs.pack_log) else: os.remove(og_log) - + if ctx.outputs.timing_rpt: shutil.move(os.path.join(build_dir, DEFAULT_TIMING_RPT), ctx.outputs.timing_rpt) if ctx.outputs.util_rpt: shutil.move(os.path.join(build_dir, DEFAULT_UTIL_RPT), ctx.outputs.util_rpt) - + def __init__(self, _): self.name = 'pack' self.no_of_phases = 2 diff --git a/f4pga/sf_common_modules/place.py b/f4pga/sf_common_modules/place.py index 7821a52ea..16bc007c2 100644 --- a/f4pga/sf_common_modules/place.py +++ b/f4pga/sf_common_modules/place.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # @@ -27,33 +27,33 @@ def place_constraints_file(ctx: ModuleContext): if not p: dummy = True p = file_noext(ctx.takes.eblif) + '.place' - + return p, dummy class PlaceModule(Module): def map_io(self, ctx: ModuleContext): mapping = {} p, _ = place_constraints_file(ctx) - + mapping['place'] = default_output_name(p) return mapping - + def execute(self, ctx: ModuleContext): place_constraints, dummy = place_constraints_file(ctx) place_constraints = os.path.realpath(place_constraints) if dummy: with open(place_constraints, 'wb') as f: f.write(b'') - + build_dir = os.path.dirname(ctx.takes.eblif) vpr_options = ['--fix_clusters', place_constraints] - + yield 'Running VPR...' vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values, sdc_file=ctx.takes.sdc, vpr_extra_opts=vpr_options) vpr('place', vprargs, cwd=build_dir) - + # VPR names output on its own. If user requested another name, the # output file should be moved. # TODO: This extends the set of names that would cause collisions. 
diff --git a/f4pga/sf_common_modules/place_constraints.py b/f4pga/sf_common_modules/place_constraints.py index cd3a26ff5..b48947161 100644 --- a/f4pga/sf_common_modules/place_constraints.py +++ b/f4pga/sf_common_modules/place_constraints.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # @@ -19,9 +19,9 @@ def map_io(self, ctx: ModuleContext): def execute(self, ctx: ModuleContext): arch_dir = os.path.join(ctx.share, 'arch') arch_def = os.path.join(arch_dir, ctx.values.device, 'arch.timing.xml') - + database = sub('prjxray-config').decode().replace('\n', '') - + yield 'Generating .place...' extra_opts: 'list[str]' @@ -38,7 +38,7 @@ def execute(self, ctx: ModuleContext): '--db_root', database, '--part', ctx.values.part_name] + extra_opts)) - + yield 'Saving place constraint data...' with open(ctx.outputs.place_constraints, 'wb') as f: f.write(data) diff --git a/f4pga/sf_common_modules/route.py b/f4pga/sf_common_modules/route.py index c9b440d2a..a73dd7bdb 100644 --- a/f4pga/sf_common_modules/route.py +++ b/f4pga/sf_common_modules/route.py @@ -6,27 +6,27 @@ import os import shutil -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # def route_place_file(eblif: str): - return file_noext(eblif) + '.route' + return file_noext(eblif) + '.route' class RouteModule(Module): def map_io(self, ctx: ModuleContext): return { 'route': route_place_file(ctx.takes.eblif) } - + def execute(self, ctx: ModuleContext): build_dir = os.path.dirname(ctx.takes.eblif) vpr_options = [] if ctx.values.vpr_options: vpr_options = options_dict_to_list(ctx.values.vpr_options) - + vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values, sdc_file=ctx.takes.sdc) @@ -39,7 +39,7 @@ def execute(self, ctx: ModuleContext): yield 'Saving log...' save_vpr_log('route.log', build_dir=build_dir) - + def __init__(self, _): self.name = 'route' self.no_of_phases = 2 diff --git a/f4pga/sf_common_modules/synth.py b/f4pga/sf_common_modules/synth.py index 8cac85214..b72ff18c1 100755 --- a/f4pga/sf_common_modules/synth.py +++ b/f4pga/sf_common_modules/synth.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from sf_common import * -from sf_module import * +from f4pga.sf_common import * +from f4pga.sf_module import * # ----------------------------------------------------------------------------- # @@ -39,7 +39,7 @@ def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None for verilog in verilog_files: tcl = f'read_verilog {args_str} {verilog}; {tcl}' verilog_files = [] - + # Execute YOSYS command return sub(*(['yosys', '-p', tcl] + optional + verilog_files), env=env) @@ -63,7 +63,7 @@ def map_io(self, ctx: ModuleContext): top = ctx.values.top if ctx.takes.build_dir: - top = os.path.join(ctx.takes.build_dir, top) + top = os.path.join(ctx.takes.build_dir, top) mapping['eblif'] = top + '.eblif' mapping['fasm_extra'] = top + '_fasm_extra.fasm' mapping['json'] = top + '.json' @@ -84,7 +84,7 @@ def map_io(self, ctx: ModuleContext): ctx.values.device + '_' + name + '.' 
+ name) return mapping - + def execute(self, ctx: ModuleContext): split_inouts = os.path.join(ctx.share, 'scripts/split_inouts.py') synth_tcl = os.path.join(ctx.values.tcl_scripts, 'synth.tcl') @@ -92,26 +92,26 @@ def execute(self, ctx: ModuleContext): tcl_env = yosys_setup_tcl_env(ctx.values.yosys_tcl_env) \ if ctx.values.yosys_tcl_env else {} - + if get_verbosity_level() >= 2: yield f'Synthesizing sources: {ctx.takes.sources}...' else: yield f'Synthesizing sources...' - + yosys_synth(synth_tcl, tcl_env, ctx.takes.sources, ctx.values.read_verilog_args, ctx.outputs.synth_log) yield f'Splitting in/outs...' sub('python3', split_inouts, '-i', ctx.outputs.json, '-o', ctx.outputs.synth_json) - + if not os.path.isfile(ctx.produces.fasm_extra): with open(ctx.produces.fasm_extra, 'w') as f: f.write('') yield f'Converting...' yosys_conv(conv_tcl, tcl_env, ctx.outputs.synth_json) - + def __init__(self, params): self.name = 'synthesize' self.no_of_phases = 3 @@ -123,7 +123,7 @@ def __init__(self, params): extra_takes = params.get('takes') if extra_takes: self.takes += extra_takes - + self.produces = [ 'eblif', 'fasm_extra', @@ -138,7 +138,7 @@ def __init__(self, params): self.extra_products = extra_products else: self.extra_products = [] - + self.values = [ 'top', 'device', diff --git a/f4pga/sf_flow_config.py b/f4pga/sf_flow_config.py index 30f9ba4e3..5ed59412e 100644 --- a/f4pga/sf_flow_config.py +++ b/f4pga/sf_flow_config.py @@ -1,8 +1,8 @@ import os import json -from sf_common import file_noext, ResolutionEnv, deep -from sf_stage import Stage +from f4pga.sf_common import file_noext, ResolutionEnv, deep +from f4pga.sf_stage import Stage from copy import copy _realpath_deep = deep(os.path.realpath) @@ -37,22 +37,22 @@ def _get_ov_dict(dname: str, flow: dict, d = _get_lazy_dict(platform_dict, dname) else: d = _get_lazy_dict(flow, dname) - + return d def _get_dep_dict(flow: dict, platform: 'str | None' = None, stage: 'str | None' = None): - return _get_ov_dict('dependencies', flow, platform, stage) + return _get_ov_dict('dependencies', flow, platform, stage) def _get_vals_dict(flow: dict, platform: 'str | None' = None, stage: 'str | None' = None): - return _get_ov_dict('values', flow, platform, stage) + return _get_ov_dict('values', flow, platform, stage) def _add_ov(ov_dict_getter, failstr_constr, flow_cfg: dict, name: str, values: list, platform: 'str | None' = None, stage: 'str | None' = None) -> bool: d = ov_dict_getter(flow_cfg, platform, stage) - + deps = d.get(name) if type(deps) is list: deps += values @@ -61,7 +61,7 @@ def _add_ov(ov_dict_getter, failstr_constr, flow_cfg: dict, name: str, else: print(failstr_constr(name)) return False - + return True def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr, @@ -70,7 +70,7 @@ def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr, stage: 'str | None' = None) -> bool: values_to_remove = set(vals) d = ov_dict_getter(flow, platform, stage) - + vallist: list = d.get(name) if type(vallist) is list: d[name] = [val for val in vallist if val not in values_to_remove] @@ -80,7 +80,7 @@ def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr, else: print(notlist_str_constr(name)) return False - + return True @@ -93,14 +93,14 @@ def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr, if len(idcs) == 0: print(f'Index list is emtpy!') return False - + d = ov_dict_getter(flow, platform, stage) vallist: list = d.get(name) if type(vallist) is list: if idcs[0] >= len(vallist) or 
idcs[len(idcs) - 1] < 0: print(f'Index out of range (max: {len(vallist)}!') return False - + for idx in idcs: vallist.pop(idx) elif vallist is None: @@ -109,7 +109,7 @@ def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr, else: print(notlist_str_constr(name)) return False - + return True def _get_ovs_raw(dict_name: str, flow_cfg, @@ -125,7 +125,7 @@ def _get_ovs_raw(dict_name: str, flow_cfg, stage_deps = flow_cfg[platform][stage].get(dict_name) if stage_deps is not None: vals.update(stage_deps) - + return vals def _remove_dependencies_by_values(flow: dict, name: str, deps: list, @@ -206,12 +206,12 @@ def __init__(self, flow_def: dict, r_env: ResolutionEnv): global_vals = flow_def.get('values') if global_vals is not None: self.r_env.add_values(global_vals) - + stages_d = flow_def['stages'] modopts_d = flow_def.get('stage_options') if modopts_d is None: modopts_d = {} - + for stage_name, modstr in stages_d.items(): opts = modopts_d.get(stage_name) self.stages[stage_name] = Stage(stage_name, modstr, opts) @@ -241,7 +241,7 @@ def platforms(self): for platform, _ in self.flow_cfg.items(): if not _is_kword(platform): yield platform - + def add_platform(self, device: str) -> bool: d = self.flow_cfg.get(device) if d: @@ -264,7 +264,7 @@ def get_default_platform(self) -> 'str | None': def get_default_target(self, platform: str) -> 'str | None': return self.flow_cfg[platform].get('default_target') - + def get_stage_r_env(self, platform: str, stage: str) -> ResolutionEnv: r_env = self._cache_platform_r_env(platform) @@ -272,28 +272,28 @@ def get_stage_r_env(self, platform: str, stage: str) -> ResolutionEnv: stage_values = stage_cfg.get('values') if stage_values: r_env.add_values(stage_values) - + return r_env - + """ Get dependencies without value resolution applied """ def get_dependencies_raw(self, platform: 'str | None' = None): return _get_ovs_raw('dependencies', self.flow_cfg, platform, None) - + """ Get values without value resolution applied """ def get_values_raw(self, platform: 'str | None' = None, stage: 'str | None' = None): return _get_ovs_raw('values', self.flow_cfg, platform, stage) - + def get_stage_value_overrides(self, platform: str, stage: str): stage_cfg = self.flow_cfg[platform].get(stage) if stage_cfg is None: return {} - + stage_vals_ovds = stage_cfg.get('values') if stage_vals_ovds is None: return {} return stage_vals_ovds - + def get_dependency_platform_overrides(self, platform: str): platform_ovds = self.flow_cfg[platform].get('dependencies') if platform_ovds is None: @@ -314,17 +314,17 @@ def __init__(self, project_config: ProjectFlowConfig, self.r_env.add_values(platform_vals) self.stages = platform_def.stages self.platform = platform - + raw_project_deps = project_config.get_dependencies_raw(platform) self.dependencies_explicit = \ _realpath_deep(self.r_env.resolve(raw_project_deps)) - + for stage_name, stage in platform_def.stages.items(): project_val_ovds = \ project_config.get_stage_value_overrides(platform, stage_name) stage.value_overrides.update(project_val_ovds) - + def get_dependency_overrides(self): return self.dependencies_explicit @@ -332,9 +332,9 @@ def get_r_env(self, stage_name: str) -> ResolutionEnv: stage = self.stages[stage_name] r_env = copy(self.r_env) r_env.add_values(stage.value_overrides) - + return r_env - + def get_stage(self, stage_name: str) -> Stage: return self.stages[stage_name] @@ -345,7 +345,7 @@ class FlowConfigException(Exception): def __init__(self, path: str, message: str): self.path = path self.message = message - + 
def __str__(self) -> str: return f'Error in config `{self.path}: {self.message}' @@ -356,5 +356,5 @@ def open_project_flow_cfg(path: str) -> ProjectFlowConfig: with open(path, 'r') as flow_cfg_file: flow_cfg_json = flow_cfg_file.read() cfg.flow_cfg = json.loads(flow_cfg_json) - + return cfg \ No newline at end of file diff --git a/f4pga/sf_module.py b/f4pga/sf_module.py index 6bb4efd50..a9a785e0d 100644 --- a/f4pga/sf_module.py +++ b/f4pga/sf_module.py @@ -2,7 +2,7 @@ import abc from types import SimpleNamespace -from sf_common import * +from f4pga.sf_common import * from colorama import Fore, Style class Module: @@ -13,7 +13,7 @@ class Module: They also have to specify what dependencies they produce and create the files for these dependencies. """ - + no_of_phases: int name: str takes: 'list[str]' @@ -37,7 +37,7 @@ def map_io(self, ctx) -> 'dict[str, ]': `ctx` is `ModuleContext`. """ pass - + def __init__(self, params: 'dict[str, ]'): self.no_of_phases = 0 self.current_phase = 0 @@ -49,7 +49,7 @@ class ModuleContext: A class for object holding mappings for dependencies and values as well as other information needed during modules execution. """ - + share: str # Absolute path to Symbiflow's share directory bin: str # Absolute path to Symbiflow's bin directory takes: SimpleNamespace # Maps symbolic dependency names to relative @@ -59,11 +59,11 @@ class ModuleContext: # on-demand optional outputs (such as logs) # with `is_output_explicit` method. outputs: SimpleNamespace # Contains mappings for all available outputs. - values: SimpleNamespace # Contains all available requested values. + values: SimpleNamespace # Contains all available requested values. r_env: ResolutionEnv # `ResolutionEnvironmet` object holding mappings # for current scope. module_name: str # Name of the module. - + def is_output_explicit(self, name: str): """ True if user has explicitely specified output's path. """ o = getattr(self.produces, name) @@ -74,7 +74,7 @@ def _getreqmaybe(self, obj, deps: 'list[str]', deps_cfg: 'dict[str, ]'): Add attribute for a dependency or panic if a required dependency has not been given to the module on its input. """ - + for name in deps: name, spec = decompose_depname(name) value = deps_cfg.get(name) @@ -120,7 +120,7 @@ def shallow_copy(self): mycopy.share = self.share mycopy.bin = self.bin - return mycopy + return mycopy class ModuleRuntimeException(Exception): info: str @@ -133,7 +133,7 @@ def __str___(self): def get_mod_metadata(module: Module): """ Get descriptions for produced dependencies. 
""" - + meta = {} has_meta = hasattr(module, 'prod_meta') for prod in module.produces: diff --git a/f4pga/sf_module_inspector.py b/f4pga/sf_module_inspector.py index bc725e12f..dc62d6c30 100644 --- a/f4pga/sf_module_inspector.py +++ b/f4pga/sf_module_inspector.py @@ -1,5 +1,5 @@ -from sf_module import Module -from sf_common import decompose_depname +from f4pga.sf_module import Module +from f4pga.sf_common import decompose_depname from colorama import Style def _get_if_qualifier(deplist: 'list[str]', qualifier: str): diff --git a/f4pga/sf_module_runner.py b/f4pga/sf_module_runner.py index 64eb37b8d..9dcb66579 100644 --- a/f4pga/sf_module_runner.py +++ b/f4pga/sf_module_runner.py @@ -4,8 +4,8 @@ import importlib import importlib.util import os -from sf_module import Module, ModuleContext, get_mod_metadata -from sf_common import ResolutionEnv, deep, sfprint +from f4pga.sf_module import Module, ModuleContext, get_mod_metadata +from f4pga.sf_common import ResolutionEnv, deep, sfprint from colorama import Fore, Style _realpath_deep = deep(os.path.realpath) diff --git a/f4pga/sf_stage.py b/f4pga/sf_stage.py index ec1baca90..5aa9fe638 100644 --- a/f4pga/sf_stage.py +++ b/f4pga/sf_stage.py @@ -1,6 +1,6 @@ -from sf_common import decompose_depname, resolve_modstr -from sf_module import Module -from sf_module_runner import get_module, module_io +from f4pga.sf_common import decompose_depname, resolve_modstr +from f4pga.sf_module import Module +from f4pga.sf_module_runner import get_module, module_io class StageIO: """ @@ -20,7 +20,7 @@ def __init__(self, encoded_name: str): """ self.name, self.spec = decompose_depname(encoded_name) - + def __repr__(self) -> str: return 'StageIO { name: \'' + self.name + '\', spec: ' + \ self.spec + '}' @@ -34,17 +34,17 @@ class Stage: name: str # Name of the stage (module's name) takes: 'list[StageIO]' # List of symbolic names of dependencies used by # the stage - produces: 'list[StageIO]' # List of symbolic names of dependencies + produces: 'list[StageIO]' # List of symbolic names of dependencies # produced by the stage value_overrides: 'dict[str, ]' # Stage-specific values module: Module meta: 'dict[str, str]' # Stage's metadata extracted from module's # output. 
- + def __init__(self, name: str, modstr: str, mod_opts: 'dict[str, ] | None'): if mod_opts is None: mod_opts = {} - + module_path = resolve_modstr(modstr) ModuleClass = get_module(module_path) self.module = ModuleClass(mod_opts.get('params')) @@ -54,20 +54,20 @@ def __init__(self, name: str, modstr: str, mod_opts: 'dict[str, ] | None'): self.value_overrides = values else: self.value_overrides = {} - + mod_io = module_io(self.module) self.name = name - + self.takes = [] for input in mod_io['takes']: io = StageIO(input) self.takes.append(io) - + self.produces = [] for input in mod_io['produces']: io = StageIO(input) self.produces.append(io) - + self.meta = mod_io['meta'] def __repr__(self) -> str: diff --git a/f4pga/sf_ugly.py b/f4pga/sf_ugly.py index 8dd38a54b..55021377f 100644 --- a/f4pga/sf_ugly.py +++ b/f4pga/sf_ugly.py @@ -1,11 +1,11 @@ """ The "ugly" module is dedicated for some *ugly* workarounds """ import os -import sf_common +from f4pga.sf_common import sub as common_sub def noisy_warnings(): """ Emit some noisy warnings """ - + os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log' return 'noisy_warnings.log' @@ -13,7 +13,7 @@ def generate_values(): """ Generate initial values, available in configs """ return{ - 'prjxray_db': sf_common.sub('prjxray-config').decode().replace('\n', ''), - 'python3': sf_common.sub('which', 'python3').decode().replace('\n', ''), + 'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''), + 'python3': common_sub('which', 'python3').decode().replace('\n', ''), 'noisyWarnings': noisy_warnings() } diff --git a/f4pga/sfbuild b/f4pga/sfbuild deleted file mode 100644 index 1a2d46a4b..000000000 --- a/f4pga/sfbuild +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh - -MYDIR=`dirname $0` - -python3 ${MYDIR}/sfbuild.py $@ \ No newline at end of file diff --git a/f4pga/sfbuild.py b/f4pga/sfbuild.py index df6447170..4b91f25f9 100755 --- a/f4pga/sfbuild.py +++ b/f4pga/sfbuild.py @@ -21,30 +21,30 @@ such as list of source code files. 
""" +from pathlib import Path from argparse import Namespace import os from os import environ import json from typing import Iterable from colorama import Fore, Style -from sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \ +from f4pga.sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \ sfprint -from sf_module import * -from sf_cache import SymbiCache -import sf_ugly -from sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \ +from f4pga.sf_module import * +from f4pga.sf_cache import SymbiCache +import f4pga.sf_ugly as sf_ugly +from f4pga.sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \ open_project_flow_cfg, verify_platform_name, \ verify_stage -from sf_module_runner import * -from sf_module_inspector import get_module_info -from sf_stage import Stage -from sf_argparse import setup_argparser, get_cli_flow_config +from f4pga.sf_module_runner import * +from f4pga.sf_module_inspector import get_module_info +from f4pga.sf_stage import Stage +from f4pga.sf_argparse import setup_argparser, get_cli_flow_config SYMBICACHEPATH = '.symbicache' -mypath = os.path.realpath(os.sys.argv[0]) -mypath = os.path.dirname(mypath) -binpath = os.path.realpath(os.path.join(mypath, '..')) +binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..')) +mypath = str(Path(__file__).resolve().parent) share_dir_path = os.path.realpath(f"{environ.get('INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow") @@ -624,7 +624,7 @@ def cmd_show_dependencies(args: Namespace): set_verbosity_level(-1) -if __name__ == '__main__': +def main(): parser = setup_argparser() args = parser.parse_args() @@ -640,3 +640,6 @@ def cmd_show_dependencies(args: Namespace): sfprint(0, 'Please use a command.\nUse `--help` flag to learn more.') sfbuild_done() + +if __name__ == '__main__': + main() From fcb4ce881147bacfc62cf49afff93b1aadc642ca Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Tue, 1 Mar 2022 03:46:20 +0100 Subject: [PATCH 18/33] f4pga: merge sfbuild into __init__ Signed-off-by: Unai Martinez-Corral --- .github/workflows/pyF4PGA.yml | 4 +- f4pga/__init__.py | 645 ++++++++++++++++++++++++++++++++++ f4pga/setup.py | 2 +- f4pga/sfbuild.py | 645 ---------------------------------- 4 files changed, 648 insertions(+), 648 deletions(-) mode change 100644 => 100755 f4pga/__init__.py delete mode 100755 f4pga/sfbuild.py diff --git a/.github/workflows/pyF4PGA.yml b/.github/workflows/pyF4PGA.yml index b206f9c0c..da498f804 100644 --- a/.github/workflows/pyF4PGA.yml +++ b/.github/workflows/pyF4PGA.yml @@ -55,5 +55,5 @@ jobs: - name: Test py4FPGA (PYTHONPATH) run: | - PYTHONPATH=$(pwd) python3 f4pga/sfbuild.py - PYTHONPATH=$(pwd) python3 f4pga/sfbuild.py -h + PYTHONPATH=$(pwd) python3 f4pga/__init__.py + PYTHONPATH=$(pwd) python3 f4pga/__init__.py -h diff --git a/f4pga/__init__.py b/f4pga/__init__.py old mode 100644 new mode 100755 index e69de29bb..4b91f25f9 --- a/f4pga/__init__.py +++ b/f4pga/__init__.py @@ -0,0 +1,645 @@ +#!/usr/bin/env python3 + +""" +sfbuild - Symbiflow Build System + +This tool allows for building FPGA targets (such as bitstreams) for any supported +platform with just one simple command and a project file. + +The idea is that sfbuild wraps all the tools needed by different platforms in +"modules", which define inputs/outputs and various parameters. This allows +sfbuild to resolve dependencies for any target provided that a "flow definition" +file exists for such target. 
The flow definition file lists modules available for +that platform and may tweak some settings of those modules. + +A basic example of using sfbuild: +$ sfbuild build --platform arty_35 -t bitstream + +This will make sfbuild attempt to create a bitstream for the arty_35 platform. +flow.json is a flow configuration file, which should be created for a project +that uses sfbuild. It contains project-specific definitions needed within the flow, +such as the list of source code files. +""" + +from pathlib import Path +from argparse import Namespace +import os +from os import environ +import json +from typing import Iterable +from colorama import Fore, Style +from f4pga.sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \ + sfprint +from f4pga.sf_module import * +from f4pga.sf_cache import SymbiCache +import f4pga.sf_ugly as sf_ugly +from f4pga.sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \ + open_project_flow_cfg, verify_platform_name, \ + verify_stage +from f4pga.sf_module_runner import * +from f4pga.sf_module_inspector import get_module_info +from f4pga.sf_stage import Stage +from f4pga.sf_argparse import setup_argparser, get_cli_flow_config + +SYMBICACHEPATH = '.symbicache' + +binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..')) +mypath = str(Path(__file__).resolve().parent) + +share_dir_path = os.path.realpath(f"{environ.get('INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow") + +class DependencyNotProducedException(Exception): + dep_name: str + provider: str + + def __init__(self, dep_name: str, provider: str): + self.dep_name = dep_name + self.provider = provider + + def __str__(self) -> str: + return f'Stage `{self.provider}` did not produce promised ' \ + f'dependency `{self.dep_name}`' + +def dep_value_str(dep: str): + return ':' + dep + +def platform_stages(platform_flow, r_env): + """ Iterates over all stages available in a given flow. """ + + stage_options = platform_flow.get('stage_options') + for stage_name, modulestr in platform_flow['stages'].items(): + mod_opts = stage_options.get(stage_name) if stage_options else None + yield Stage(stage_name, modulestr, mod_opts, r_env) + +def req_exists(r): + """ Checks whether a dependency exists on a drive. """ + + if type(r) is str: + if not os.path.isfile(r) and not os.path.islink(r) \ + and not os.path.isdir(r): + return False + elif type(r) is list: + return not (False in map(req_exists, r)) + else: + raise Exception(f'Requirements can currently be checked only for single ' + f'paths, or path lists (reason: {r})') + return True + +def map_outputs_to_stages(stages: 'list[Stage]'): + """ + Associates a stage with every possible output. + This is commonly referred to as `os_map` (output-stage-map) throughout the code. + """ + + os_map: 'dict[str, Stage]' = {} # Output-Stage map + for stage in stages: + for output in stage.produces: + if not os_map.get(output.name): + os_map[output.name] = stage + elif os_map[output.name] != stage: + raise Exception(f'Dependency `{output.name}` is generated by ' + f'stage `{os_map[output.name].name}` and ' + f'`{stage.name}`. 
Dependencies can have only one ' + 'provider at most.') + return os_map + +def filter_existing_deps(deps: 'dict[str, ]', symbicache): + return [(n, p) for n, p in deps.items() \ + if req_exists(p)] # and not dep_differ(p, symbicache)] + +def get_stage_values_override(og_values: dict, stage: Stage): + values = og_values.copy() + values.update(stage.value_ovds) + return values + +def prepare_stage_io_input(stage: Stage): + return { 'params': stage.params } if stage.params is not None else {} + +def prepare_stage_input(stage: Stage, platform_name: str, values: dict, + dep_paths: 'dict[str, ]', config_paths: 'dict[str, ]'): + takes = {} + for take in stage.takes: + paths = dep_paths.get(take.name) + if paths: # Some takes may not be required + takes[take.name] = paths + + produces = {} + for prod in stage.produces: + if dep_paths.get(prod.name): + produces[prod.name] = dep_paths[prod.name] + elif config_paths.get(prod.name): + produces[prod.name] = config_paths[prod.name] + + stage_mod_cfg = { + 'takes': takes, + 'produces': produces, + 'values': values, + 'platform': platform_name, + } + + return stage_mod_cfg + +def update_dep_statuses(paths, consumer: str, symbicache: SymbiCache): + if type(paths) is str: + return symbicache.update(paths, consumer) + elif type(paths) is list: + for p in paths: + return update_dep_statuses(p, consumer, symbicache) + elif type(paths) is dict: + for _, p in paths.items(): + return update_dep_statuses(p, consumer, symbicache) + fatal(-1, 'WRONG PATHS TYPE') + +def dep_differ(paths, consumer: str, symbicache: SymbiCache): + """ + Check if a dependency differs from its last version; a missing dependency is + treated as "differs" + """ + + if type(paths) is str: + s = symbicache.get_status(paths, consumer) + if s == 'untracked': + symbicache.update(paths, consumer) + return symbicache.get_status(paths, consumer) != 'same' + elif type(paths) is list: + return True in [dep_differ(p, consumer, symbicache) for p in paths] + elif type(paths) is dict: + return True in [dep_differ(p, consumer, symbicache) \ + for _, p in paths.items()] + return False + +def dep_will_differ(target: str, paths, consumer: str, + os_map: 'dict[str, Stage]', run_stages: 'set[str]', + symbicache: SymbiCache): + """ + Check if a dependency or any of the dependencies it depends on differ from + their last versions. + """ + + provider = os_map.get(target) + if provider: + return (provider.name in run_stages) or \ + dep_differ(paths, consumer, symbicache) + return dep_differ(paths, consumer, symbicache) + +def _print_unreachable_stage_message(provider: Stage, take: str): + sfprint(0, ' Stage ' + f'`{Style.BRIGHT + provider.name + Style.RESET_ALL}` is ' + 'unreachable due to unmet dependency ' + f'`{Style.BRIGHT + take.name + Style.RESET_ALL}`') + +def config_mod_runctx(stage: Stage, platform_name: str, values: 'dict[str, ]', + dep_paths: 'dict[str, str | list[str]]', + config_paths: 'dict[str, str | list[str]]'): + config = prepare_stage_input(stage, platform_name, values, + dep_paths, config_paths) + return ModRunCtx(share_dir_path, binpath, config) + +class Flow: + """ Describes a complete, configured flow, ready for execution. 
""" + + # Dependendecy to build + target: str + # Values in global scope + cfg: FlowConfig + # dependency-producer map + os_map: 'dict[str, Stage]' + # Paths resolved for dependencies + dep_paths: 'dict[str, str | list[str]]' + # Explicit configs for dependency paths + # config_paths: 'dict[str, str | list[str]]' + # Stages that need to be run + run_stages: 'set[str]' + # Number of stages that relied on outdated version of a (checked) dependency + deps_rebuilds: 'dict[str, int]' + symbicache: 'SymbiCache | None' + flow_cfg: FlowConfig + + def __init__(self, target: str, cfg: FlowConfig, + symbicache: 'SymbiCache | None'): + self.target = target + self.os_map = map_outputs_to_stages(cfg.stages.values()) + + explicit_deps = cfg.get_dependency_overrides() + # print(explicit_deps) + + self.dep_paths = dict(filter_existing_deps(explicit_deps, symbicache)) + self.run_stages = set() + self.symbicache = symbicache + self.cfg = cfg + self.deps_rebuilds = {} + + self._resolve_dependencies(self.target, set()) + + def _dep_will_differ(self, dep: str, paths, consumer: str): + if not self.symbicache: # Handle --nocache mode + return True + return dep_will_differ(dep, paths, consumer, + self.os_map, self.run_stages, + self.symbicache) + + def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): + # Initialize the dependency status if necessary + if self.deps_rebuilds.get(dep) is None: + self.deps_rebuilds[dep] = 0 + # Check if an explicit dependency is already resolved + paths = self.dep_paths.get(dep) + if paths and not self.os_map.get(dep): + return + # Check if a stage can provide the required dependency + provider = self.os_map.get(dep) + if not provider or provider.name in stages_checked: + return + + # TODO: Check if the dependency is "on-demand" and force it in provider's + # config if it is. + + for take in provider.takes: + self._resolve_dependencies(take.name, stages_checked) + # If any of the required dependencies is unavailable, then the + # provider stage cannot be run + take_paths = self.dep_paths.get(take.name) + # Add input path to values (dirty hack) + provider.value_overrides[dep_value_str(take.name)] = take_paths + + if not take_paths and take.spec == 'req': + _print_unreachable_stage_message(provider, take) + return + + if self._dep_will_differ(take.name, take_paths, provider.name): + sfprint(2, f'{take.name} is causing rebuild for {provider.name}') + self.run_stages.add(provider.name) + self.deps_rebuilds[take.name] += 1 + + stage_values = self.cfg.get_r_env(provider.name).values + modrunctx = config_mod_runctx(provider, self.cfg.platform, + stage_values, self.dep_paths, + self.cfg.get_dependency_overrides()) + + outputs = module_map(provider.module, modrunctx) + + stages_checked.add(provider.name) + self.dep_paths.update(outputs) + + for _, out_paths in outputs.items(): + if (out_paths is not None) and not (req_exists(out_paths)): + self.run_stages.add(provider.name) + + # Verify module's outputs and add paths as values. + outs = outputs.keys() + # print(outs) + for o in provider.produces: + if o.name not in outs: + if o.spec == 'req' or (o.spec == 'demand' and \ + o.name in self.cfg.get_dependency_overrides().keys()): + fatal(-1, f'Module {provider.name} did not produce a mapping ' + f'for a required output `{o.name}`') + else: + # Remove an on-demand/optional output that is not produced + # from os_map. 
+ self.os_map.pop(o.name) + # Add a value for the output (dirty hack yet again) + o_path = outputs.get(o.name) + + if o_path is not None: + provider.value_overrides[dep_value_str(o.name)] = \ + outputs.get(o.name) + + + def print_resolved_dependencies(self, verbosity: int): + deps = list(self.deps_rebuilds.keys()) + deps.sort() + + for dep in deps: + status = Fore.RED + '[X]' + Fore.RESET + source = Fore.YELLOW + 'MISSING' + Fore.RESET + paths = self.dep_paths.get(dep) + + if paths: + exists = req_exists(paths) + provider = self.os_map.get(dep) + if provider and provider.name in self.run_stages: + if exists: + status = Fore.YELLOW + '[R]' + Fore.RESET + else: + status = Fore.YELLOW + '[S]' + Fore.RESET + source = f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} ' \ + f'-> {paths}' + elif exists: + if self.deps_rebuilds[dep] > 0: + status = Fore.GREEN + '[N]' + Fore.RESET + else: + status = Fore.GREEN + '[O]' + Fore.RESET + source = paths + elif self.os_map.get(dep): + status = Fore.RED + '[U]' + Fore.RESET + source = \ + f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} -> ???' + + sfprint(verbosity, f' {Style.BRIGHT + status} ' + f'{dep + Style.RESET_ALL}: {source}') + + def _build_dep(self, dep): + paths = self.dep_paths.get(dep) + provider = self.os_map.get(dep) + run = (provider.name in self.run_stages) if provider else False + if not paths: + sfprint(2, f'Dependency {dep} is unresolved.') + return False + + if req_exists(paths) and not run: + return True + else: + assert(provider) + + any_dep_differ = False if self.symbicache else True + for p_dep in provider.takes: + if not self._build_dep(p_dep.name): + assert (p_dep.spec != 'req') + continue + + if self.symbicache: + any_dep_differ |= \ + update_dep_statuses(self.dep_paths[p_dep.name], + provider.name, self.symbicache) + + # If dependencies remained the same, consider the dep as up to date. + # For example, when changing a comment in Verilog source code, + # the initial dependency resolution will report a need for a complete + # rebuild; however, after the synthesis stage, the generated eblif + # will remain the same, thus making it unnecessary to continue the + # rebuild process. 
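# The skip rule stated in the comment above, restated as a tiny sketch
# (illustration only; the actual check is the `if` that follows below):
def should_rerun_stage(any_dep_differ: bool, outputs_exist: bool) -> bool:
    # A stage is re-run only if some input changed or one of its outputs
    # is missing; otherwise the cached result is considered up to date.
    return any_dep_differ or not outputs_exist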
+ if (not any_dep_differ) and req_exists(paths): + sfprint(2, f'Skipping rebuild of `' + f'{Style.BRIGHT + dep + Style.RESET_ALL}` because all ' + f'of its dependencies remained unchanged') + return True + + stage_values = self.cfg.get_r_env(provider.name).values + modrunctx = config_mod_runctx(provider, self.cfg.platform, + stage_values, self.dep_paths, + self.cfg.get_dependency_overrides()) + module_exec(provider.module, modrunctx) + + self.run_stages.discard(provider.name) + + if not req_exists(paths): + raise DependencyNotProducedException(dep, provider.name) + + return True + + def execute(self): + self._build_dep(self.target) + if self.symbicache: + update_dep_statuses(self.dep_paths[self.target], '__target', + self.symbicache) + sfprint(0, f'Target `{Style.BRIGHT + self.target + Style.RESET_ALL}` ' + f'-> {self.dep_paths[self.target]}') + +def display_dep_info(stages: 'Iterable[Stage]'): + sfprint(0, 'Platform dependencies/targets:') + longest_out_name_len = 0 + for stage in stages: + for out in stage.produces: + l = len(out.name) + if l > longest_out_name_len: + longest_out_name_len = l + + desc_indent = longest_out_name_len + 7 + nl_indentstr = '\n' + for _ in range(0, desc_indent): + nl_indentstr += ' ' + + for stage in stages: + for out in stage.produces: + pname = Style.BRIGHT + out.name + Style.RESET_ALL + indent = '' + for _ in range(0, desc_indent - len(pname) + 3): + indent += ' ' + specstr = '???' + if out.spec == 'req': + specstr = f'{Fore.BLUE}guaranteed{Fore.RESET}' + elif out.spec == 'maybe': + specstr = f'{Fore.YELLOW}not guaranteed{Fore.RESET}' + elif out.spec == 'demand': + specstr = f'{Fore.RED}on-demand{Fore.RESET}' + pgen = f'{Style.DIM}stage: `{stage.name}`, '\ + f'spec: {specstr}{Style.RESET_ALL}' + pdesc = stage.meta[out.name].replace('\n', nl_indentstr) + sfprint(0, f' {Style.BRIGHT + out.name + Style.RESET_ALL}:' + f'{indent}{pdesc}{nl_indentstr}{pgen}') + +def display_stage_info(stage: Stage): + if stage is None: + sfprint(0, f'Stage does not exist') + sfbuild_fail() + return + + sfprint(0, f'Stage `{Style.BRIGHT}{stage.name}{Style.RESET_ALL}`:') + sfprint(0, f' Module: `{Style.BRIGHT}{stage.module.name}{Style.RESET_ALL}`') + sfprint(0, f' Module info:') + + mod_info = get_module_info(stage.module) + mod_info = '\n '.join(mod_info.split('\n')) + + sfprint(0, f' {mod_info}') + +sfbuild_done_str = Style.BRIGHT + Fore.GREEN + 'DONE' +sfbuild_silent = 0 + +def sfbuild_fail(): + global sfbuild_done_str + sfbuild_done_str = Style.BRIGHT + Fore.RED + 'FAILED' + +def sfbuild_done(): + sfprint(1, f'sfbuild: {sfbuild_done_str}' + f'{Style.RESET_ALL + Fore.RESET}') + exit(0) + +def setup_resolution_env(): + """ Sets up a ResolutionEnv with sfbuild's default built-ins. 
""" + + r_env = ResolutionEnv({ + 'shareDir': share_dir_path, + 'binDir': os.path.realpath(os.path.join(share_dir_path, '../../bin')) + }) + r_env.add_values(sf_ugly.generate_values()) + return r_env + +def open_project_flow_config(path: str) -> ProjectFlowConfig: + try: + flow_cfg = open_project_flow_cfg(path) + except FileNotFoundError as _: + fatal(-1, 'The provided flow configuration file does not exist') + return flow_cfg + +def verify_platform_stage_params(flow_cfg: FlowConfig, + platform: 'str | None' = None, + stage: 'str | None' = None): + if platform: + if not verify_platform_name(platform, mypath): + sfprint(0, f'Platform `{platform}`` is unsupported.') + return False + if args.platform not in flow_cfg.platforms(): + sfprint(0, f'Platform `{platform}`` is not in project.') + return False + + if stage: + if not verify_stage(platform, stage, mypath): + sfprint(0, f'Stage `{stage}` is invalid.') + sfbuild_fail() + return False + + return True + +def get_platform_name_for_part(part_name: str): + """ + Gets a name that identifies the platform setup required for a specific chip. + The reason for such distinction is that plenty of chips with different names + differ only in a type of package they use. + """ + + d: dict + with open(os.path.join(mypath, 'part_db/parts.json')) as f: + d = json.loads(f.read()) + return d.get(part_name.upper()) + +def cmd_build(args: Namespace): + """ sfbuild's `build` command implementation """ + + project_flow_cfg: ProjectFlowConfig = None + + + platform = args.platform + if platform is None: + if args.part: + platform = get_platform_name_for_part(args.part) + + if args.flow: + project_flow_cfg = open_project_flow_config(args.flow) + elif platform is not None: + project_flow_cfg = ProjectFlowConfig('.temp.flow.json') + project_flow_cfg.flow_cfg = get_cli_flow_config(args, platform) + if platform is None and project_flow_cfg is not None: + platform = project_flow_cfg.get_default_platform() + if platform is None: + fatal(-1, 'You have to specify a platform name or a part name or ' + 'configure a default platform.') + if platform is None or project_flow_cfg is None: + fatal(-1, 'No configuration was provided. 
Use `--flow`, `--platform` or ' + '`--part` to configure the flow.') + + platform_path = os.path.join(mypath, 'platforms', platform + '.json') + platform_def = None + try: + with open(platform_path) as platform_file: + platform_def = platform_file.read() + except FileNotFoundError as _: + fatal(-1, f'The platform flow definition file {platform_path} for the platform ' + f'{platform} referenced in flow definition file {args.flow} ' + 'cannot be found.') + + r_env = setup_resolution_env() + + sfprint(2, 'Scanning modules...') + scan_modules(mypath) + + flow_definition_dict = json.loads(platform_def) + flow_def = FlowDefinition(flow_definition_dict, r_env) + flow_cfg = FlowConfig(project_flow_cfg, flow_def, platform) + + + if len(flow_cfg.stages) == 0: + fatal(-1, 'Platform flow does not define any stage') + + if args.info: + display_dep_info(flow_cfg.stages.values()) + sfbuild_done() + + if args.stageinfo: + display_stage_info(flow_cfg.stages.get(args.stageinfo[0])) + sfbuild_done() + + target = args.target + if target is None: + target = project_flow_cfg.get_default_target(platform) + if target is None: + fatal(-1, 'Please specify desired target using `--target` option ' + 'or configure a default target.') + + flow = Flow( + target=target, + cfg=flow_cfg, + symbicache=SymbiCache(SYMBICACHEPATH) if not args.nocache else None + ) + + dep_print_verbosity = 0 if args.pretend else 2 + sfprint(dep_print_verbosity, '\nProject status:') + flow.print_resolved_dependencies(dep_print_verbosity) + sfprint(dep_print_verbosity, '') + + if args.pretend: + sfbuild_done() + + try: + flow.execute() + except Exception as e: + sfprint(0, e) + sfbuild_fail() + + if flow.symbicache: + flow.symbicache.save() + +def cmd_show_dependencies(args: Namespace): + """ sfbuild's `showd` command implementation """ + + flow_cfg = open_project_flow_config(args.flow) + + if not verify_platform_stage_params(flow_cfg, args.platform): + sfbuild_fail() + return + + platform_overrides: 'set | None' = None + if args.platform is not None: + platform_overrides = \ + set(flow_cfg.get_dependency_platform_overrides(args.platform).keys()) + + display_list = [] + + raw_deps = flow_cfg.get_dependencies_raw(args.platform) + + for dep_name, dep_paths in raw_deps.items(): + prstr: str + if (platform_overrides is not None) and (dep_name in platform_overrides): + prstr = f'{Style.DIM}({args.platform}){Style.RESET_ALL} ' \ + f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' + else: + prstr = f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' + + display_list.append((dep_name, prstr)) + + display_list.sort(key = lambda p: p[0]) + + for _, prstr in display_list: + sfprint(0, prstr) + + set_verbosity_level(-1) + +def main(): + parser = setup_argparser() + args = parser.parse_args() + + set_verbosity_level(args.verbose - (1 if args.silent else 0)) + + if args.command == 'build': + cmd_build(args) + sfbuild_done() + + if args.command == 'showd': + cmd_show_dependencies(args) + sfbuild_done() + + sfprint(0, 'Please use a command.\nUse `--help` flag to learn more.') + sfbuild_done() + +if __name__ == '__main__': + main() diff --git a/f4pga/sfbuild.py
b/f4pga/sfbuild.py deleted file mode 100755 index 4b91f25f9..000000000 --- a/f4pga/sfbuild.py +++ /dev/null @@ -1,645 +0,0 @@ -#!/usr/bin/env python3 - -""" -sfbuild - Symbiflow Build System - -This tool allows for building FPGA targets (such as bitstreams) for any supported -platform with just one simple command and a project file. - -The idea is that sfbuild wraps all the tools needed by different platforms in -"modules", which define inputs/outputs and various parameters. This allows -sfbuild to resolve dependencies for any target provided that a "flow definition" -file exists for such target. The flow defeinition file list modules available for -that platform and may tweak some settings of those modules. - -A basic example of using sfbuild: -$ sfbuild build --platform arty_35 -t bitstream - -This will make sfbuild attempt to create a bitstream for arty_35 platform. -flow.json is a flow configuration file, which should be created for a project -that uses sfbuild. Iontains project-specific definitions needed within the flow, -such as list of source code files. -""" - -from pathlib import Path -from argparse import Namespace -import os -from os import environ -import json -from typing import Iterable -from colorama import Fore, Style -from f4pga.sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \ - sfprint -from f4pga.sf_module import * -from f4pga.sf_cache import SymbiCache -import f4pga.sf_ugly as sf_ugly -from f4pga.sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \ - open_project_flow_cfg, verify_platform_name, \ - verify_stage -from f4pga.sf_module_runner import * -from f4pga.sf_module_inspector import get_module_info -from f4pga.sf_stage import Stage -from f4pga.sf_argparse import setup_argparser, get_cli_flow_config - -SYMBICACHEPATH = '.symbicache' - -binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..')) -mypath = str(Path(__file__).resolve().parent) - -share_dir_path = os.path.realpath(f"{environ.get('INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow") - -class DependencyNotProducedException(Exception): - dep_name: str - provider: str - - def __init__(self, dep_name: str, provider: str): - self.dep_name = dep_name - self.provider = provider - - def __str__(self) -> str: - return f'Stage `{self.provider}` did not produce promised ' \ - f'dependency `{self.dep_name}`' - -def dep_value_str(dep: str): - return ':' + dep - -def platform_stages(platform_flow, r_env): - """ Iterates over all stages available in a given flow. """ - - stage_options = platform_flow.get('stage_options') - for stage_name, modulestr in platform_flow['stages'].items(): - mod_opts = stage_options.get(stage_name) if stage_options else None - yield Stage(stage_name, modulestr, mod_opts, r_env) - -def req_exists(r): - """ Checks whether a dependency exists on a drive. """ - - if type(r) is str: - if not os.path.isfile(r) and not os.path.islink(r) \ - and not os.path.isdir(r): - return False - elif type(r) is list: - return not (False in map(req_exists, r)) - else: - raise Exception(f'Requirements can be currently checked only for single ' - f'paths, or path lists (reason: {r})') - return True - -def map_outputs_to_stages(stages: 'list[Stage]'): - """ - Associates a stage with every possible output. - This is commonly refferef to as `os_map` (output-stage-map) through the code. 
- """ - - os_map: 'dict[str, Stage]' = {} # Output-Stage map - for stage in stages: - for output in stage.produces: - if not os_map.get(output.name): - os_map[output.name] = stage - elif os_map[output.name] != stage: - raise Exception(f'Dependency `{output.name}` is generated by ' - f'stage `{os_map[output.name].name}` and ' - f'`{stage.name}`. Dependencies can have only one ' - 'provider at most.') - return os_map - -def filter_existing_deps(deps: 'dict[str, ]', symbicache): - return [(n, p) for n, p in deps.items() \ - if req_exists(p)] # and not dep_differ(p, symbicache)] - -def get_stage_values_override(og_values: dict, stage: Stage): - values = og_values.copy() - values.update(stage.value_ovds) - return values - -def prepare_stage_io_input(stage: Stage): - return { 'params': stage.params } if stage.params is not None else {} - -def prepare_stage_input(stage: Stage, platform_name: str, values: dict, - dep_paths: 'dict[str, ]', config_paths: 'dict[str, ]'): - takes = {} - for take in stage.takes: - paths = dep_paths.get(take.name) - if paths: # Some takes may be not required - takes[take.name] = paths - - produces = {} - for prod in stage.produces: - if dep_paths.get(prod.name): - produces[prod.name] = dep_paths[prod.name] - elif config_paths.get(prod.name): - produces[prod.name] = config_paths[prod.name] - - stage_mod_cfg = { - 'takes': takes, - 'produces': produces, - 'values': values, - 'platform': platform_name, - } - - return stage_mod_cfg - -def update_dep_statuses(paths, consumer: str, symbicache: SymbiCache): - if type(paths) is str: - return symbicache.update(paths, consumer) - elif type(paths) is list: - for p in paths: - return update_dep_statuses(p, consumer, symbicache) - elif type(paths) is dict: - for _, p in paths.items(): - return update_dep_statuses(p, consumer, symbicache) - fatal(-1, 'WRONG PATHS TYPE') - -def dep_differ(paths, consumer: str, symbicache: SymbiCache): - """ - Check if a dependency differs from its last version, lack of dependency is - treated as "differs" - """ - - if type(paths) is str: - s = symbicache.get_status(paths, consumer) - if s == 'untracked': - symbicache.update(paths, consumer) - return symbicache.get_status(paths, consumer) != 'same' - elif type(paths) is list: - return True in [dep_differ(p, consumer, symbicache) for p in paths] - elif type(paths) is dict: - return True in [dep_differ(p, consumer, symbicache) \ - for _, p in paths.items()] - return False -def dep_will_differ(target: str, paths, consumer: str, - os_map: 'dict[str, Stage]', run_stages: 'set[str]', - symbicache: SymbiCache): - """ - Check if a dependency or any of the dependencies it depends on differ from - their last versions. - """ - - provider = os_map.get(target) - if provider: - return (provider.name in run_stages) or \ - dep_differ(paths, consumer, symbicache) - return dep_differ(paths, consumer, symbicache) - -def _print_unreachable_stage_message(provider: Stage, take: str): - sfprint(0, ' Stage ' - f'`{Style.BRIGHT + provider.name + Style.RESET_ALL}` is ' - 'unreachable due to unmet dependency ' - f'`{Style.BRIGHT + take.name + Style.RESET_ALL}`') - -def config_mod_runctx(stage: Stage, platform_name: str, values: 'dict[str, ]', - dep_paths: 'dict[str, str | list[str]]', - config_paths: 'dict[str, str | list[str]]'): - config = prepare_stage_input(stage, platform_name, values, - dep_paths, config_paths) - return ModRunCtx(share_dir_path, binpath, config) - -class Flow: - """ Describes a complete, configured flow, ready for execution. 
""" - - # Dependendecy to build - target: str - # Values in global scope - cfg: FlowConfig - # dependency-producer map - os_map: 'dict[str, Stage]' - # Paths resolved for dependencies - dep_paths: 'dict[str, str | list[str]]' - # Explicit configs for dependency paths - # config_paths: 'dict[str, str | list[str]]' - # Stages that need to be run - run_stages: 'set[str]' - # Number of stages that relied on outdated version of a (checked) dependency - deps_rebuilds: 'dict[str, int]' - symbicache: 'SymbiCache | None' - flow_cfg: FlowConfig - - def __init__(self, target: str, cfg: FlowConfig, - symbicache: 'SymbiCache | None'): - self.target = target - self.os_map = map_outputs_to_stages(cfg.stages.values()) - - explicit_deps = cfg.get_dependency_overrides() - # print(explicit_deps) - - self.dep_paths = dict(filter_existing_deps(explicit_deps, symbicache)) - self.run_stages = set() - self.symbicache = symbicache - self.cfg = cfg - self.deps_rebuilds = {} - - self._resolve_dependencies(self.target, set()) - - def _dep_will_differ(self, dep: str, paths, consumer: str): - if not self.symbicache: # Handle --nocache mode - return True - return dep_will_differ(dep, paths, consumer, - self.os_map, self.run_stages, - self.symbicache) - - def _resolve_dependencies(self, dep: str, stages_checked: 'set[str]'): - # Initialize the dependency status if necessary - if self.deps_rebuilds.get(dep) is None: - self.deps_rebuilds[dep] = 0 - # Check if an explicit dependency is already resolved - paths = self.dep_paths.get(dep) - if paths and not self.os_map.get(dep): - return - # Check if a stage can provide the required dependency - provider = self.os_map.get(dep) - if not provider or provider.name in stages_checked: - return - - # TODO: Check if the dependency is "on-demand" and force it in provider's - # config if it is. - - for take in provider.takes: - self._resolve_dependencies(take.name, stages_checked) - # If any of the required dependencies is unavailable, then the - # provider stage cannot be run - take_paths = self.dep_paths.get(take.name) - # Add input path to values (dirty hack) - provider.value_overrides[dep_value_str(take.name)] = take_paths - - if not take_paths and take.spec == 'req': - _print_unreachable_stage_message(provider, take) - return - - if self._dep_will_differ(take.name, take_paths, provider.name): - sfprint(2, f'{take.name} is causing rebuild for {provider.name}') - self.run_stages.add(provider.name) - self.deps_rebuilds[take.name] += 1 - - stage_values = self.cfg.get_r_env(provider.name).values - modrunctx = config_mod_runctx(provider, self.cfg.platform, - stage_values, self.dep_paths, - self.cfg.get_dependency_overrides()) - - outputs = module_map(provider.module, modrunctx) - - stages_checked.add(provider.name) - self.dep_paths.update(outputs) - - for _, out_paths in outputs.items(): - if (out_paths is not None) and not (req_exists(out_paths)): - self.run_stages.add(provider.name) - - # Verify module's outputs and add paths as values. - outs = outputs.keys() - # print(outs) - for o in provider.produces: - if o.name not in outs: - if o.spec == 'req' or (o.spec == 'demand' and \ - o.name in self.cfg.get_dependency_overrides().keys()): - fatal(-1, f'Module {provider.name} did not produce a mapping ' - f'for a required output `{o.name}`') - else: - # Remove an on-demand/optional output that is not produced - # from os_map. 
- self.os_map.pop(o.name) - # Add a value for the output (dirty ack yet again) - o_path = outputs.get(o.name) - - if o_path is not None: - provider.value_overrides[dep_value_str(o.name)] = \ - outputs.get(o.name) - - - def print_resolved_dependencies(self, verbosity: int): - deps = list(self.deps_rebuilds.keys()) - deps.sort() - - for dep in deps: - status = Fore.RED + '[X]' + Fore.RESET - source = Fore.YELLOW + 'MISSING' + Fore.RESET - paths = self.dep_paths.get(dep) - - if paths: - exists = req_exists(paths) - provider = self.os_map.get(dep) - if provider and provider.name in self.run_stages: - if exists: - status = Fore.YELLOW + '[R]' + Fore.RESET - else: - status = Fore.YELLOW + '[S]' + Fore.RESET - source = f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} ' \ - f'-> {paths}' - elif exists: - if self.deps_rebuilds[dep] > 0: - status = Fore.GREEN + '[N]' + Fore.RESET - else: - status = Fore.GREEN + '[O]' + Fore.RESET - source = paths - elif self.os_map.get(dep): - status = Fore.RED + '[U]' + Fore.RESET - source = \ - f'{Fore.BLUE + self.os_map[dep].name + Fore.RESET} -> ???' - - sfprint(verbosity, f' {Style.BRIGHT + status} ' - f'{dep + Style.RESET_ALL}: {source}') - - def _build_dep(self, dep): - paths = self.dep_paths.get(dep) - provider = self.os_map.get(dep) - run = (provider.name in self.run_stages) if provider else False - if not paths: - sfprint(2, f'Dependency {dep} is unresolved.') - return False - - if req_exists(paths) and not run: - return True - else: - assert(provider) - - any_dep_differ = False if self.symbicache else True - for p_dep in provider.takes: - if not self._build_dep(p_dep.name): - assert (p_dep.spec != 'req') - continue - - if self.symbicache: - any_dep_differ |= \ - update_dep_statuses(self.dep_paths[p_dep.name], - provider.name, self.symbicache) - - # If dependencies remained the same, consider the dep as up-to date - # For example, when changing a comment in Verilog source code, - # the initial dependency resolution will report a need for complete - # rebuild, however, after the synthesis stage, the generated eblif - # will reamin the same, thus making it unnecessary to continue the - # rebuild process. 
- if (not any_dep_differ) and req_exists(paths): - sfprint(2, f'Skipping rebuild of `' - f'{Style.BRIGHT + dep + Style.RESET_ALL}` because all ' - f'of it\'s dependencies remained unchanged') - return True - - stage_values = self.cfg.get_r_env(provider.name).values - modrunctx = config_mod_runctx(provider, self.cfg.platform, - stage_values, self.dep_paths, - self.cfg.get_dependency_overrides()) - module_exec(provider.module, modrunctx) - - self.run_stages.discard(provider.name) - - if not req_exists(paths): - raise DependencyNotProducedException(dep, provider.name) - - return True - - def execute(self): - self._build_dep(self.target) - if self.symbicache: - update_dep_statuses(self.dep_paths[self.target], '__target', - self.symbicache) - sfprint(0, f'Target `{Style.BRIGHT + self.target + Style.RESET_ALL}` ' - f'-> {self.dep_paths[self.target]}') - -def display_dep_info(stages: 'Iterable[Stage]'): - sfprint(0, 'Platform dependencies/targets:') - longest_out_name_len = 0 - for stage in stages: - for out in stage.produces: - l = len(out.name) - if l > longest_out_name_len: - longest_out_name_len = l - - desc_indent = longest_out_name_len + 7 - nl_indentstr = '\n' - for _ in range(0, desc_indent): - nl_indentstr += ' ' - - for stage in stages: - for out in stage.produces: - pname = Style.BRIGHT + out.name + Style.RESET_ALL - indent = '' - for _ in range(0, desc_indent - len(pname) + 3): - indent += ' ' - specstr = '???' - if out.spec == 'req': - specstr = f'{Fore.BLUE}guaranteed{Fore.RESET}' - elif out.spec == 'maybe': - specstr = f'{Fore.YELLOW}not guaranteed{Fore.RESET}' - elif out.spec == 'demand': - specstr = f'{Fore.RED}on-demand{Fore.RESET}' - pgen = f'{Style.DIM}stage: `{stage.name}`, '\ - f'spec: {specstr}{Style.RESET_ALL}' - pdesc = stage.meta[out.name].replace('\n', nl_indentstr) - sfprint(0, f' {Style.BRIGHT + out.name + Style.RESET_ALL}:' - f'{indent}{pdesc}{nl_indentstr}{pgen}') - -def display_stage_info(stage: Stage): - if stage is None: - sfprint(0, f'Stage does not exist') - sfbuild_fail() - return - - sfprint(0, f'Stage `{Style.BRIGHT}{stage.name}{Style.RESET_ALL}`:') - sfprint(0, f' Module: `{Style.BRIGHT}{stage.module.name}{Style.RESET_ALL}`') - sfprint(0, f' Module info:') - - mod_info = get_module_info(stage.module) - mod_info = '\n '.join(mod_info.split('\n')) - - sfprint(0, f' {mod_info}') - -sfbuild_done_str = Style.BRIGHT + Fore.GREEN + 'DONE' -sfbuild_silent = 0 - -def sfbuild_fail(): - global sfbuild_done_str - sfbuild_done_str = Style.BRIGHT + Fore.RED + 'FAILED' - -def sfbuild_done(): - sfprint(1, f'sfbuild: {sfbuild_done_str}' - f'{Style.RESET_ALL + Fore.RESET}') - exit(0) - -def setup_resolution_env(): - """ Sets up a ResolutionEnv with sfbuild's default built-ins. 
""" - - r_env = ResolutionEnv({ - 'shareDir': share_dir_path, - 'binDir': os.path.realpath(os.path.join(share_dir_path, '../../bin')) - }) - r_env.add_values(sf_ugly.generate_values()) - return r_env - -def open_project_flow_config(path: str) -> ProjectFlowConfig: - try: - flow_cfg = open_project_flow_cfg(path) - except FileNotFoundError as _: - fatal(-1, 'The provided flow configuration file does not exist') - return flow_cfg - -def verify_platform_stage_params(flow_cfg: FlowConfig, - platform: 'str | None' = None, - stage: 'str | None' = None): - if platform: - if not verify_platform_name(platform, mypath): - sfprint(0, f'Platform `{platform}`` is unsupported.') - return False - if args.platform not in flow_cfg.platforms(): - sfprint(0, f'Platform `{platform}`` is not in project.') - return False - - if stage: - if not verify_stage(platform, stage, mypath): - sfprint(0, f'Stage `{stage}` is invalid.') - sfbuild_fail() - return False - - return True - -def get_platform_name_for_part(part_name: str): - """ - Gets a name that identifies the platform setup required for a specific chip. - The reason for such distinction is that plenty of chips with different names - differ only in a type of package they use. - """ - - d: dict - with open(os.path.join(mypath, 'part_db/parts.json')) as f: - d = json.loads(f.read()) - return d.get(part_name.upper()) - -def cmd_build(args: Namespace): - """ sfbuild's `build` command implementation """ - - project_flow_cfg: ProjectFlowConfig = None - - - platform = args.platform - if platform is None: - if args.part: - platform = get_platform_name_for_part(args.part) - - if args.flow: - project_flow_cfg = open_project_flow_config(args.flow) - elif platform is not None: - project_flow_cfg = ProjectFlowConfig('.temp.flow.json') - project_flow_cfg.flow_cfg = get_cli_flow_config(args, platform) - if platform is None and project_flow_cfg is not None: - platform = project_flow_cfg.get_default_platform() - if platform is None: - fatal(-1, 'You have to specify a platform name or a part name or ' - 'configure a default platform.') - if platform is None or project_flow_cfg is None: - fatal(-1, 'No configuration was provided. 
Use `--flow`, `--platform` or ' - '`--part` to configure flow..') - - platform_path = os.path.join(mypath, 'platforms', platform + '.json') - platform_def = None - try: - with open(platform_path) as platform_file: - platform_def = platform_file.read() - except FileNotFoundError as _: - fatal(-1, f'The platform flow definition file {platform_path} for the platform ' - f'{platform} referenced in flow definition file {args.flow} ' - 'cannot be found.') - - r_env = setup_resolution_env() - - sfprint(2, 'Scanning modules...') - scan_modules(mypath) - - flow_definition_dict = json.loads(platform_def) - flow_def = FlowDefinition(flow_definition_dict, r_env) - flow_cfg = FlowConfig(project_flow_cfg, flow_def, platform) - - - if len(flow_cfg.stages) == 0: - fatal(-1, 'Platform flow does not define any stage') - - if args.info: - display_dep_info(flow_cfg.stages.values()) - sfbuild_done() - - if args.stageinfo: - display_stage_info(flow_cfg.stages.get(args.stageinfo[0])) - sfbuild_done() - - target = args.target - if target is None: - target = project_flow_cfg.get_default_target(platform) - if target is None: - fatal(-1, 'Please specify desired target using `--target` option ' - 'or configure a default target.') - - flow = Flow( - target=target, - cfg=flow_cfg, - symbicache=SymbiCache(SYMBICACHEPATH) if not args.nocache else None - ) - - dep_print_verbosity = 0 if args.pretend else 2 - sfprint(dep_print_verbosity, '\nProject status:') - flow.print_resolved_dependencies(dep_print_verbosity) - sfprint(dep_print_verbosity, '') - - if args.pretend: - sfbuild_done() - - try: - flow.execute() - except Exception as e: - sfprint(0, e) - sfbuild_fail() - - if flow.symbicache: - flow.symbicache.save() - -def cmd_show_dependencies(args: Namespace): - """ sfbuild's `showd` command implementation """ - - flow_cfg = open_project_flow_config(args.flow) - - if not verify_platform_stage_params(flow_cfg, args.platform): - sfbuild_fail() - return - - platform_overrides: 'set | None' = None - if args.platform is not None: - platform_overrides = \ - set(flow_cfg.get_dependency_platform_overrides(args.platform).keys()) - - display_list = [] - - raw_deps = flow_cfg.get_dependencies_raw(args.platform) - - for dep_name, dep_paths in raw_deps.items(): - prstr: str - if (platform_overrides is not None) and (dep_name in platform_overrides): - prstr = f'{Style.DIM}({args.platform}){Style.RESET_ALL} ' \ - f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' - else: - prstr = f'{Style.BRIGHT + dep_name + Style.RESET_ALL}: {dep_paths}' - - display_list.append((dep_name, prstr)) - - display_list.sort(key = lambda p: p[0]) - - for _, prstr in display_list: - sfprint(0, prstr) - - set_verbosity_level(-1) - -def main(): - parser = setup_argparser() - args = parser.parse_args() - - set_verbosity_level(args.verbose - (1 if args.silent else 0)) - - if args.command == 'build': - cmd_build(args) - sfbuild_done() - - if args.command == 'showd': - cmd_show_dependencies(args) - sfbuild_done() - - sfprint(0, 'Please use a command.\nUse `--help` flag to learn more.') - sfbuild_done() - -if __name__ == '__main__': - main() From c44a0e164cdfbb2a63a54127796e67acecbff53a Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Tue, 1 Mar 2022 04:02:51 +0100 Subject: [PATCH 19/33] f4pga: rm sf_ prefixes Signed-off-by: Unai Martinez-Corral --- f4pga/__init__.py | 37 ++++++++++++------- f4pga/{sf_argparse.py => argparser.py} | 30 +++++++-------- f4pga/{sf_cache.py => cache.py} | 0 f4pga/{sf_common.py => common.py} | 4 +- .../__init__.py | 0 
.../fasm.py | 4 +- .../generic_script_wrapper.py | 4 +- .../io_rename.py | 6 +-- .../mkdirs.py | 4 +- .../pack.py | 4 +- .../place.py | 4 +- .../place_constraints.py | 4 +- .../route.py | 4 +- .../synth.py | 4 +- f4pga/{sf_flow_config.py => flow_config.py} | 4 +- f4pga/{sf_module.py => module.py} | 2 +- ...odule_inspector.py => module_inspector.py} | 4 +- .../{sf_module_runner.py => module_runner.py} | 4 +- f4pga/setup.py | 2 +- f4pga/{sf_stage.py => stage.py} | 6 +-- f4pga/{sf_ugly.py => ugly.py} | 2 +- 21 files changed, 72 insertions(+), 61 deletions(-) rename f4pga/{sf_argparse.py => argparser.py} (99%) rename f4pga/{sf_cache.py => cache.py} (100%) rename f4pga/{sf_common.py => common.py} (98%) rename f4pga/{sf_common_modules => common_modules}/__init__.py (100%) rename f4pga/{sf_common_modules => common_modules}/fasm.py (97%) rename f4pga/{sf_common_modules => common_modules}/generic_script_wrapper.py (99%) rename f4pga/{sf_common_modules => common_modules}/io_rename.py (97%) rename f4pga/{sf_common_modules => common_modules}/mkdirs.py (95%) rename f4pga/{sf_common_modules => common_modules}/pack.py (97%) rename f4pga/{sf_common_modules => common_modules}/place.py (97%) rename f4pga/{sf_common_modules => common_modules}/place_constraints.py (96%) rename f4pga/{sf_common_modules => common_modules}/route.py (96%) rename f4pga/{sf_common_modules => common_modules}/synth.py (98%) rename f4pga/{sf_flow_config.py => flow_config.py} (99%) rename f4pga/{sf_module.py => module.py} (99%) rename f4pga/{sf_module_inspector.py => module_inspector.py} (94%) rename f4pga/{sf_module_runner.py => module_runner.py} (96%) rename f4pga/{sf_stage.py => stage.py} (94%) rename f4pga/{sf_ugly.py => ugly.py} (92%) diff --git a/f4pga/__init__.py b/f4pga/__init__.py index 4b91f25f9..a99dfe1cd 100755 --- a/f4pga/__init__.py +++ b/f4pga/__init__.py @@ -28,18 +28,29 @@ import json from typing import Iterable from colorama import Fore, Style -from f4pga.sf_common import ResolutionEnv, fatal, scan_modules, set_verbosity_level, \ - sfprint -from f4pga.sf_module import * -from f4pga.sf_cache import SymbiCache -import f4pga.sf_ugly as sf_ugly -from f4pga.sf_flow_config import ProjectFlowConfig, FlowConfig, FlowDefinition, \ - open_project_flow_cfg, verify_platform_name, \ - verify_stage -from f4pga.sf_module_runner import * -from f4pga.sf_module_inspector import get_module_info -from f4pga.sf_stage import Stage -from f4pga.sf_argparse import setup_argparser, get_cli_flow_config + +from f4pga.common import ( + ResolutionEnv, + fatal, + scan_modules, + set_verbosity_level, + sfprint +) +from f4pga.module import * +from f4pga.cache import SymbiCache +import f4pga.ugly as ugly +from f4pga.flow_config import ( + ProjectFlowConfig, + FlowConfig, + FlowDefinition, + open_project_flow_cfg, + verify_platform_name, + verify_stage +) +from f4pga.module_runner import * +from f4pga.module_inspector import get_module_info +from f4pga.stage import Stage +from f4pga.argparser import setup_argparser, get_cli_flow_config SYMBICACHEPATH = '.symbicache' @@ -462,7 +473,7 @@ def setup_resolution_env(): 'shareDir': share_dir_path, 'binDir': os.path.realpath(os.path.join(share_dir_path, '../../bin')) }) - r_env.add_values(sf_ugly.generate_values()) + r_env.add_values(ugly.generate_values()) return r_env def open_project_flow_config(path: str) -> ProjectFlowConfig: diff --git a/f4pga/sf_argparse.py b/f4pga/argparser.py similarity index 99% rename from f4pga/sf_argparse.py rename to f4pga/argparser.py index 1f7ba2e83..b9e3b6bb5 100644 --- 
a/f4pga/sf_argparse.py +++ b/f4pga/argparser.py @@ -53,7 +53,7 @@ def setup_argparser(): parser.add_argument('-v', '--verbose', action='count', default=0) parser.add_argument('-s', '--silent', action='store_true') - + subparsers = parser.add_subparsers(dest='command') build = subparsers.add_parser('build') _setup_build_parser(build) @@ -74,13 +74,13 @@ def _parse_depval(depvalstr: str): d = { 'name': None, 'stage': None, 'value': None } splitted = list(_unescaped_separated('=', depvalstr)) - + if len(splitted) != 2: raise Exception('Too many components') - + pathstr = splitted[0] valstr = splitted[1] - + path_components = pathstr.split('.') if len(path_components) < 1: raise Exception('Missing value') @@ -89,9 +89,9 @@ def _parse_depval(depvalstr: str): d['stage'] = path_components.pop(0) if len(path_components) > 0: raise Exception('Too many path components') - + d['value'] = _parse_cli_value(valstr) - + return d def _unescaped_matches(regexp: str, s: str, escape_chr='\\'): @@ -99,7 +99,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'): Find all occurences of a pattern in a string that contains escape sequences. Yields pairs of starting and ending indices of the pattern. """ - + noescapes = '' # We remove all escape sequnces from a string, so it will match only with @@ -117,7 +117,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'): offsets.append(offset) offset += 2 noescapes += noescape - + iter = re.finditer(regexp, noescapes) for m in iter: @@ -164,7 +164,7 @@ def _parse_cli_value(s: str): if len(s) == 0: return '' - + # List if s[0] == '[': if len(s) < 2 or s[len(s)-1] != ']': @@ -191,15 +191,15 @@ def _parse_cli_value(s: str): key = k_v[0] value = _parse_cli_value(k_v[1]) d[key] = value - + return d - + # Bool hack if s == '\\True': return True if s == '\\False': return False - + # Number hack if len(s) >= 3 and s[0:1] == '\\N': return int(s[2:]) @@ -213,9 +213,9 @@ def create_defdict(): 'dependencies': {}, 'values': {}, } - + platform_flow_config = create_defdict() - + def add_entries(arglist: 'list[str]', dict_name: str): for value_def in (_parse_depval(cliv) for cliv in arglist): stage = value_def['stage'] @@ -227,7 +227,7 @@ def add_entries(arglist: 'list[str]', dict_name: str): platform_flow_config[stage] = create_defdict() platform_flow_config[stage][dict_name][value_def['name']] = \ value_def['value'] - + add_entries(args.dep, 'dependencies') add_entries(args.val, 'values') diff --git a/f4pga/sf_cache.py b/f4pga/cache.py similarity index 100% rename from f4pga/sf_cache.py rename to f4pga/cache.py diff --git a/f4pga/sf_common.py b/f4pga/common.py similarity index 98% rename from f4pga/sf_common.py rename to f4pga/common.py index efb1e88ae..6cfd2dbc7 100644 --- a/f4pga/sf_common.py +++ b/f4pga/common.py @@ -31,9 +31,9 @@ def scan_modules(mypath: str): sfbuild_home = mypath sfbuild_home_dirs = os.listdir(sfbuild_home) sfbuild_module_dirs = \ - [dir for dir in sfbuild_home_dirs if re.match('sf_.*_modules$', dir)] + [dir for dir in sfbuild_home_dirs if re.match('.*_modules$', dir)] _sfbuild_module_collection_name_to_path = \ - dict([(re.match('sf_(.*)_modules$', moddir).groups()[0], + dict([(re.match('(.*)_modules$', moddir).groups()[0], os.path.join(sfbuild_home, moddir)) for moddir in sfbuild_module_dirs]) diff --git a/f4pga/sf_common_modules/__init__.py b/f4pga/common_modules/__init__.py similarity index 100% rename from f4pga/sf_common_modules/__init__.py rename to f4pga/common_modules/__init__.py diff --git a/f4pga/sf_common_modules/fasm.py 
b/f4pga/common_modules/fasm.py similarity index 97% rename from f4pga/sf_common_modules/fasm.py rename to f4pga/common_modules/fasm.py index 7446f954e..cfee9b421 100644 --- a/f4pga/sf_common_modules/fasm.py +++ b/f4pga/common_modules/fasm.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/generic_script_wrapper.py b/f4pga/common_modules/generic_script_wrapper.py similarity index 99% rename from f4pga/sf_common_modules/generic_script_wrapper.py rename to f4pga/common_modules/generic_script_wrapper.py index 632c9ecf3..d93c6ac36 100644 --- a/f4pga/sf_common_modules/generic_script_wrapper.py +++ b/f4pga/common_modules/generic_script_wrapper.py @@ -49,8 +49,8 @@ import shutil import re -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/io_rename.py b/f4pga/common_modules/io_rename.py similarity index 97% rename from f4pga/sf_common_modules/io_rename.py rename to f4pga/common_modules/io_rename.py index f14c0be60..da1f196ec 100644 --- a/f4pga/sf_common_modules/io_rename.py +++ b/f4pga/common_modules/io_rename.py @@ -27,9 +27,9 @@ # ----------------------------------------------------------------------------- # -from f4pga.sf_common import * -from f4pga.sf_module import * -from f4pga.sf_module_runner import get_module +from f4pga.common import * +from f4pga.module import * +from f4pga.module_runner import get_module # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/mkdirs.py b/f4pga/common_modules/mkdirs.py similarity index 95% rename from f4pga/sf_common_modules/mkdirs.py rename to f4pga/common_modules/mkdirs.py index 065549fcc..8b3d10542 100644 --- a/f4pga/sf_common_modules/mkdirs.py +++ b/f4pga/common_modules/mkdirs.py @@ -12,8 +12,8 @@ # ----------------------------------------------------------------------------- # import os -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/pack.py b/f4pga/common_modules/pack.py similarity index 97% rename from f4pga/sf_common_modules/pack.py rename to f4pga/common_modules/pack.py index 816cbfd38..1aeec6919 100644 --- a/f4pga/sf_common_modules/pack.py +++ b/f4pga/common_modules/pack.py @@ -6,8 +6,8 @@ import os import re -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/place.py b/f4pga/common_modules/place.py similarity index 97% rename from f4pga/sf_common_modules/place.py rename to f4pga/common_modules/place.py index 16bc007c2..1cefd100d 100644 --- a/f4pga/sf_common_modules/place.py +++ b/f4pga/common_modules/place.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # 
----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/place_constraints.py b/f4pga/common_modules/place_constraints.py similarity index 96% rename from f4pga/sf_common_modules/place_constraints.py rename to f4pga/common_modules/place_constraints.py index b48947161..04495f2ea 100644 --- a/f4pga/sf_common_modules/place_constraints.py +++ b/f4pga/common_modules/place_constraints.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/route.py b/f4pga/common_modules/route.py similarity index 96% rename from f4pga/sf_common_modules/route.py rename to f4pga/common_modules/route.py index a73dd7bdb..7ffb8ebbf 100644 --- a/f4pga/sf_common_modules/route.py +++ b/f4pga/common_modules/route.py @@ -6,8 +6,8 @@ import os import shutil -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_common_modules/synth.py b/f4pga/common_modules/synth.py similarity index 98% rename from f4pga/sf_common_modules/synth.py rename to f4pga/common_modules/synth.py index b72ff18c1..48cac762a 100755 --- a/f4pga/sf_common_modules/synth.py +++ b/f4pga/common_modules/synth.py @@ -5,8 +5,8 @@ # ----------------------------------------------------------------------------- # import os -from f4pga.sf_common import * -from f4pga.sf_module import * +from f4pga.common import * +from f4pga.module import * # ----------------------------------------------------------------------------- # diff --git a/f4pga/sf_flow_config.py b/f4pga/flow_config.py similarity index 99% rename from f4pga/sf_flow_config.py rename to f4pga/flow_config.py index 5ed59412e..43329f05b 100644 --- a/f4pga/sf_flow_config.py +++ b/f4pga/flow_config.py @@ -1,8 +1,8 @@ import os import json -from f4pga.sf_common import file_noext, ResolutionEnv, deep -from f4pga.sf_stage import Stage +from f4pga.common import file_noext, ResolutionEnv, deep +from f4pga.stage import Stage from copy import copy _realpath_deep = deep(os.path.realpath) diff --git a/f4pga/sf_module.py b/f4pga/module.py similarity index 99% rename from f4pga/sf_module.py rename to f4pga/module.py index a9a785e0d..e4a25691c 100644 --- a/f4pga/sf_module.py +++ b/f4pga/module.py @@ -2,7 +2,7 @@ import abc from types import SimpleNamespace -from f4pga.sf_common import * +from f4pga.common import * from colorama import Fore, Style class Module: diff --git a/f4pga/sf_module_inspector.py b/f4pga/module_inspector.py similarity index 94% rename from f4pga/sf_module_inspector.py rename to f4pga/module_inspector.py index dc62d6c30..b4f1bdde6 100644 --- a/f4pga/sf_module_inspector.py +++ b/f4pga/module_inspector.py @@ -1,5 +1,5 @@ -from f4pga.sf_module import Module -from f4pga.sf_common import decompose_depname +from f4pga.module import Module +from f4pga.common import decompose_depname from colorama import Style def _get_if_qualifier(deplist: 'list[str]', qualifier: str): diff --git a/f4pga/sf_module_runner.py b/f4pga/module_runner.py similarity index 96% rename from f4pga/sf_module_runner.py rename to f4pga/module_runner.py index 9dcb66579..33acf0e60 100644 --- a/f4pga/sf_module_runner.py +++ 
b/f4pga/module_runner.py @@ -4,8 +4,8 @@ import importlib import importlib.util import os -from f4pga.sf_module import Module, ModuleContext, get_mod_metadata -from f4pga.sf_common import ResolutionEnv, deep, sfprint +from f4pga.module import Module, ModuleContext, get_mod_metadata +from f4pga.common import ResolutionEnv, deep, sfprint from colorama import Fore, Style _realpath_deep = deep(os.path.realpath) diff --git a/f4pga/setup.py b/f4pga/setup.py index ef32f45a8..fac3b5314 100644 --- a/f4pga/setup.py +++ b/f4pga/setup.py @@ -78,7 +78,7 @@ def get_requirements(file: Path) -> List[str]: url="https://github.com/chipsalliance/f4pga", packages=[ "f4pga", - "f4pga.sf_common_modules", + "f4pga.common_modules", "f4pga.wrappers.sh", ], package_dir={"f4pga": "."}, diff --git a/f4pga/sf_stage.py b/f4pga/stage.py similarity index 94% rename from f4pga/sf_stage.py rename to f4pga/stage.py index 5aa9fe638..373087ca2 100644 --- a/f4pga/sf_stage.py +++ b/f4pga/stage.py @@ -1,6 +1,6 @@ -from f4pga.sf_common import decompose_depname, resolve_modstr -from f4pga.sf_module import Module -from f4pga.sf_module_runner import get_module, module_io +from f4pga.common import decompose_depname, resolve_modstr +from f4pga.module import Module +from f4pga.module_runner import get_module, module_io class StageIO: """ diff --git a/f4pga/sf_ugly.py b/f4pga/ugly.py similarity index 92% rename from f4pga/sf_ugly.py rename to f4pga/ugly.py index 55021377f..fdb0ec338 100644 --- a/f4pga/sf_ugly.py +++ b/f4pga/ugly.py @@ -1,7 +1,7 @@ """ The "ugly" module is dedicated for some *ugly* workarounds """ import os -from f4pga.sf_common import sub as common_sub +from f4pga.common import sub as common_sub def noisy_warnings(): """ Emit some noisy warnings """ From 382e09b3070a24831af353753cc7be2df791c7f2 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Sun, 24 Apr 2022 02:38:17 +0200 Subject: [PATCH 20/33] ci: merge workflow pyF4PGA into Pipeline Signed-off-by: Unai Martinez-Corral --- .github/workflows/Pipeline.yml | 61 +++++++++++++++++++++++++++++----- .github/workflows/pyF4PGA.yml | 59 -------------------------------- f4pga/__init__.py | 2 +- 3 files changed, 54 insertions(+), 68 deletions(-) delete mode 100644 .github/workflows/pyF4PGA.yml diff --git a/.github/workflows/Pipeline.yml b/.github/workflows/Pipeline.yml index 36532a73f..c8a08cff5 100644 --- a/.github/workflows/Pipeline.yml +++ b/.github/workflows/Pipeline.yml @@ -21,12 +21,13 @@ on: jobs: + Docs: runs-on: ubuntu-latest name: '📓 Docs' steps: - - name: '🧰 Checkout' + - name: 🧰 Checkout uses: actions/checkout@v3 with: submodules: recursive @@ -62,28 +63,28 @@ jobs: git push -u origin +HEAD:gh-pages - Example: + Deprecated: runs-on: ubuntu-latest - name: '🐍 Example' strategy: fail-fast: false matrix: include: - { fam: xc7, example: counter_test } - { fam: eos-s3, example: btn_counter } + name: '🚦 Example (deprecated sh) | ${{ matrix.fam }}' env: F4PGA_INSTALL_DIR: /opt/f4pga F4PGA_FAM: ${{ matrix.fam }} steps: - - name: '🧰 Checkout' + - name: 🧰 Checkout uses: actions/checkout@v3 - - name: '🔧 Prepare environment' + - name: 🔧 Prepare environment run: ./.github/scripts/prepare_environment.sh - - name: '🐍 Install f4pga (pip)' + - name: 🐍 Install f4pga (pip) run: | . ./.github/scripts/activate.sh @@ -91,14 +92,14 @@ jobs: pip install --use-feature=in-tree-build . cd .. - - name: '🚧 Test f4pga-env' + - name: 🚧 Test f4pga-env run: | . 
./.github/scripts/activate.sh echo "F4PGA_ENV_BIN=$(f4pga-env bin)" >> "$GITHUB_ENV" echo "F4PGA_ENV_SHARE=$(f4pga-env share)" >> "$GITHUB_ENV" - - name: '🚧 Test make example' + - name: 🚧 Test make example run: | . ./.github/scripts/activate.sh @@ -127,3 +128,47 @@ jobs: name: eos-s3-Bitstream path: f4pga-examples/eos-s3/btn_counter/build/top.bit if-no-files-found: error + + + pyF4PGA: + runs-on: ubuntu-latest + name: '🐍 Example' + env: + F4PGA_INSTALL_DIR: /opt/f4pga + F4PGA_FAM: xc7 + + steps: + + - name: 🧰 Checkout + uses: actions/checkout@v3 +# with: +# submodules: recursive + + - name: 🔧 Prepare environment + run: ./.github/scripts/prepare_environment.sh + + - name: 🐍 Install f4pga (pip) + run: | + . ./.github/scripts/activate.sh + + cd f4pga + pip install --use-feature=in-tree-build . + cd .. + + - name: 🚧 Test py4FPGA build + run: | + . ./.github/scripts/activate.sh + + cd f4pga-examples + f4pga build --flow ../.github/sftest.json -t bitstream + + - name: '📤 Upload artifact: Arty 35 bitstream' + uses: actions/upload-artifact@v3 + with: + name: arty_35-Bitstream-pyF4PGA + path: f4pga-examples/build/arty_35/top.bit + + - name: Test py4FPGA (PYTHONPATH) + run: | + PYTHONPATH=$(pwd) python3 f4pga/__init__.py + PYTHONPATH=$(pwd) python3 f4pga/__init__.py -h diff --git a/.github/workflows/pyF4PGA.yml b/.github/workflows/pyF4PGA.yml deleted file mode 100644 index da498f804..000000000 --- a/.github/workflows/pyF4PGA.yml +++ /dev/null @@ -1,59 +0,0 @@ -name: py4FPGA - -on: - push: - pull_request: - -jobs: - - - Test-pip: - runs-on: ubuntu-latest - strategy: - fail-fast: false - - steps: - - - uses: actions/checkout@v2 -# with: -# submodules: recursive - - - name: Prepare environment - run: | - sudo apt update -y - sudo apt install -y git wget xz-utils - - git clone --recurse-submodules https://github.com/chipsalliance/f4pga-examples - cd f4pga-examples - - wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O conda_installer.sh - - export INSTALL_DIR=/opt/f4pga - - bash conda_installer.sh -u -b -p $INSTALL_DIR/xc7/conda - source "$INSTALL_DIR/xc7/conda/etc/profile.d/conda.sh" - conda env create -f xc7/environment.yml - - mkdir -p $INSTALL_DIR/xc7/install - wget -qO- https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/foss-fpga-tools/symbiflow-arch-defs/continuous/install/535/20220128-000432/symbiflow-arch-defs-install-5fa5e715.tar.xz | tar -xJC $INSTALL_DIR/xc7/install - wget -qO- https://storage.googleapis.com/symbiflow-arch-defs/artifacts/prod/foss-fpga-tools/symbiflow-arch-defs/continuous/install/535/20220128-000432/symbiflow-arch-defs-xc7a50t_test-5fa5e715.tar.xz | tar -xJC $INSTALL_DIR/xc7/install - - - name: Test py4FPGA build - run: | - export INSTALL_DIR=/opt/f4pga - export PATH="$INSTALL_DIR/xc7/install/bin:$PATH"; - source "$INSTALL_DIR/xc7/conda/etc/profile.d/conda.sh" - - conda activate xc7 - - cd f4pga - pip install --use-feature=in-tree-build . - cd .. 
- - cd f4pga-examples - f4pga build --flow ../.github/sftest.json -t bitstream - - - name: Test py4FPGA (PYTHONPATH) - run: | - PYTHONPATH=$(pwd) python3 f4pga/__init__.py - PYTHONPATH=$(pwd) python3 f4pga/__init__.py -h diff --git a/f4pga/__init__.py b/f4pga/__init__.py index a99dfe1cd..a9f7e84d2 100755 --- a/f4pga/__init__.py +++ b/f4pga/__init__.py @@ -57,7 +57,7 @@ binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..')) mypath = str(Path(__file__).resolve().parent) -share_dir_path = os.path.realpath(f"{environ.get('INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow") +share_dir_path = os.path.realpath(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow") class DependencyNotProducedException(Exception): dep_name: str From dc58d412b93d5fc7203425c65817314460d4f567 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Fri, 4 Mar 2022 00:15:32 +0100 Subject: [PATCH 21/33] mv python toolchain wrappers into f4pga Signed-off-by: Unai Martinez-Corral --- .github/workflows/Pipeline.yml | 53 +++++++++++- f4pga/setup.py | 5 ++ .../wrappers/xc7}/__init__.py | 0 .../wrappers/xc7/common.py | 14 +-- f4pga/wrappers/xc7/place.py | 34 ++++++++ f4pga/wrappers/xc7/route.py | 30 +++++++ f4pga/wrappers/xc7/synth.py | 85 +++++++++++++++++++ f4pga/wrappers/xc7/write_fasm.py | 48 +++++++++++ xc/xc7/toolchain_wrappers/symbiflow_place.py | 28 ------ xc/xc7/toolchain_wrappers/symbiflow_route.py | 23 ----- xc/xc7/toolchain_wrappers/symbiflow_synth.py | 84 ------------------ .../symbiflow_write_fasm.py | 42 --------- 12 files changed, 260 insertions(+), 186 deletions(-) rename {xc/xc7/toolchain_wrappers => f4pga/wrappers/xc7}/__init__.py (100%) rename xc/xc7/toolchain_wrappers/symbiflow_common.py => f4pga/wrappers/xc7/common.py (91%) create mode 100644 f4pga/wrappers/xc7/place.py create mode 100644 f4pga/wrappers/xc7/route.py create mode 100755 f4pga/wrappers/xc7/synth.py create mode 100644 f4pga/wrappers/xc7/write_fasm.py delete mode 100644 xc/xc7/toolchain_wrappers/symbiflow_place.py delete mode 100644 xc/xc7/toolchain_wrappers/symbiflow_route.py delete mode 100755 xc/xc7/toolchain_wrappers/symbiflow_synth.py delete mode 100644 xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py diff --git a/.github/workflows/Pipeline.yml b/.github/workflows/Pipeline.yml index c8a08cff5..6cc03a277 100644 --- a/.github/workflows/Pipeline.yml +++ b/.github/workflows/Pipeline.yml @@ -132,7 +132,7 @@ jobs: pyF4PGA: runs-on: ubuntu-latest - name: '🐍 Example' + name: '🐍 Example | xc7' env: F4PGA_INSTALL_DIR: /opt/f4pga F4PGA_FAM: xc7 @@ -155,7 +155,7 @@ jobs: pip install --use-feature=in-tree-build . cd .. - - name: 🚧 Test py4FPGA build + - name: 🚧 Test f4pga build run: | . 
./.github/scripts/activate.sh @@ -168,7 +168,54 @@ jobs: name: arty_35-Bitstream-pyF4PGA path: f4pga-examples/build/arty_35/top.bit - - name: Test py4FPGA (PYTHONPATH) + + PYTHONPATH: + runs-on: ubuntu-latest + name: '🐍 PYTHONPATH' + env: + F4PGA_INSTALL_DIR: /opt/f4pga + F4PGA_FAM: xc7 + + steps: + + - name: 🧰 Checkout + uses: actions/checkout@v3 + + - name: 🚧 Test pyF4PGA (PYTHONPATH) run: | PYTHONPATH=$(pwd) python3 f4pga/__init__.py PYTHONPATH=$(pwd) python3 f4pga/__init__.py -h + + + pyWrappers: + runs-on: ubuntu-latest + name: '🐍 Python wrappers' + env: + F4PGA_INSTALL_DIR: /opt/f4pga + F4PGA_FAM: xc7 + + steps: + + - name: 🧰 Checkout + uses: actions/checkout@v3 + + - name: 🔧 Prepare environment + run: ./.github/scripts/prepare_environment.sh + + - name: 🐍 Install f4pga (pip) + run: | + . ./.github/scripts/activate.sh + + cd f4pga + pip install --use-feature=in-tree-build . + cd .. + + - name: 🚦 Test Python wrappers + run: | + . ./.github/scripts/activate.sh + + for tool in place route synth write-fasm; do + echo "::group::Test f4pga-$tool" + "f4pga-$tool" || echo "Failing?" + echo "::endgroup::" + done; diff --git a/f4pga/setup.py b/f4pga/setup.py index fac3b5314..d51e01fdc 100644 --- a/f4pga/setup.py +++ b/f4pga/setup.py @@ -80,6 +80,7 @@ def get_requirements(file: Path) -> List[str]: "f4pga", "f4pga.common_modules", "f4pga.wrappers.sh", + "f4pga.wrappers.xc7" ], package_dir={"f4pga": "."}, package_data={ @@ -92,6 +93,10 @@ def get_requirements(file: Path) -> List[str]: entry_points={ "console_scripts": [ "f4pga = f4pga.__init__:main", + "f4pga-place = f4pga.wrappers.xc7.place:main", + "f4pga-route = f4pga.wrappers.xc7.route:main", + "f4pga-synth = f4pga.wrappers.xc7.synth:main", + "f4pga-write-fasm = f4pga.wrappers.xc7.write_fasm:main", ] + wrapper_entrypoints }, ) diff --git a/xc/xc7/toolchain_wrappers/__init__.py b/f4pga/wrappers/xc7/__init__.py similarity index 100% rename from xc/xc7/toolchain_wrappers/__init__.py rename to f4pga/wrappers/xc7/__init__.py diff --git a/xc/xc7/toolchain_wrappers/symbiflow_common.py b/f4pga/wrappers/xc7/common.py similarity index 91% rename from xc/xc7/toolchain_wrappers/symbiflow_common.py rename to f4pga/wrappers/xc7/common.py index 41f8b45a0..a2306b173 100644 --- a/xc/xc7/toolchain_wrappers/symbiflow_common.py +++ b/f4pga/wrappers/xc7/common.py @@ -1,7 +1,9 @@ +from pathlib import Path import subprocess import argparse import os import shutil +from sys import argv as sys_argv class VprArgs: arch_dir: str @@ -17,7 +19,7 @@ class VprArgs: def __init__(self, mypath, args): self.arch_dir = \ - os.path.join(mypath, '../share/symbiflow/arch/', args.device) + str(Path(mypath) / '../share/symbiflow/arch' / args.device) self.arch_dir = os.path.realpath(self.arch_dir) self.arch_def = os.path.join(self.arch_dir, 'arch.timing.xml') self.lookahead = \ @@ -38,7 +40,7 @@ def __init__(self, mypath, args): self.optional = [] if args.sdc: self.optional += ['--sdc_file', args.sdc] - + def export(self): os.environ['ARCH_DIR'] = self.arch_dir os.environ['ARCH_DEF'] = self.arch_def @@ -51,7 +53,7 @@ def export(self): def setup_vpr_arg_parser(): parser = argparse.ArgumentParser(description="Parse flags") parser.add_argument('-d', '--device', nargs=1, metavar='', - type=str, help='Device type (e.g. artix7)') + type=str, help='Device type (e.g. 
artix7)', default='artix7') parser.add_argument('-e', '--eblif', nargs=1, metavar='', type=str, help='EBLIF filename') parser.add_argument('-p', '--pcf', nargs=1, metavar='', @@ -60,7 +62,7 @@ def setup_vpr_arg_parser(): type=str, help='Part name') parser.add_argument('-s', '--sdc', nargs=1, metavar='', type=str, help='SDC file') - parser.add_argument('-a', '--additional_vpr_options', metavar='', + parser.add_argument('-a', '--vpr_options', metavar='', type=str, help='Additional VPR options') parser.add_argument('additional_vpr_args', nargs='*', metavar='', type=str, help='Additional arguments for vpr command') @@ -91,9 +93,9 @@ def noisy_warnings(device): # Get current PWD def my_path(): - mypath = os.path.realpath(sys.argv[0]) + mypath = os.path.realpath(sys_argv[0]) return os.path.dirname(mypath) # Save VPR log def save_vpr_log(filename): - shutil.move('vpr_stdout.log', filename) \ No newline at end of file + shutil.move('vpr_stdout.log', filename) diff --git a/f4pga/wrappers/xc7/place.py b/f4pga/wrappers/xc7/place.py new file mode 100644 index 000000000..c253b1a7f --- /dev/null +++ b/f4pga/wrappers/xc7/place.py @@ -0,0 +1,34 @@ +#!/usr/bin/python3 + +import shutil +from f4pga.wrappers.xc7.common import ( + my_path, + setup_vpr_arg_parser, + VprArgs, + vpr +) + +def main(): + mypath = my_path() + parser = setup_vpr_arg_parser() + parser.add_argument('-n', '--net', nargs='+', metavar='', + type=str, help='NET filename') + args = parser.parse_args() + vprargs = VprArgs(mypath, args) + vprargs += ['--fix_clusters', 'constraints.place', '--place'] + vprargs.export() + + if not args.net: + print('Please provide NET filename') + exit(1) + + noisy_warnings() + + print('Generating constraints...\n') + + sub('symbiflow_generate_constraints', + args.eblif, args.net, args.part, vprargs.arch_def, args.pcf) + + vpr(vprargs) + + save_vpr_log('place.log') \ No newline at end of file diff --git a/f4pga/wrappers/xc7/route.py b/f4pga/wrappers/xc7/route.py new file mode 100644 index 000000000..305592cd5 --- /dev/null +++ b/f4pga/wrappers/xc7/route.py @@ -0,0 +1,30 @@ +#!/usr/bin/python3 + +import argparse +import subprocess +import os +import shutil +from f4pga.wrappers.xc7.common import ( + my_path, + setup_vpr_arg_parser, + VprArgs, + noisy_warnings, + vpr +) + +def main(): + mypath = my_path() + parser = setup_vpr_arg_parser() + args = parser.parse_args() + + vprargs = VprArgs(mypath, args) + vprargs.export() + + noisy_warnings(args.device) + + vprargs.optional += '--route' + + print('Routing...') + vpr(vprargs) + + save_vpr_log('route.log') diff --git a/f4pga/wrappers/xc7/synth.py b/f4pga/wrappers/xc7/synth.py new file mode 100755 index 000000000..9d60f3fd3 --- /dev/null +++ b/f4pga/wrappers/xc7/synth.py @@ -0,0 +1,85 @@ +#!/usr/bin/python3 + +import sys +import os +import argparse +from f4pga.wrappers.xc7.common import * + +def setup_arg_parser(): + parser = argparse.ArgumentParser(description="Parse flags") + parser.add_argument('-t', '--top', nargs=1, metavar='', + type=str, help='Top module name') + parser.add_argument('-v', '--verilog', nargs='+', metavar='', + type=str, help='Verilog file list') + parser.add_argument('-x', '--xdc', nargs='+', metavar='', + type=str, help='XDC file list') + parser.add_argument('-d', '--device', nargs=1, metavar='', + type=str, help='Device type (e.g. 
artix7)') + parser.add_argument('-p', '--part', nargs=1, metavar='', + type=str, help='Part name') + return parser + +def main(): + mypath = os.path.realpath(sys.argv[0]) + mypath = os.path.dirname(mypath) + + share_dir_path = os.path.realpath(os.path.join(mypath, '../share/symbiflow')) + techmap_path = os.path.join(share_dir_path, 'techmaps/xc7_vpr/techmap') + utils_path = os.path.join(share_dir_path, 'scripts') + synth_tcl_path = os.path.join(utils_path, 'xc7/synth.tcl') + conv_tcl_path = os.path.join(utils_path, 'xc7/conv.tcl') + split_inouts = os.path.join(utils_path, 'split_inouts.py') + + os.environ['SHARE_DIR_PATH'] = share_dir_path + os.environ['TECHMAP_PATH'] = techmap_path + os.environ['UTILS_PATH'] = utils_path + + parser = setup_arg_parser() + + args = parser.parse_args() + + if not os.environ['DATABASE_DIR']: + os.environ['DATABASE_DIR'] = sub(['prjxray-config']) + database_dir = os.environ['DATABASE_DIR'] + + # TODO: is this crossplatform??? + if not os.environ['PYTHON3']: + os.environ['PYTHON3'] = sub(['which', 'python3']) + + if not args.verilog: + print('Please provide at least one Verilog file\n') + exit(0) + if not args.top: + print('Top module must be specified\n') + exit(0) + if not args.device: + print('Device parameter required\n') + exit(0) + if not args.part: + print('Part parameter required\n') + exit(0) + + out_json = args.top + '.json' + synth_json = args.top + '_io.json' + log = args.top + '_synth.log' + + os.environ['TOP'] = args.top + os.environ['OUT_JSON'] = out_json + os.environ['OUT_SDC'] = args.top + '.sdc' + os.environ['SYNTH_JSON'] = synth_json + os.environ['OUT_SYNTH_V'] = args.top + '_synth.v' + os.environ['OUT_EBLIF'] = args.top + '.eblif' + os.environ['PART_JSON'] = \ + os.path.join(database_dir, args.device, args.part, 'part.json') + os.environ['OUT_FASM_EXTRA'] = args.top + '_fasm_extra.fasm' + + if args.xdc: + os.environ['INPUT_XDC_FILES'] = ' '.join(args.xdc) + + verilog_paths_str = ' '.join(args.verilog) + + print('------------------------------------> In symbiflow_synth!!!\n') + + sub('yosys', '-p', f'\"tcl {synth_tcl_path}\"', '-l', 'log', verilog_paths_str) + sub('python3', split_inouts, '-i', out_json, '-o', synth_json) + sub('yosys', '-p', f'\"read_json {synth_json}; tcl {conv_tcl_path}\"') diff --git a/f4pga/wrappers/xc7/write_fasm.py b/f4pga/wrappers/xc7/write_fasm.py new file mode 100644 index 000000000..b8413709d --- /dev/null +++ b/f4pga/wrappers/xc7/write_fasm.py @@ -0,0 +1,48 @@ +#!/usr/bin/python3 + +import shutil +import re +from f4pga.wrappers.xc7.common import ( + my_path, + setup_vpr_arg_parser, + VprArgs, + sub +) + +def main(): + mypath = my_path() + parser = setup_vpr_arg_parser() + args = parser.parse_args() + vprargs = VprArgs(mypath, args) + + + top = vprargs.eblif + top_ext_match = re.search('.*\\.[^.]*', vprargs.eblif) + if top_ext_match: + top = top[:top_ext_match.pos] + + fasm_extra = top + '_fasm_extra.fasm' + + noisy_warnings() + + sub('genfasm', + vprargs.arch_def, + vprargs.eblif, + '--device', vprargs.device_name, + vprargs.vpr_options, + '--read_rr_graph', vprargs.rr_graph) + + print(f'FASM extra: {fasm_extra}\n') + + # Concatenate top.fasm with extra.fasm if necessary + if os.path.isfile(fasm_extra): + print('writing final fasm') + with open(top + '.fasm', 'r+<') as top_file, open(fasm_extra) as extra_file: + cat = top_file.read() + cat += '\n' + cat += extra_file.read() + top_file.seek(0) + top_file.write(cat) + top_file.truncate() + + save_vpr_log('fasm.log') diff --git 
a/xc/xc7/toolchain_wrappers/symbiflow_place.py b/xc/xc7/toolchain_wrappers/symbiflow_place.py deleted file mode 100644 index e3a016d2b..000000000 --- a/xc/xc7/toolchain_wrappers/symbiflow_place.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/python3 - -import shutil -from symbiflow_common import * - -mypath = my_path() -parser = setup_vpr_arg_parser() -parser.add_argument('-n', '--net', nargs='+', metavar='', - type=str, help='NET filename') -args = parser.parse_args() -vprargs = VprArgs(mypath, args) -vprargs += ['--fix_clusters', 'constraints.place', '--place'] -vprargs.export() - -if not args.net: - print('Please provide NET filename') - exit(1) - -noisy_warnings() - -print('Generating constraints...\n') - -sub('symbiflow_generate_constraints', - args.eblif, args.net, args.part, vprargs.arch_def, args.pcf) - -vpr(vprargs) - -save_vpr_log('place.log') \ No newline at end of file diff --git a/xc/xc7/toolchain_wrappers/symbiflow_route.py b/xc/xc7/toolchain_wrappers/symbiflow_route.py deleted file mode 100644 index 099260cfd..000000000 --- a/xc/xc7/toolchain_wrappers/symbiflow_route.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/python3 - -import argparse -import subprocess -import os -import shutil -from symbiflow_common import * - -mypath = my_path() -parser = setup_vpr_arg_parser() -args = parser.parse_args() - -vprargs = VprArgs(mypath, args) -vprargs.export() - -noisy_warnings(args.device) - -vprargs.optional += '--route' - -print('Routing...') -vpr(vprargs) - -save_vpr_log('route.log') \ No newline at end of file diff --git a/xc/xc7/toolchain_wrappers/symbiflow_synth.py b/xc/xc7/toolchain_wrappers/symbiflow_synth.py deleted file mode 100755 index ae9745721..000000000 --- a/xc/xc7/toolchain_wrappers/symbiflow_synth.py +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/python3 - -import sys -import os -import argparse -from symbiflow_common import * - -def setup_arg_parser(): - parser = argparse.ArgumentParser(description="Parse flags") - parser.add_argument('-t', '--top', nargs=1, metavar='', - type=str, help='Top module name') - parser.add_argument('-v', '--verilog', nargs='+', metavar='', - type=str, help='Verilog file list') - parser.add_argument('-x', '--xdc', nargs='+', metavar='', - type=str, help='XDC file list') - parser.add_argument('-d', '--device', nargs=1, metavar='', - type=str, help='Device type (e.g. artix7)') - parser.add_argument('-p', '--part', nargs=1, metavar='', - type=str, help='Part name') - return parser - -mypath = os.path.realpath(sys.argv[0]) -mypath = os.path.dirname(mypath) - -share_dir_path = os.path.realpath(os.path.join(mypath, '../share/symbiflow')) -techmap_path = os.path.join(share_dir_path, 'techmaps/xc7_vpr/techmap') -utils_path = os.path.join(share_dir_path, 'scripts') -synth_tcl_path = os.path.join(utils_path, 'xc7/synth.tcl') -conv_tcl_path = os.path.join(utils_path, 'xc7/conv.tcl') -split_inouts = os.path.join(utils_path, 'split_inouts.py') - -os.environ['SHARE_DIR_PATH'] = share_dir_path -os.environ['TECHMAP_PATH'] = techmap_path -os.environ['UTILS_PATH'] = utils_path - -parser = setup_arg_parser() - -args = parser.parse_args() - -if not os.environ['DATABASE_DIR']: - os.environ['DATABASE_DIR'] = sub(['prjxray-config']) -database_dir = os.environ['DATABASE_DIR'] - -# TODO: is this crossplatform??? 
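The `TODO: is this crossplatform???` above (and the `which python3` call that follows) has a standard-library answer: `shutil.which` performs the PATH lookup portably on both POSIX and Windows, without spawning a subprocess. A minimal sketch of that alternative, where the plain `'python3'` fallback is an assumption for illustration:

    from os import environ
    from shutil import which

    # shutil.which returns the absolute path of the executable, or None
    # when it cannot be found on PATH; no external `which` binary is used.
    if not environ.get('PYTHON3'):
        environ['PYTHON3'] = which('python3') or 'python3'
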
-if not os.environ['PYTHON3']: - os.environ['PYTHON3'] = sub(['which', 'python3']) - -if not args.verilog: - print('Please provide at least one Verilog file\n') - exit(0) -if not args.top: - print('Top module must be specified\n') - exit(0) -if not args.device: - print('Device parameter required\n') - exit(0) -if not args.part: - print('Part parameter required\n') - exit(0) - -out_json = args.top + '.json' -synth_json = args.top + '_io.json' -log = args.top + '_synth.log' - -os.environ['TOP'] = args.top -os.environ['OUT_JSON'] = out_json -os.environ['OUT_SDC'] = args.top + '.sdc' -os.environ['SYNTH_JSON'] = synth_json -os.environ['OUT_SYNTH_V'] = args.top + '_synth.v' -os.environ['OUT_EBLIF'] = args.top + '.eblif' -os.environ['PART_JSON'] = \ - os.path.join(database_dir, args.device, args.part, 'part.json') -os.environ['OUT_FASM_EXTRA'] = args.top + '_fasm_extra.fasm' - -if args.xdc: - os.environ['INPUT_XDC_FILES'] = ' '.join(args.xdc) - -verilog_paths_str = ' '.join(args.verilog) - -print('------------------------------------> In symbiflow_synth!!!\n') - -sub('yosys', '-p', f'\"tcl {synth_tcl_path}\"', '-l', 'log', verilog_paths_str) -sub('python3', split_inouts, '-i', out_json, '-o', synth_json) -sub('yosys', '-p', f'\"read_json {synth_json}; tcl {conv_tcl_path}\"') diff --git a/xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py b/xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py deleted file mode 100644 index 373088fd9..000000000 --- a/xc/xc7/toolchain_wrappers/symbiflow_write_fasm.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/python3 - -import shutil -import re -from symbiflow_common import * - -mypath = my_path() -parser = setup_vpr_arg_parser() -args = parser.parse_args() -vprargs = VprArgs(args) - - -top = vprargs.eblif -top_ext_match = re.search('.*\\.[^.]*', vprargs.eblif) -if top_ext_match: - top = top[:top_ext_match.pos] - -fasm_extra = top + '_fasm_extra.fasm' - -noisy_warnings() - -sub('genfasm', - vprargs.arch_def, - vprargs.eblif, - '--device', vprargs.device_name, - vprargs.vpr_options, - '--read_rr_graph', vprargs.rr_graph) - -print(f'FASM extra: {fasm_extra}\n') - -# Concatenate top.fasm with extra.fasm if necessary -if os.path.isfile(fasm_extra): - print('writing final fasm') - with open(top + '.fasm', 'r+<') as top_file, open(fasm_extra) as extra_file: - cat = top_file.read() - cat += '\n' - cat += extra_file.read() - top_file.seek(0) - top_file.write(cat) - top_file.truncate() - -save_vpr_log('fasm.log') From d0641bdf5348be8a4617b76d920da59133b9d1f3 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Fri, 4 Mar 2022 01:02:11 +0100 Subject: [PATCH 22/33] f4pga/wrappers/xc7 * move helpers from common to __init__ * merge write_fasm, route and place into __init__ * rename common to vpr * synth: style * write_fasm: style Signed-off-by: Unai Martinez-Corral --- f4pga/setup.py | 7 +- f4pga/wrappers/__init__.py | 28 +++++ f4pga/wrappers/xc7/__init__.py | 113 +++++++++++++++++++ f4pga/wrappers/xc7/common.py | 101 ----------------- f4pga/wrappers/xc7/place.py | 34 ------ f4pga/wrappers/xc7/route.py | 30 ----- f4pga/wrappers/xc7/synth.py | 181 +++++++++++++++++++------------ f4pga/wrappers/xc7/vpr.py | 133 +++++++++++++++++++++++ f4pga/wrappers/xc7/write_fasm.py | 48 -------- 9 files changed, 389 insertions(+), 286 deletions(-) create mode 100644 f4pga/wrappers/__init__.py delete mode 100644 f4pga/wrappers/xc7/common.py delete mode 100644 f4pga/wrappers/xc7/place.py delete mode 100644 f4pga/wrappers/xc7/route.py create mode 100644 f4pga/wrappers/xc7/vpr.py delete mode 100644 
f4pga/wrappers/xc7/write_fasm.py diff --git a/f4pga/setup.py b/f4pga/setup.py index d51e01fdc..acb8ad94a 100644 --- a/f4pga/setup.py +++ b/f4pga/setup.py @@ -80,6 +80,7 @@ def get_requirements(file: Path) -> List[str]: "f4pga", "f4pga.common_modules", "f4pga.wrappers.sh", + "f4pga.wrappers", "f4pga.wrappers.xc7" ], package_dir={"f4pga": "."}, @@ -93,10 +94,10 @@ def get_requirements(file: Path) -> List[str]: entry_points={ "console_scripts": [ "f4pga = f4pga.__init__:main", - "f4pga-place = f4pga.wrappers.xc7.place:main", - "f4pga-route = f4pga.wrappers.xc7.route:main", + "f4pga-place = f4pga.wrappers.xc7.__init__:place", + "f4pga-route = f4pga.wrappers.xc7.__init__:route", "f4pga-synth = f4pga.wrappers.xc7.synth:main", - "f4pga-write-fasm = f4pga.wrappers.xc7.write_fasm:main", + "f4pga-write-fasm = f4pga.wrappers.xc7.__init__:write_fasm", ] + wrapper_entrypoints }, ) diff --git a/f4pga/wrappers/__init__.py b/f4pga/wrappers/__init__.py new file mode 100644 index 000000000..7c627bc54 --- /dev/null +++ b/f4pga/wrappers/__init__.py @@ -0,0 +1,28 @@ +from pathlib import Path +from os import environ +from sys import argv as sys_argv +from subprocess import run as subprocess_run + + +def run(*args): + """ + Execute subroutine + """ + out = subprocess_run(args, capture_output=True) + if out.returncode != 0: + raise(Exception(out.returncode)) + return out.stdout + + +def noisy_warnings(device): + """ + Emit some noisy warnings + """ + environ['OUR_NOISY_WARNINGS'] = f'noisy_warnings-{device}_pack.log' + + +def my_path(): + """ + Get current PWD + """ + return str(Path(sys_argv[0]).resolve().parent) diff --git a/f4pga/wrappers/xc7/__init__.py b/f4pga/wrappers/xc7/__init__.py index e69de29bb..7a9cf1473 100644 --- a/f4pga/wrappers/xc7/__init__.py +++ b/f4pga/wrappers/xc7/__init__.py @@ -0,0 +1,113 @@ +from pathlib import Path +from re import search as re_search + +from f4pga.wrappers import ( + my_path, + noisy_warnings, + run +) + +from f4pga.wrappers.xc7.vpr import ( + save_vpr_log, + setup_vpr_arg_parser, + VprArgs, + vpr +) + + +def place(): + parser = setup_vpr_arg_parser() + parser.add_argument( + '-n', + '--net', + nargs='+', + metavar='', + type=str, + help='NET filename' + ) + args = parser.parse_args() + + vprargs = VprArgs(my_path(), args) + [ + '--fix_clusters', + 'constraints.place', + '--place' + ] + vprargs.export() + + if not args.net: + print('Please provide NET filename') + exit(1) + + noisy_warnings() + + print('Generating constraints...\n') + + run( + 'symbiflow_generate_constraints', + args.eblif, + args.net, + args.part, + vprargs.arch_def, + args.pcf + ) + + vpr(vprargs) + + save_vpr_log('place.log') + + +def route(): + args = setup_vpr_arg_parser().parse_args() + + vprargs = VprArgs(my_path(), args) + vprargs.export() + + noisy_warnings(args.device) + + vprargs.optional += '--route' + + print('Routing...') + vpr(vprargs) + + save_vpr_log('route.log') + + +def write_fasm(): + vprargs = VprArgs( + my_path(), + setup_vpr_arg_parser().parse_args() + ) + + if vprargs.eblif is None: + raise(Exception("Argument EBLIF is required!")) + + top_ext_match = re_search('.*\\.[^.]*', vprargs.eblif) + top = top[:top_ext_match.pos] if top_ext_match else vprargs.eblif + + fasm_extra = top + '_fasm_extra.fasm' + + noisy_warnings() + + run( + 'genfasm', + vprargs.arch_def, + vprargs.eblif, + '--device', vprargs.device_name, + vprargs.vpr_options, + '--read_rr_graph', vprargs.rr_graph + ) + + print(f'FASM extra: {fasm_extra}\n') + + # Concatenate top.fasm with extra.fasm if necessary + if 
Path(fasm_extra).is_file(): + print('writing final fasm') + with open(top + '.fasm', 'r+<') as top_file, open(fasm_extra) as extra_file: + cat = top_file.read() + cat += '\n' + cat += extra_file.read() + top_file.seek(0) + top_file.write(cat) + top_file.truncate() + + save_vpr_log('fasm.log') diff --git a/f4pga/wrappers/xc7/common.py b/f4pga/wrappers/xc7/common.py deleted file mode 100644 index a2306b173..000000000 --- a/f4pga/wrappers/xc7/common.py +++ /dev/null @@ -1,101 +0,0 @@ -from pathlib import Path -import subprocess -import argparse -import os -import shutil -from sys import argv as sys_argv - -class VprArgs: - arch_dir: str - arch_def: str - lookahead: str - rr_graph: str - rr_graph_xml: str - place_delay: str - device_name: str - eblif: str - vpr_options: str - optional: list[str] - - def __init__(self, mypath, args): - self.arch_dir = \ - str(Path(mypath) / '../share/symbiflow/arch' / args.device) - self.arch_dir = os.path.realpath(self.arch_dir) - self.arch_def = os.path.join(self.arch_dir, 'arch.timing.xml') - self.lookahead = \ - os.path.join(self.arch_dir, - 'rr_graph_' + args.device + '.lookahead.bin') - self.rr_graph = \ - os.path.join(self.arch_dir, - 'rr_graph_' + args.device + '.rr_graph.real.bin') - self.rr_graph_xml = \ - os.path.join(self.arch_dir, - 'rr_graph_' + args.device + '.rr_graph.real.xml') - self.place_delay = \ - os.path.join(self.arch_dir, - 'rr_graph_' + args.device + '.place_delay.bin') - self.device_name = args.device.replace('_', '-') - self.eblif = args.eblif - self.vpr_options = args.vpr_options - self.optional = [] - if args.sdc: - self.optional += ['--sdc_file', args.sdc] - - def export(self): - os.environ['ARCH_DIR'] = self.arch_dir - os.environ['ARCH_DEF'] = self.arch_def - os.environ['LOOKAHEAD'] = self.lookahead - os.environ['RR_GRAPH'] = self.rr_graph - os.environ['RR_GRAPH_XML'] = self.rr_graph_xml - os.environ['PLACE_DELAY'] = self.place_delay - os.environ['DEVICE_NAME'] = self.device_name - -def setup_vpr_arg_parser(): - parser = argparse.ArgumentParser(description="Parse flags") - parser.add_argument('-d', '--device', nargs=1, metavar='', - type=str, help='Device type (e.g. 
artix7)', default='artix7') - parser.add_argument('-e', '--eblif', nargs=1, metavar='', - type=str, help='EBLIF filename') - parser.add_argument('-p', '--pcf', nargs=1, metavar='', - type=str, help='PCF filename') - parser.add_argument('-P', '--part', nargs=1, metavar='', - type=str, help='Part name') - parser.add_argument('-s', '--sdc', nargs=1, metavar='', - type=str, help='SDC file') - parser.add_argument('-a', '--vpr_options', metavar='', - type=str, help='Additional VPR options') - parser.add_argument('additional_vpr_args', nargs='*', metavar='', - type=str, help='Additional arguments for vpr command') - return parser - -# Exwecute subroutine -def sub(*args): - out = subprocess.run(args, capture_output=True) - if out.returncode != 0: - exit(out.returncode) - return out.stdout - -# Execute `vpr` -def vpr(vprargs: VprArgs): - return sub('vpr', - vprargs.arch_def, - vprargs.eblif, - '--device', vprargs.device_name, - vprargs.vpr_options, - '--read_rr_graph', vprargs.rr_graph, - '--read_router_lookahead', vprargs.lookahead, - 'read_placement_delay_lookup', vprargs.place_delay, - *vprargs.optional) - -# Emit some noisy warnings -def noisy_warnings(device): - os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings-' + device + '_pack.log' - -# Get current PWD -def my_path(): - mypath = os.path.realpath(sys_argv[0]) - return os.path.dirname(mypath) - -# Save VPR log -def save_vpr_log(filename): - shutil.move('vpr_stdout.log', filename) diff --git a/f4pga/wrappers/xc7/place.py b/f4pga/wrappers/xc7/place.py deleted file mode 100644 index c253b1a7f..000000000 --- a/f4pga/wrappers/xc7/place.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/python3 - -import shutil -from f4pga.wrappers.xc7.common import ( - my_path, - setup_vpr_arg_parser, - VprArgs, - vpr -) - -def main(): - mypath = my_path() - parser = setup_vpr_arg_parser() - parser.add_argument('-n', '--net', nargs='+', metavar='', - type=str, help='NET filename') - args = parser.parse_args() - vprargs = VprArgs(mypath, args) - vprargs += ['--fix_clusters', 'constraints.place', '--place'] - vprargs.export() - - if not args.net: - print('Please provide NET filename') - exit(1) - - noisy_warnings() - - print('Generating constraints...\n') - - sub('symbiflow_generate_constraints', - args.eblif, args.net, args.part, vprargs.arch_def, args.pcf) - - vpr(vprargs) - - save_vpr_log('place.log') \ No newline at end of file diff --git a/f4pga/wrappers/xc7/route.py b/f4pga/wrappers/xc7/route.py deleted file mode 100644 index 305592cd5..000000000 --- a/f4pga/wrappers/xc7/route.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/python3 - -import argparse -import subprocess -import os -import shutil -from f4pga.wrappers.xc7.common import ( - my_path, - setup_vpr_arg_parser, - VprArgs, - noisy_warnings, - vpr -) - -def main(): - mypath = my_path() - parser = setup_vpr_arg_parser() - args = parser.parse_args() - - vprargs = VprArgs(mypath, args) - vprargs.export() - - noisy_warnings(args.device) - - vprargs.optional += '--route' - - print('Routing...') - vpr(vprargs) - - save_vpr_log('route.log') diff --git a/f4pga/wrappers/xc7/synth.py b/f4pga/wrappers/xc7/synth.py index 9d60f3fd3..f209c24eb 100755 --- a/f4pga/wrappers/xc7/synth.py +++ b/f4pga/wrappers/xc7/synth.py @@ -1,85 +1,126 @@ -#!/usr/bin/python3 - -import sys -import os -import argparse -from f4pga.wrappers.xc7.common import * - -def setup_arg_parser(): - parser = argparse.ArgumentParser(description="Parse flags") - parser.add_argument('-t', '--top', nargs=1, metavar='', - type=str, help='Top module name') - 
parser.add_argument('-v', '--verilog', nargs='+', metavar='', - type=str, help='Verilog file list') - parser.add_argument('-x', '--xdc', nargs='+', metavar='', - type=str, help='XDC file list') - parser.add_argument('-d', '--device', nargs=1, metavar='', - type=str, help='Device type (e.g. artix7)') - parser.add_argument('-p', '--part', nargs=1, metavar='', - type=str, help='Part name') - return parser +from pathlib import Path +from sys import argv as sys_argv +from os import environ +from argparse import ArgumentParser +from f4pga.wrappers import run + + +def arg_parser(): + parser = ArgumentParser(description="Parse flags") + + parser.add_argument( + '-t', + '--top', + nargs=1, + metavar='', + type=str, + help='Top module name' + ) + + parser.add_argument( + '-v', + '--verilog', + nargs='+', + metavar='', + type=str, + help='Verilog file list' + ) + + parser.add_argument( + '-x', + '--xdc', + nargs='+', + metavar='', + type=str, + help='XDC file list' + ) + + parser.add_argument( + '-d', + '--device', + nargs=1, + metavar='', + type=str, + help='Device type (e.g. artix7)' + ) + + parser.add_argument( + '-p', + '--part', + nargs=1, + metavar='', + type=str, + help='Part name' + ) + + return parser.parse_args() -def main(): - mypath = os.path.realpath(sys.argv[0]) - mypath = os.path.dirname(mypath) - - share_dir_path = os.path.realpath(os.path.join(mypath, '../share/symbiflow')) - techmap_path = os.path.join(share_dir_path, 'techmaps/xc7_vpr/techmap') - utils_path = os.path.join(share_dir_path, 'scripts') - synth_tcl_path = os.path.join(utils_path, 'xc7/synth.tcl') - conv_tcl_path = os.path.join(utils_path, 'xc7/conv.tcl') - split_inouts = os.path.join(utils_path, 'split_inouts.py') - os.environ['SHARE_DIR_PATH'] = share_dir_path - os.environ['TECHMAP_PATH'] = techmap_path - os.environ['UTILS_PATH'] = utils_path +def main(): + share_dir_path = (Path(sys_argv[0]).resolve().parent / '../share/symbiflow').resolve() + utils_path = share_dir_path / 'scripts' - parser = setup_arg_parser() + environ['SHARE_DIR_PATH'] = str(share_dir_path) + environ['TECHMAP_PATH'] = str(share_dir_path / 'techmaps/xc7_vpr/techmap') + environ['UTILS_PATH'] = str(utils_path) - args = parser.parse_args() + args = arg_parser() - if not os.environ['DATABASE_DIR']: - os.environ['DATABASE_DIR'] = sub(['prjxray-config']) - database_dir = os.environ['DATABASE_DIR'] + database_dir = environ.get('DATABASE_DIR', str(run('prjxray-config'))) + environ['DATABASE_DIR'] = database_dir # TODO: is this crossplatform??? 
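Two caveats about the rewritten fallback above, given the `run()` helper introduced earlier (it unpacks `*args` and returns the child's raw `stdout` bytes): the default argument of `environ.get` is evaluated eagerly, so `prjxray-config` is spawned even when `DATABASE_DIR` is already set, and `str()` on captured bytes yields a `b'...'` literal rather than a path. A stricter sketch of the same logic, assuming that same `run()` helper:

    # Spawn prjxray-config only when DATABASE_DIR is unset; decode the
    # captured stdout bytes and drop the trailing newline.
    database_dir = environ.get('DATABASE_DIR')
    if database_dir is None:
        database_dir = run('prjxray-config').decode('utf-8').strip()
    environ['DATABASE_DIR'] = database_dir

For the same reason, `run(['which', 'python3'])` in the hunk below passes a single list where `run(*args)` expects separate arguments; `run('which', 'python3')` (or `shutil.which`) would match the helper's signature.
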
- if not os.environ['PYTHON3']: - os.environ['PYTHON3'] = sub(['which', 'python3']) + if 'PYTHON3' not in environ: + environ['PYTHON3'] = run(['which', 'python3']) if not args.verilog: - print('Please provide at least one Verilog file\n') - exit(0) + raise(Exception('Please provide at least one Verilog file\n')) + if not args.top: - print('Top module must be specified\n') - exit(0) + raise(Exception('Top module must be specified\n')) + if not args.device: - print('Device parameter required\n') - exit(0) - if not args.part: - print('Part parameter required\n') - exit(0) - - out_json = args.top + '.json' - synth_json = args.top + '_io.json' - log = args.top + '_synth.log' - - os.environ['TOP'] = args.top - os.environ['OUT_JSON'] = out_json - os.environ['OUT_SDC'] = args.top + '.sdc' - os.environ['SYNTH_JSON'] = synth_json - os.environ['OUT_SYNTH_V'] = args.top + '_synth.v' - os.environ['OUT_EBLIF'] = args.top + '.eblif' - os.environ['PART_JSON'] = \ - os.path.join(database_dir, args.device, args.part, 'part.json') - os.environ['OUT_FASM_EXTRA'] = args.top + '_fasm_extra.fasm' + raise(Exception('Device parameter required\n')) - if args.xdc: - os.environ['INPUT_XDC_FILES'] = ' '.join(args.xdc) + if not args.part: + raise(Exception('Part parameter required\n')) - verilog_paths_str = ' '.join(args.verilog) + out_json = f"{args.top}.json" + synth_json = f"{args.top}_io.json" + log = f"{args.top}_synth.log" - print('------------------------------------> In symbiflow_synth!!!\n') + environ['TOP'] = args.top + environ['OUT_JSON'] = out_json + environ['OUT_SDC'] = f"{args.top}.sdc" + environ['SYNTH_JSON'] = synth_json + environ['OUT_SYNTH_V'] = f"{args.top}_synth.v" + environ['OUT_EBLIF'] = f"{args.top}.eblif" + environ['PART_JSON'] = str(Path(database_dir) / f"{args.device}/{args.part}/part.json") + environ['OUT_FASM_EXTRA'] = args.top + '_fasm_extra.fasm' - sub('yosys', '-p', f'\"tcl {synth_tcl_path}\"', '-l', 'log', verilog_paths_str) - sub('python3', split_inouts, '-i', out_json, '-o', synth_json) - sub('yosys', '-p', f'\"read_json {synth_json}; tcl {conv_tcl_path}\"') + if args.xdc: + environ['INPUT_XDC_FILES'] = ' '.join(args.xdc) + + run( + 'yosys', + '-p', + f'\"tcl {(utils_path / "xc7/synth.tcl")!s}\"', + '-l', + 'log', + ' '.join(args.verilog) + ) + + run( + 'python3', + str(utils_path / 'split_inouts.py'), + '-i', + out_json, + '-o', + synth_json + ) + + run( + 'yosys', + '-p', + f'\"read_json {synth_json}; tcl {(utils_path / "xc7/conv.tcl")!s}\"' + ) diff --git a/f4pga/wrappers/xc7/vpr.py b/f4pga/wrappers/xc7/vpr.py new file mode 100644 index 000000000..7a54e1b5d --- /dev/null +++ b/f4pga/wrappers/xc7/vpr.py @@ -0,0 +1,133 @@ +from typing import List +from pathlib import Path +from argparse import ArgumentParser +from os import environ +from shutil import move as sh_mv + +from f4pga.wrappers import run + +class VprArgs: + arch_dir: Path + arch_def: Path + lookahead: Path + rr_graph: Path + rr_graph_xml: Path + place_delay: Path + device_name: Path + eblif: str + vpr_options: str + optional: List[str] + + def __init__(self, mypath, args): + self.arch_dir = (Path(mypath) / '../share/symbiflow/arch' / args.device).resolve() + self.arch_def = self.arch_dir / 'arch.timing.xml' + filename = f'rr_graph_{args.device}' + self.lookahead = self.arch_dir / f'{filename}.lookahead.bin' + self.rr_graph = self.arch_dir / f'{filename}.rr_graph.real.bin' + self.rr_graph_xml = self.arch_dir / f'{filename}.rr_graph.real.xml' + self.place_delay = self.arch_dir / f'{filename}.place_delay.bin' + 
self.device_name = args.device.replace('_', '-') + self.eblif = args.eblif + self.vpr_options = args.vpr_options + self.optional = ['--sdc_file', args.sdc] if args.sdc else [] + + def export(self): + environ['ARCH_DIR'] = str(self.arch_dir) + environ['ARCH_DEF'] = str(self.arch_def) + environ['LOOKAHEAD'] = str(self.lookahead) + environ['RR_GRAPH'] = str(self.rr_graph) + environ['RR_GRAPH_XML'] = str(self.rr_graph_xml) + environ['PLACE_DELAY'] = str(self.place_delay) + environ['DEVICE_NAME'] = str(self.device_name) + + +def setup_vpr_arg_parser(): + parser = ArgumentParser(description="Parse flags") + + parser.add_argument( + '-d', + '--device', + nargs=1, + metavar='', + type=str, + help='Device type (e.g. artix7)', + default='artix7' + ) + + parser.add_argument( + '-e', + '--eblif', + nargs=1, + metavar='', + type=str, + help='EBLIF filename' + ) + + parser.add_argument( + '-p', + '--pcf', + nargs=1, + metavar='', + type=str, + help='PCF filename' + ) + + parser.add_argument( + '-P', + '--part', + nargs=1, + metavar='', + type=str, + help='Part name' + ) + + parser.add_argument( + '-s', + '--sdc', + nargs=1, + metavar='', + type=str, + help='SDC file' + ) + + parser.add_argument( + '-a', + '--vpr_options', + metavar='', + type=str, + help='Additional VPR options' + ) + + parser.add_argument( + 'additional_vpr_args', + nargs='*', + metavar='', + type=str, + help='Additional arguments for vpr command' + ) + + return parser + + +def vpr(vprargs: VprArgs): + """ + Execute `vpr` + """ + return run( + 'vpr', + vprargs.arch_def, + vprargs.eblif, + '--device', vprargs.device_name, + vprargs.vpr_options, + '--read_rr_graph', vprargs.rr_graph, + '--read_router_lookahead', vprargs.lookahead, + 'read_placement_delay_lookup', vprargs.place_delay, + *vprargs.optional + ) + + +def save_vpr_log(filename): + """ + Save VPR log. 
+ """ + sh_mv('vpr_stdout.log', filename) diff --git a/f4pga/wrappers/xc7/write_fasm.py b/f4pga/wrappers/xc7/write_fasm.py deleted file mode 100644 index b8413709d..000000000 --- a/f4pga/wrappers/xc7/write_fasm.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/python3 - -import shutil -import re -from f4pga.wrappers.xc7.common import ( - my_path, - setup_vpr_arg_parser, - VprArgs, - sub -) - -def main(): - mypath = my_path() - parser = setup_vpr_arg_parser() - args = parser.parse_args() - vprargs = VprArgs(mypath, args) - - - top = vprargs.eblif - top_ext_match = re.search('.*\\.[^.]*', vprargs.eblif) - if top_ext_match: - top = top[:top_ext_match.pos] - - fasm_extra = top + '_fasm_extra.fasm' - - noisy_warnings() - - sub('genfasm', - vprargs.arch_def, - vprargs.eblif, - '--device', vprargs.device_name, - vprargs.vpr_options, - '--read_rr_graph', vprargs.rr_graph) - - print(f'FASM extra: {fasm_extra}\n') - - # Concatenate top.fasm with extra.fasm if necessary - if os.path.isfile(fasm_extra): - print('writing final fasm') - with open(top + '.fasm', 'r+<') as top_file, open(fasm_extra) as extra_file: - cat = top_file.read() - cat += '\n' - cat += extra_file.read() - top_file.seek(0) - top_file.write(cat) - top_file.truncate() - - save_vpr_log('fasm.log') From 26fb1d63b00d5d55c79b66d8c0840fbe6d2d4f3f Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Fri, 4 Mar 2022 03:06:43 +0100 Subject: [PATCH 23/33] f4pga: mv part_db/parts.json part_db.json Signed-off-by: Unai Martinez-Corral --- f4pga/__init__.py | 8 ++------ f4pga/{part_db/parts.json => part_db.json} | 0 f4pga/setup.py | 2 +- 3 files changed, 3 insertions(+), 7 deletions(-) rename f4pga/{part_db/parts.json => part_db.json} (100%) diff --git a/f4pga/__init__.py b/f4pga/__init__.py index a9f7e84d2..1dde88240 100755 --- a/f4pga/__init__.py +++ b/f4pga/__init__.py @@ -508,18 +508,14 @@ def get_platform_name_for_part(part_name: str): The reason for such distinction is that plenty of chips with different names differ only in a type of package they use. 
""" - - d: dict - with open(os.path.join(mypath, 'part_db/parts.json')) as f: - d = json.loads(f.read()) - return d.get(part_name.upper()) + with (Path(mypath) / 'part_db.json').open('r') as rfptr: + return json.load(rfptr).get(part_name.upper()) def cmd_build(args: Namespace): """ sfbuild's `build` command implementation """ project_flow_cfg: ProjectFlowConfig = None - platform = args.platform if platform is None: if args.part: diff --git a/f4pga/part_db/parts.json b/f4pga/part_db.json similarity index 100% rename from f4pga/part_db/parts.json rename to f4pga/part_db.json diff --git a/f4pga/setup.py b/f4pga/setup.py index acb8ad94a..1c96d23ef 100644 --- a/f4pga/setup.py +++ b/f4pga/setup.py @@ -85,7 +85,7 @@ def get_requirements(file: Path) -> List[str]: ], package_dir={"f4pga": "."}, package_data={ - 'f4pga': ['platforms/*.json'], + 'f4pga': ['*.json', 'platforms/*.json'], 'f4pga.wrappers.sh': ['xc7/*.f4pga.sh', 'quicklogic/*.f4pga.sh'] }, classifiers=[], From 636da72d325daec54dcd495f4705cf4f3a081437 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Fri, 4 Mar 2022 05:13:42 +0100 Subject: [PATCH 24/33] f4pga: cleanup and style Signed-off-by: Unai Martinez-Corral --- .gitignore | 1 + f4pga/__init__.py | 72 ++-- f4pga/argparser.py | 196 ++++++++--- f4pga/cache.py | 88 +++-- f4pga/common.py | 158 +++++---- f4pga/common_modules/__init__.py | 1 - f4pga/common_modules/fasm.py | 55 ++-- .../common_modules/generic_script_wrapper.py | 132 ++++---- f4pga/common_modules/io_rename.py | 16 +- f4pga/common_modules/mkdirs.py | 27 +- f4pga/common_modules/pack.py | 54 +-- f4pga/common_modules/place.py | 38 +-- f4pga/common_modules/place_constraints.py | 17 +- f4pga/common_modules/route.py | 38 +-- f4pga/common_modules/synth.py | 24 +- f4pga/flow_config.py | 309 ++++++++++++------ f4pga/module.py | 89 ++--- f4pga/module_runner.py | 66 ++-- f4pga/ugly.py | 19 -- 19 files changed, 781 insertions(+), 619 deletions(-) delete mode 100644 f4pga/ugly.py diff --git a/.gitignore b/.gitignore index cbf0fa605..8310e6d86 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ *.pyc *.sw* +/f4pga/build/ diff --git a/f4pga/__init__.py b/f4pga/__init__.py index 1dde88240..225998009 100755 --- a/f4pga/__init__.py +++ b/f4pga/__init__.py @@ -1,31 +1,28 @@ -#!/usr/bin/env python3 - """ -sfbuild - Symbiflow Build System +F4PGA Build System + +This tool allows for building FPGA targets (such as bitstreams) for any supported platform with just one simple command +and a project file. -This tool allows for building FPGA targets (such as bitstreams) for any supported -platform with just one simple command and a project file. +The idea is that F4PGA wraps all the tools needed by different platforms in "modules", which define inputs/outputs and +various parameters. +This allows F4PGA to resolve dependencies for any target provided that a "flow definition" file exists for such target. +The flow defeinition file list modules available for that platform and may tweak some settings of those modules. -The idea is that sfbuild wraps all the tools needed by different platforms in -"modules", which define inputs/outputs and various parameters. This allows -sfbuild to resolve dependencies for any target provided that a "flow definition" -file exists for such target. The flow defeinition file list modules available for -that platform and may tweak some settings of those modules. 
+A basic example of using F4PGA: -A basic example of using sfbuild: -$ sfbuild build --platform arty_35 -t bitstream +$ f4pga build --platform arty_35 -t bitstream -This will make sfbuild attempt to create a bitstream for arty_35 platform. -flow.json is a flow configuration file, which should be created for a project -that uses sfbuild. Iontains project-specific definitions needed within the flow, -such as list of source code files. +This will make F4PGA attempt to create a bitstream for arty_35 platform. +``flow.json`` is a flow configuration file, which should be created for a project that uses F4PGA. +Contains project-specific definitions needed within the flow, such as list of source code files. """ from pathlib import Path from argparse import Namespace -import os +from sys import argv as sys_argv from os import environ -import json +from json import load as json_load, loads as json_loads from typing import Iterable from colorama import Fore, Style @@ -34,11 +31,11 @@ fatal, scan_modules, set_verbosity_level, - sfprint + sfprint, + sub as common_sub ) from f4pga.module import * from f4pga.cache import SymbiCache -import f4pga.ugly as ugly from f4pga.flow_config import ( ProjectFlowConfig, FlowConfig, @@ -54,10 +51,10 @@ SYMBICACHEPATH = '.symbicache' -binpath = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(os.sys.argv[0])), '..')) +binpath = str(Path(sys_argv[0]).resolve().parent.parent) mypath = str(Path(__file__).resolve().parent) -share_dir_path = os.path.realpath(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow") +share_dir_path = str(Path(f"{environ.get('F4PGA_INSTALL_DIR', '/usr/local')}/xc7/install/share/symbiflow").resolve()) class DependencyNotProducedException(Exception): dep_name: str @@ -86,8 +83,7 @@ def req_exists(r): """ Checks whether a dependency exists on a drive. """ if type(r) is str: - if not os.path.isfile(r) and not os.path.islink(r) \ - and not os.path.isdir(r): + if not Path(r).is_file() and not Path(r).is_symlink() and not Path(r).is_dir(): return False elif type(r) is list: return not (False in map(req_exists, r)) @@ -471,9 +467,27 @@ def setup_resolution_env(): r_env = ResolutionEnv({ 'shareDir': share_dir_path, - 'binDir': os.path.realpath(os.path.join(share_dir_path, '../../bin')) + 'binDir': str((Path(share_dir_path) / '../../bin').resolve()) }) - r_env.add_values(ugly.generate_values()) + + def _noisy_warnings(): + """ + Emit some noisy warnings. + """ + environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log' + return 'noisy_warnings.log' + + def _generate_values(): + """ + Generate initial values, available in configs. + """ + return { + 'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''), + 'python3': common_sub('which', 'python3').decode().replace('\n', ''), + 'noisyWarnings': _noisy_warnings() + } + + r_env.add_values(_generate_values()) return r_env def open_project_flow_config(path: str) -> ProjectFlowConfig: @@ -509,7 +523,7 @@ def get_platform_name_for_part(part_name: str): differ only in a type of package they use. """ with (Path(mypath) / 'part_db.json').open('r') as rfptr: - return json.load(rfptr).get(part_name.upper()) + return json_load(rfptr).get(part_name.upper()) def cmd_build(args: Namespace): """ sfbuild's `build` command implementation """ @@ -535,7 +549,7 @@ def cmd_build(args: Namespace): fatal(-1, 'No configuration was provided. 
Use `--flow`, `--platform` or ' '`--part` to configure flow..') - platform_path = os.path.join(mypath, 'platforms', platform + '.json') + platform_path = str(Path(mypath) / f'platforms/{platform}.json') platform_def = None try: with open(platform_path) as platform_file: @@ -550,7 +564,7 @@ def cmd_build(args: Namespace): sfprint(2, 'Scanning modules...') scan_modules(mypath) - flow_definition_dict = json.loads(platform_def) + flow_definition_dict = json_loads(platform_def) flow_def = FlowDefinition(flow_definition_dict, r_env) flow_cfg = FlowConfig(project_flow_cfg, flow_def, platform) diff --git a/f4pga/argparser.py b/f4pga/argparser.py index b9e3b6bb5..831208bd6 100644 --- a/f4pga/argparser.py +++ b/f4pga/argparser.py @@ -1,69 +1,158 @@ from argparse import ArgumentParser, Namespace -import re +from re import finditer as re_finditer + def _add_flow_arg(parser: ArgumentParser): - parser.add_argument('-f', '--flow', metavar='flow_path', type=str, - help='Path to flow definition file') + parser.add_argument( + '-f', + '--flow', + metavar='flow_path', + type=str, + help='Path to flow definition file' + ) + def _setup_build_parser(parser: ArgumentParser): _add_flow_arg(parser) - parser.add_argument('-t', '--target', metavar='target_name', type=str, - help='Perform stages necessary to acquire target') - parser.add_argument('--platform', metavar='platform_name', - help='Target platform_name') - parser.add_argument('-P', '--pretend', action='store_true', - help='Show dependency resolution without executing flow') - parser.add_argument('-i', '--info', action='store_true', - help='Display info about available targets') - parser.add_argument('-c', '--nocache', action='store_true', - help='Ignore caching and rebuild everything up to the ' - 'target.') - parser.add_argument('-S', '--stageinfo', nargs=1, metavar='stage_name', - help='Display info about stage') - parser.add_argument('-r', '--requirements', action='store_true', - help='Display info about project\'s requirements.') - parser.add_argument('-p', '--part', metavar='part_name', - help='Name of the target chip') - parser.add_argument('--dep', '-D', action='append', default=[]) - parser.add_argument('--val', '-V', action='append', default=[]) + + parser.add_argument( + '-t', + '--target', + metavar='target_name', + type=str, + help='Perform stages necessary to acquire target' + ) + + parser.add_argument( + '--platform', + metavar='platform_name', + help='Target platform_name' + ) + + parser.add_argument( + '-P', + '--pretend', + action='store_true', + help='Show dependency resolution without executing flow' + ) + + parser.add_argument( + '-i', + '--info', + action='store_true', + help='Display info about available targets' + ) + + parser.add_argument( + '-c', + '--nocache', + action='store_true', + help='Ignore caching and rebuild everything up to the target.' + ) + + parser.add_argument( + '-S', + '--stageinfo', + nargs=1, + metavar='stage_name', + help='Display info about stage' + ) + + parser.add_argument( + '-r', + '--requirements', + action='store_true', + help='Display info about project\'s requirements.' + ) + + parser.add_argument( + '-p', + '--part', + metavar='part_name', + help='Name of the target chip' + ) + + parser.add_argument( + '--dep', + '-D', + action='append', + default=[] + ) + + parser.add_argument( + '--val', + '-V', + action='append', + default=[] + ) + # Currently unsupported - parser.add_argument('-M', '--moduleinfo', nargs=1, - metavar='module_name_or_path', - help='Display info about module. 
Requires `-p` option '
-                             'in case of module name')
-    parser.add_argument('-T', '--take_explicit_paths', nargs='+',
-                        metavar='', type=str,
-                        help='Specify stage inputs explicitely. This might be '
-                             'required if some files got renamed or deleted and '
-                             'symbiflow is unable to deduce the flow that lead '
-                             'to dependencies required by the requested stage')
+    parser.add_argument(
+        '-M',
+        '--moduleinfo',
+        nargs=1,
+        metavar='module_name_or_path',
+        help='Display info about module. Requires `-p` option in case of module name'
+    )
+
+    parser.add_argument(
+        '-T',
+        '--take_explicit_paths',
+        nargs='+',
+        metavar='',
+        type=str,
+        help='Specify stage inputs explicitly. This might be required if some files got renamed or deleted and '
+             'symbiflow is unable to deduce the flow that led to dependencies required by the requested stage'
+    )
+

 def _setup_show_dep_parser(parser: ArgumentParser):
-    parser.add_argument('-p', '--platform', metavar='platform_name', type=str,
-                        help='Name of the platform (use to display '
-                             'platform-specific values.')
-    parser.add_argument('-s', '--stage', metavar='stage_name', type=str,
-                        help='Name of the stage (use if you want to set the '
-                             'value only for that stage). Requires `-p`.')
+    parser.add_argument(
+        '-p',
+        '--platform',
+        metavar='platform_name',
+        type=str,
+        help='Name of the platform (use to display platform-specific values).'
+    )
+
+    parser.add_argument(
+        '-s',
+        '--stage',
+        metavar='stage_name',
+        type=str,
+        help='Name of the stage (use if you want to set the value only for that stage). Requires `-p`.'
+    )
+
     _add_flow_arg(parser)

-# Set up argument parser for the program. Pretty self-explanatory.
+
 def setup_argparser():
+    """
+    Set up argument parser for the program.
+    """
     parser = ArgumentParser(description='SymbiFlow Build System')

-    parser.add_argument('-v', '--verbose', action='count', default=0)
-    parser.add_argument('-s', '--silent', action='store_true')
+    parser.add_argument(
+        '-v',
+        '--verbose',
+        action='count',
+        default=0
+    )
+
+    parser.add_argument(
+        '-s',
+        '--silent',
+        action='store_true'
+    )

     subparsers = parser.add_subparsers(dest='command')
-    build = subparsers.add_parser('build')
-    _setup_build_parser(build)
-    show_dep = subparsers.add_parser('showd',
-                                     description='Show the value(s) assigned to a '
-                                                 'dependency')
+    _setup_build_parser(subparsers.add_parser('build'))
+    show_dep = subparsers.add_parser('showd', description='Show the value(s) assigned to a dependency')
     _setup_show_dep_parser(show_dep)

     return parser

+
 def _parse_depval(depvalstr: str):
     """
     Parse a dependency or value definition in form of:
@@ -94,6 +183,7 @@

     return d

+
 def _unescaped_matches(regexp: str, s: str, escape_chr='\\'):
     """
     Find all occurences of a pattern in a string that contains escape sequences.
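    Yields (start, end) offset pairs into the original string that correspond to matches found in a copy of the
    string with its escape sequences stripped (a summary of the behaviour of the implementation below).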
@@ -109,8 +199,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'): offsets = [] offset = 0 for sl in s.split(escape_chr): - l = len(sl) - if l <= 1: + if len(sl) <= 1: continue noescape = sl[(1 if offset != 0 else 0):] for _ in noescape: @@ -118,7 +207,7 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'): offset += 2 noescapes += noescape - iter = re.finditer(regexp, noescapes) + iter = re_finditer(regexp, noescapes) for m in iter: start = m.start() @@ -127,10 +216,13 @@ def _unescaped_matches(regexp: str, s: str, escape_chr='\\'): off2 = end + offsets[end] yield off1, off2 + def _unescaped_separated(regexp: str, s: str, escape_chr='\\'): - """ Yields substrings of a string that contains escape sequences. """ + """ + Yields substrings of a string that contains escape sequences. + """ - last_end = 0; + last_end = 0 for start, end in _unescaped_matches(regexp, s, escape_chr=escape_chr): yield s[last_end:start] last_end = end @@ -139,6 +231,7 @@ def _unescaped_separated(regexp: str, s: str, escape_chr='\\'): else: yield '' + def _parse_cli_value(s: str): """ Parse a value/dependency passed to CLI @@ -207,6 +300,7 @@ def _parse_cli_value(s: str): # String return s.replace('\\', '') + def get_cli_flow_config(args: Namespace, platform: str): def create_defdict(): return { diff --git a/f4pga/cache.py b/f4pga/cache.py index fd11177f6..08d0ec5cc 100755 --- a/f4pga/cache.py +++ b/f4pga/cache.py @@ -1,19 +1,13 @@ -import os -import zlib -import json +from pathlib import Path +from zlib import adler32 as zlib_adler32 +from json import dump as json_dump, load as json_load, JSONDecodeError -def _get_file_hash(path: str): - with open(path, 'rb') as f: - b = f.read() - return str(zlib.adler32(b)) class SymbiCache: """ - `SymbiCache` is used to track changes among dependencies and keep - the status of the files on a persistent storage. + `SymbiCache` is used to track changes among dependencies and keep the status of the files on a persistent storage. Files which are tracked get their checksums calculated and stored in a file. - If file's checksum differs from the one saved in a file, that means, the file - has changed. + If file's checksum differs from the one saved in a file, that means, the file has changed. """ hashes: 'dict[str, dict[str, str]]' @@ -21,13 +15,14 @@ class SymbiCache: cachefile_path: str def __init__(self, cachefile_path): - """ `chachefile_path` - path to a file used for persistent storage of - checksums. """ + """ + `chachefile_path` - path to a file used for persistent storage of checksums. 
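+
+        A minimal usage sketch (hypothetical paths):
+
+            cache = SymbiCache('.symbicache')
+            if cache.update('build/top.eblif', 'pack'):
+                pass  # the file changed (or is new) since the last run
+            cache.save()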
+ """ self.status = {} self.cachefile_path = cachefile_path self.load() - + def _try_pop_consumer(self, path: str, consumer: str): if self.status.get(path) and self.status[path].get(consumer): self.status[path].pop(consumer) @@ -37,7 +32,7 @@ def _try_pop_consumer(self, path: str, consumer: str): self.hashes[path].pop(consumer) if len(self.hashes[path]) == 0: self.hashes.pop(path) - + def _try_push_consumer_hash(self, path: str, consumer: str, hash): if not self.hashes.get(path): self.hashes[path] = {} @@ -46,43 +41,39 @@ def _try_push_consumer_status(self, path: str, consumer: str, status): if not self.status.get(path): self.status[path] = {} self.status[path][consumer] = status - - def _get_last_hash(self, path: str, consumer: str): - last_hashes = self.hashes.get(path) - if last_hashes is None: - return None - return last_hashes.get(consumer) def update(self, path: str, consumer: str): - """ Add/remove a file to.from the tracked files, update checksum - if necessary and calculate status. + """ Add/remove a file to.from the tracked files, update checksum if necessary and calculate status. Multiple hashes are stored per file, one for each consumer module. - "__target" is used as a convention for a "fake" consumer in case the file - is requested as a target and not used by a module within the active flow. + "__target" is used as a convention for a "fake" consumer in case the file is requested as a target and not used + by a module within the active flow. """ - isdir = os.path.isdir(path) - if not (os.path.isfile(path) or os.path.islink(path) or isdir): + isdir = Path(path).is_dir() + if not (Path(path).is_file() or Path(path).is_symlink() or isdir): self._try_pop_consumer(path, consumer) return True hash = 0 # Directories always get '0' hash. if not isdir: - hash = _get_file_hash(path) - last_hash = self._get_last_hash(path, consumer) + with Path(path).open('rb') as rfptr: + hash = str(zlib_adler32(rfptr.read())) + + last_hashes = self.hashes.get(path) + last_hash = None if last_hashes is None else last_hashes.get(consumer) + if hash != last_hash: self._try_push_consumer_status(path, consumer, 'changed') self._try_push_consumer_hash(path, consumer, hash) return True - else: - self._try_push_consumer_status(path, consumer, 'same') - return False - + self._try_push_consumer_status(path, consumer, 'same') + return False + def get_status(self, path: str, consumer: str): """ Get status for a file with a given path. - returns 'untracked' if the file is not tracked or hasn't been - treated with `update` procedure before calling `get_status`. """ - + returns 'untracked' if the file is not tracked or hasn't been treated with `update` procedure before calling + `get_status`. + """ statuses = self.status.get(path) if not statuses: return 'untracked' @@ -90,26 +81,23 @@ def get_status(self, path: str, consumer: str): if not status: return 'untracked' return status - + def load(self): """Loads cache's state from the persistent storage""" try: - with open(self.cachefile_path, 'r') as f: - b = f.read() - self.hashes = json.loads(b) - except json.JSONDecodeError as jerr: - print('WARNING: .symbicache is corrupted! ' - 'This will cause flow to re-execute from the beggining.') + with Path(self.cachefile_path).open('r') as rfptr: + self.hashes = json_load(rfptr) + except JSONDecodeError as jerr: + print("""WARNING: .symbicache is corrupted! +This will cause flow to re-execute from the beggining.""") self.hashes = {} except FileNotFoundError: - print('Couldn\'t open .symbicache cache file. 
'
-                  'This will cause flow to re-execute from the beggining.')
+            print("""Couldn\'t open .symbicache cache file.
+This will cause flow to re-execute from the beginning.""")
             self.hashes = {}

     def save(self):
-        """Saves cache's state to the persistent storage"""
-
-        with open(self.cachefile_path, 'w') as f:
-            b = json.dumps(self.hashes, indent=4)
-            f.write(b)
\ No newline at end of file
+        """Saves cache's state to the persistent storage."""
+        with Path(self.cachefile_path).open('w') as wfptr:
+            json_dump(self.hashes, wfptr, indent=4)
diff --git a/f4pga/common.py b/f4pga/common.py
index 6cfd2dbc7..4510026b7 100644
--- a/f4pga/common.py
+++ b/f4pga/common.py
@@ -1,9 +1,11 @@
+from pathlib import Path
+from os import environ, listdir as os_listdir
+from sys import argv as sys_argv
 from argparse import Namespace
-import subprocess
-import os
-import shutil
-import sys
-import re
+from shutil import move as sh_mv
+from subprocess import run
+from re import match as re_match, finditer as re_finditer
+

 def decompose_depname(name: str):
     spec = 'req'
@@ -16,6 +18,7 @@
         name = name[:len(name) - 1]
     return name, spec

+
 def with_qualifier(name: str, q: str) -> str:
     if q == 'req':
         return decompose_depname(name)[0]
@@ -24,25 +27,33 @@
     if q == 'demand':
         return decompose_depname(name)[0] + '!'

+
 _sfbuild_module_collection_name_to_path = {}
+
+
 def scan_modules(mypath: str):
     global _sfbuild_module_collection_name_to_path
     sfbuild_home = mypath
-    sfbuild_home_dirs = os.listdir(sfbuild_home)
+    sfbuild_home_dirs = os_listdir(sfbuild_home)
     sfbuild_module_dirs = \
-        [dir for dir in sfbuild_home_dirs if re.match('.*_modules$', dir)]
-    _sfbuild_module_collection_name_to_path = \
-        dict([(re.match('(.*)_modules$', moddir).groups()[0],
-               os.path.join(sfbuild_home, moddir))
-              for moddir in sfbuild_module_dirs])
+        [dir for dir in sfbuild_home_dirs if re_match('.*_modules$', dir)]
+    _sfbuild_module_collection_name_to_path = dict([
+        (
+            re_match('(.*)_modules$', moddir).groups()[0],
+            str(Path(sfbuild_home) / moddir)
+        )
+        for moddir in sfbuild_module_dirs
+    ])
+

-"""Resolves module location from modulestr"""
 def resolve_modstr(modstr: str):
+    """
+    Resolves module location from modulestr.
+    """
     sl = modstr.split(':')
     if len(sl) > 2:
-        raise Exception('Incorrect module sysntax. '
-                        'Expected one \':\' or one \'::\'')
+        raise Exception('Incorrect module syntax. Expected one \':\' or one \'::\'')
     if len(sl) < 2:
         return modstr
     collection_name = sl[0]
@@ -51,14 +62,13 @@
     col_path = _sfbuild_module_collection_name_to_path.get(collection_name)
     if not col_path:
         fatal(-1, f'Module collection {collection_name} does not exist')
-    return os.path.join(col_path, module_filename)
+    return str(Path(col_path) / module_filename)

+
 def deep(fun):
     """
-    Create a recursive string transform function for 'str | list | dict',
-    i.e a dependency
+    Create a recursive string transform function for 'str | list | dict', i.e. a dependency.
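+
+    For example (illustrative):
+
+        >>> deep(str.upper)(['a.v', {'k': 'b.v'}])
+        ['A.V', {'k': 'B.V'}]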
""" - def d(paths, *args, **kwargs): if type(paths) is str: return fun(paths) @@ -66,18 +76,13 @@ def d(paths, *args, **kwargs): return [d(p) for p in paths]; elif type(paths) is dict: return dict([(k, d(p)) for k, p in paths.items()]) - return d -def file_noext(path: str): - """ Return a file without it's extenstion""" - m = re.match('(.*)\\.[^.]*$', path) - if m: - path = m.groups()[0] - return path class VprArgs: - """ Represents argument list for VPR (Versatile Place and Route) """ + """ + Represents argument list for VPR (Versatile Place and Route). + """ arch_dir: str arch_def: str @@ -91,13 +96,13 @@ class VprArgs: def __init__(self, share: str, eblif, values: Namespace, sdc_file: 'str | None' = None, vpr_extra_opts: 'list | None' = None): - self.arch_dir = os.path.join(share, 'arch') + self.arch_dir = str(Path(share) / 'arch') self.arch_def = values.arch_def self.lookahead = values.rr_graph_lookahead_bin self.rr_graph = values.rr_graph_real_bin self.place_delay = values.vpr_place_delay self.device_name = values.vpr_grid_layout_name - self.eblif = os.path.realpath(eblif) + self.eblif = str(Path(eblif).resolve()) if values.vpr_options is not None: self.optional = options_dict_to_list(values.vpr_options) else: @@ -107,13 +112,17 @@ def __init__(self, share: str, eblif, values: Namespace, if sdc_file is not None: self.optional += ['--sdc_file', sdc_file] + class SubprocessException(Exception): return_code: int + def sub(*args, env=None, cwd=None): - """ Execute subroutine """ + """ + Execute subroutine. + """ - out = subprocess.run(args, capture_output=True, env=env, cwd=cwd) + out = run(args, capture_output=True, env=env, cwd=cwd) if out.returncode != 0: print(f'[ERROR]: {args[0]} non-zero return code.\n' f'stderr:\n{out.stderr.decode()}\n\n' @@ -121,8 +130,11 @@ def sub(*args, env=None, cwd=None): exit(out.returncode) return out.stdout + def vpr(mode: str, vprargs: VprArgs, cwd=None): - """ Execute `vpr` """ + """ + Execute `vpr`. + """ modeargs = [] if mode == 'pack': @@ -132,15 +144,17 @@ def vpr(mode: str, vprargs: VprArgs, cwd=None): elif mode == 'route': modeargs = ['--route'] - return sub(*(['vpr', - vprargs.arch_def, - vprargs.eblif, - '--device', vprargs.device_name, - '--read_rr_graph', vprargs.rr_graph, - '--read_router_lookahead', vprargs.lookahead, - '--read_placement_delay_lookup', vprargs.place_delay] + - modeargs + vprargs.optional), - cwd=cwd) + return sub(*([ + 'vpr', + vprargs.arch_def, + vprargs.eblif, + '--device', vprargs.device_name, + '--read_rr_graph', vprargs.rr_graph, + '--read_router_lookahead', vprargs.lookahead, + '--read_placement_delay_lookup', vprargs.place_delay + ] + modeargs + vprargs.optional), cwd=cwd) + + _vpr_specific_values = [ 'arch_def', @@ -150,10 +164,13 @@ def vpr(mode: str, vprargs: VprArgs, cwd=None): 'vpr_grid_layout_name', 'vpr_options?' ] + + def vpr_specific_values(): global _vpr_specific_values return _vpr_specific_values + def options_dict_to_list(opt_dict: dict): """ Converts a dictionary of named options for CLI program to a list. @@ -167,36 +184,44 @@ def options_dict_to_list(opt_dict: dict): opts.append(str(val)) return opts + def noisy_warnings(device): - """ Emit some noisy warnings """ - os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings-' + device + '_pack.log' + """ + Emit some noisy warnings. + """ + environ['OUR_NOISY_WARNINGS'] = f'noisy_warnings-{device}_pack.log' + def my_path(): - """ Get current PWD """ - mypath = os.path.realpath(sys.argv[0]) - return os.path.dirname(mypath) + """ + Get current PWD. 
+ """ + return str(Path(sys_argv[0]).resolve().parent) + def save_vpr_log(filename, build_dir=''): - """ Save VPR logic (moves the default output file into a desired path) """ - shutil.move(os.path.join(build_dir, 'vpr_stdout.log'), filename) + """ + Save VPR logic (moves the default output file into a desired path). + """ + sh_mv(str(Path(build_dir) / 'vpr_stdout.log'), filename) + def fatal(code, message): """ - Print a message informing about an error that has occured and terminate program - with a given return code. + Print a message informing about an error that has occured and terminate program with a given return code. """ - raise(Exception(f'[FATAL ERROR]: {message}')) exit(code) + class ResolutionEnv: """ - ResolutionEnv is used to hold onto mappings for variables used in flow and - perform text substitutions using those variables. - Variables can be referred in any "resolvable" string using the following - syntax: 'Some static text ${variable_name}'. The '${variable_name}' part - will be replaced by the value associated with name 'variable_name', is such - mapping exists. + ResolutionEnv is used to hold onto mappings for variables used in flow and perform text substitutions using those + variables. + Variables can be referred in any "resolvable" string using the following syntax: 'Some static text ${variable_name}'. + The '${variable_name}' part will be replaced by the value associated with name 'variable_name', is such mapping + exists. + values: dict """ @@ -209,15 +234,14 @@ def __copy__(self): def resolve(self, s, final=False): """ Perform resolution on `s`. - `s` can be a `str`, a `dict` with arbitrary keys and resolvable values, - or a `list` of resolvable values. + `s` can be a `str`, a `dict` with arbitrary keys and resolvable values, or a `list` of resolvable values. final=True - resolve any unknown variables into '' This is a hack and probably should be removed in the future """ if type(s) is str: - match_list = list(re.finditer('\$\{([^${}]*)\}', s)) - # Assumption: re.finditer finds matches in a left-to-right order + match_list = list(re_finditer('\$\{([^${}]*)\}', s)) + # Assumption: re_finditer finds matches in a left-to-right order match_list.reverse() for match in match_list: match_str = match.group(1) @@ -242,24 +266,30 @@ def resolve(self, s, final=False): return s def add_values(self, values: dict): - """ Add mappings from `values`""" - + """ + Add mappings from `values`. + """ for k, v in values.items(): self.values[k] = self.resolve(v) + verbosity_level = 0 -def sfprint(verbosity: int, *args): - """ Print with regards to currently set verbosity level """ +def sfprint(verbosity: int, *args): + """ + Print with regards to currently set verbosity level. 
+ """ global verbosity_level if verbosity <= verbosity_level: print(*args) + def set_verbosity_level(level: int): global verbosity_level verbosity_level = level + def get_verbosity_level() -> int: global verbosity_level return verbosity_level diff --git a/f4pga/common_modules/__init__.py b/f4pga/common_modules/__init__.py index 041b0435b..e69de29bb 100644 --- a/f4pga/common_modules/__init__.py +++ b/f4pga/common_modules/__init__.py @@ -1 +0,0 @@ -# This is only to make pydoc recognize this catalogue as a package diff --git a/f4pga/common_modules/fasm.py b/f4pga/common_modules/fasm.py index cfee9b421..35285bc8d 100644 --- a/f4pga/common_modules/fasm.py +++ b/f4pga/common_modules/fasm.py @@ -1,39 +1,20 @@ -#!/usr/bin/python3 +from pathlib import Path +from shutil import move as sh_mv -# Symbiflow Stage Module +from f4pga.common import vpr_specific_values, VprArgs, get_verbosity_level, sub +from f4pga.module import Module, ModuleContext -# ----------------------------------------------------------------------------- # - -import os -from f4pga.common import * -from f4pga.module import * - -# ----------------------------------------------------------------------------- # - -def concat_fasm(fasm: str, fasm_extra: str, output: str): - fasm_data = None - fasm_extra_data = None - with open(fasm, 'r') as fasm_file, open(fasm_extra, 'r') as fasm_extra_file: - fasm_data = fasm_file.read() - fasm_extra_data = fasm_extra_file.read() - data = fasm_data + '\n' + fasm_extra_data - - with open(output, 'w') as output_file: - output_file.write(data) - -def fasm_output_path(build_dir: str, top: str): - return f'{build_dir}/{top}.fasm' class FasmModule(Module): def map_io(self, ctx: ModuleContext): - build_dir = os.path.dirname(ctx.takes.eblif) + build_dir = str(Path(ctx.takes.eblif).parent) return { - 'fasm': fasm_output_path(build_dir, ctx.values.top) + 'fasm': f'{(Path(build_dir)/ctx.values.top)!s}.fasm' } def execute(self, ctx: ModuleContext): - build_dir = os.path.dirname(ctx.takes.eblif) + build_dir = str(Path(ctx.takes.eblif).parent) vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values) @@ -43,10 +24,14 @@ def execute(self, ctx: ModuleContext): if ctx.takes.sdc: optional += ['--sdc', ctx.takes.sdc] - s = ['genfasm', vprargs.arch_def, - os.path.realpath(ctx.takes.eblif), - '--device', vprargs.device_name, - '--read_rr_graph', vprargs.rr_graph + s = [ + 'genfasm', + vprargs.arch_def, + str(Path(ctx.takes.eblif).resolve()), + '--device', + vprargs.device_name, + '--read_rr_graph', + vprargs.rr_graph ] + vprargs.optional if get_verbosity_level() >= 2: @@ -56,13 +41,17 @@ def execute(self, ctx: ModuleContext): sub(*s, cwd=build_dir) - default_fasm_output_name = fasm_output_path(build_dir, ctx.values.top) + default_fasm_output_name = f'{(Path(build_dir)/ctx.values.top)!s}.fasm' if default_fasm_output_name != ctx.outputs.fasm: - shutil.move(default_fasm_output_name, ctx.outputs.fasm) + sh_mv(default_fasm_output_name, ctx.outputs.fasm) if ctx.takes.fasm_extra: yield 'Appending extra FASM...' 
-            concat_fasm(ctx.outputs.fasm, ctx.takes.fasm_extra, ctx.outputs.fasm)
+            # Read both inputs fully before reopening the output for writing,
+            # since opening the same path with mode 'w' truncates it in place.
+            with open(ctx.outputs.fasm, 'r') as fasm_file, \
+                 open(ctx.takes.fasm_extra, 'r') as fasm_extra_file:
+                fasm_data = fasm_file.read()
+                fasm_extra_data = fasm_extra_file.read()
+            with open(ctx.outputs.fasm, 'w') as wfptr:
+                wfptr.write(f"{fasm_data}\n{fasm_extra_data}")
         else:
             yield 'No extra FASM to append'
diff --git a/f4pga/common_modules/generic_script_wrapper.py b/f4pga/common_modules/generic_script_wrapper.py
index d93c6ac36..6ad77c3bd 100644
--- a/f4pga/common_modules/generic_script_wrapper.py
+++ b/f4pga/common_modules/generic_script_wrapper.py
@@ -1,7 +1,3 @@
-#!/usr/bin/python3
-
-# Symbiflow Stage Module
-
 """
 This module is intended for wrapping simple scripts without rewriting them as
 an sfbuild module. This is mostly to maintain compatibility with workflows
@@ -12,53 +8,42 @@
 * `script` (string, mandatory): Path to the script to be executed
 * `interpreter` (string, optional): Interpreter for the script
 * `cwd` (string, optional): Current Working Directory for the script
-* `outputs` (dict[string -> dict[string -> string]],
-  mandatory):
-  A dict with output descriptions (dicts). Keys name output dependencies.
-  * `mode` (string, mandatory): "file" or "stdout". Describes how the output is
-    grabbed from the script.
-  * `file` (string, required if `mode` is "file"): Name of the file generated by the
-    script.
-  * `target` (string, required): Default name of the file of the generated
-    dependency. You can use all values available durng map_io stage. Each input
-    dependency alsogets two extra values associated with it:
-    `:dependency_name[noext]`, which contains the path to the dependency the
-    extension with anything after last "." removed and `:dependency_name[dir]` which
-    contains directory paths of the dependency. This is useful for deriving an output
-    name from the input.
+* `outputs` (dict[string -> dict[string -> string]], mandatory):
+  A dict with output descriptions (dicts).
+  Keys name output dependencies.
+  * `mode` (string, mandatory): "file" or "stdout".
+    Describes how the output is grabbed from the script.
+  * `file` (string, required if `mode` is "file"): Name of the file generated by the script.
+  * `target` (string, required): Default name of the file of the generated dependency.
+    You can use all values available during the map_io stage.
+    Each input dependency also gets two extra values associated with it:
+    `:dependency_name[noext]`, which contains the path to the dependency with the extension (anything after the
+    last ".") removed, and `:dependency_name[dir]`, which contains the directory path of the dependency.
+    This is useful for deriving an output name from the input.
 * `meta` (string, optional): Description of the output dependency.
 * `inputs` (dict[string -> string | bool], mandatory):
-  A dict with input descriptions. Key is either a name of a named argument or a
-  position of unnamed argument prefaced with "#" (eg. "#1"). Positions are indexed
-  from 1, as it's a convention that 0th argument is the path of the executed program.
-  Values are strings that can contains references to variables to be resolved
-  after the project flow configuration is loaded (that means they can reference
-  values and dependencies which are to be set by the user). All of modules inputs
-  will be determined by the references used. Thus dependency and value definitions
-  are implicit. If the value of the resolved string is empty and is associated with a
-  named argument, the argument in question will be skipped entirely. This allows
-  using optional dependencies.
To use a named argument as a flag instead, set it to - `true`. + A dict with input descriptions. + Key is either a name of a named argument or a position of unnamed argument prefaced with "#" (eg. "#1"). + Positions are indexed from 1, as it's a convention that 0th argument is the path of the executed program. + Values are strings that can contains references to variables to be resolved after the project flow configuration is + loaded (that means they can reference values and dependencies which are to be set by the user). + All of modules inputs will be determined by the references used. + Thus dependency and value definitions are implicit. + If the value of the resolved string is empty and is associated with a named argument, the argument in question will be + skipped entirely. + This allows using optional dependencies. + To use a named argument as a flag instead, set it to `true`. """ # TODO: `environment` input kind -# ----------------------------------------------------------------------------- # - -import os -import shutil -import re +from pathlib import Path +from shutil import move as sh_mv +from re import match as re_match, finditer as re_finditer -from f4pga.common import * -from f4pga.module import * +from f4pga.common import decompose_depname, deep, get_verbosity_level, sub +from f4pga.module import Module, ModuleContext -# ----------------------------------------------------------------------------- # - -def _generate_stage_name(params): - stage_name = params.get('stage_name') - if stage_name is None: - stage_name = '' - return f'{stage_name}-generic' def _get_param(params, name: str): param = params.get(name) @@ -67,6 +52,7 @@ def _get_param(params, name: str): f'missing `{name}` field') return param + def _parse_param_def(param_def: str): if param_def[0] == '#': return 'positional', int(param_def[1:]) @@ -74,8 +60,6 @@ def _parse_param_def(param_def: str): return 'environmental', param_def[1:] return 'named', param_def -_file_noext_deep = deep(file_noext) -_realdirpath_deep = deep(lambda p: os.path.realpath(os.path.dirname(p))) class InputReferences: dependencies: 'set[str]' @@ -89,48 +73,34 @@ def __init__(self): self.dependencies = set() self.values = set() + def _get_input_references(input: str) -> InputReferences: refs = InputReferences() - if type(input) is not str: return refs - - matches = re.finditer('\$\{([^${}]*)\}', input) - for match in matches: + for match in re_finditer('\$\{([^${}]*)\}', input): match_str = match.group(1) - if match_str[0] == ':': - if len(match_str) < 2: - raise Exception('Dependency name must be at least 1 character ' - 'long') - dep_name = re.match('([^\\[\\]]*)', match_str[1:]).group(1) - refs.dependencies.add(dep_name) - else: + if match_str[0] != ':': refs.values.add(match_str) - + continue + if len(match_str) < 2: + raise Exception('Dependency name must be at least 1 character long') + refs.dependencies.add(re_match('([^\\[\\]]*)', match_str[1:]).group(1)) return refs +def _make_noop1(): + def noop(_): + return + return noop + + def _tailcall1(self, fun): def newself(arg, self=self, fun=fun): fun(arg) self(arg) return newself -def _add_extra_values_to_env(ctx: ModuleContext): - takes = dict(vars(ctx.takes).items()) - for take_name, take_path in takes.items(): - if take_path is None: - continue - attr_name = f':{take_name}[noext]' - ctx.r_env.values[attr_name] = _file_noext_deep(take_path) - attr_name = f':{take_name}[dir]' - dirname = _realdirpath_deep(take_path) - ctx.r_env.values[attr_name] = dirname - -def _make_noop1(): - def 
noop(_): - return - return noop class GenericScriptWrapperModule(Module): script_path: str @@ -139,8 +109,15 @@ class GenericScriptWrapperModule(Module): interpreter: 'None | str' cwd: 'None | str' + @staticmethod + def _add_extra_values_to_env(ctx: ModuleContext): + for take_name, take_path in vars(ctx.takes).items(): + if take_path is not None: + ctx.r_env.values[f':{take_name}[noext]'] = deep(lambda p: str(Path(p).with_suffix('')))(take_path) + ctx.r_env.values[f':{take_name}[dir]'] = deep(lambda p: str(Path(p).parent.resolve()))(take_path) + def map_io(self, ctx: ModuleContext): - _add_extra_values_to_env(ctx) + self._add_extra_values_to_env(ctx) outputs = {} for dep, _, out_path in self.file_outputs: @@ -155,7 +132,7 @@ def map_io(self, ctx: ModuleContext): return outputs def execute(self, ctx: ModuleContext): - _add_extra_values_to_env(ctx) + self._add_extra_values_to_env(ctx) cwd = ctx.r_env.resolve(self.cwd) @@ -187,7 +164,7 @@ def execute(self, ctx: ModuleContext): file = ctx.r_env.resolve(file, final=True) target = ctx.r_env.resolve(target, final=True) if target != file: - shutil.move(file, target) + sh_mv(file, target) def _init_outputs(self, output_defs: 'dict[str, dict[str, str]]'): self.stdout_target = None @@ -294,7 +271,8 @@ def get_all_env(ctx: ModuleContext): self.values.append(val) def __init__(self, params): - self.name = _generate_stage_name(params) + stage_name = params.get('stage_name') + self.name = f"{'' if stage_name is None else stage_name}-generic" self.no_of_phases = 2 self.script_path = params.get('script') self.interpreter = params.get('interpreter') @@ -307,4 +285,4 @@ def __init__(self, params): self._init_outputs(_get_param(params, 'outputs')) self._init_inputs(_get_param(params, 'inputs')) -ModuleClass = GenericScriptWrapperModule \ No newline at end of file +ModuleClass = GenericScriptWrapperModule diff --git a/f4pga/common_modules/io_rename.py b/f4pga/common_modules/io_rename.py index da1f196ec..a8b6b86d0 100644 --- a/f4pga/common_modules/io_rename.py +++ b/f4pga/common_modules/io_rename.py @@ -1,7 +1,3 @@ -#!/usr/bin/python3 - -# Symbiflow Stage Module - """ Rename (ie. change) dependencies and values of a module. 
This module wraps another, module whoose name is specified in `params.module` and changes the names of the
@@ -25,13 +21,10 @@
 """

-# ----------------------------------------------------------------------------- #
-
 from f4pga.common import *
-from f4pga.module import *
+from f4pga.module import Module, ModuleContext
 from f4pga.module_runner import get_module

-# ----------------------------------------------------------------------------- #

 def _switch_keys(d: 'dict[str, ]', renames: 'dict[str, str]') -> 'dict[str, ]':
     newd = {}
@@ -43,6 +36,7 @@
             newd[k] = v
     return newd

+
 def _switchback_attrs(d: Namespace, renames: 'dict[str, str]') -> SimpleNamespace:
     newn = SimpleNamespace()
     for k, v in vars(d).items():
@@ -54,6 +48,7 @@
         setattr(newn, k, v)
     return newn

+
 def _switch_entries(l: 'list[str]', renames: 'dict[str, str]') -> 'list[str]':
     newl = []
     for e in l:
@@ -65,12 +60,11 @@
         newl.append(r if r is not None else e)
     return newl

-def _generate_stage_name(name: str):
-    return f'{name}-io_renamed'

 def _or_empty_dict(d: 'dict | None'):
     return d if d is not None else {}

+
 class IORenameModule(Module):
     module: Module
     rename_takes: 'dict[str, str]'
@@ -102,7 +96,7 @@
     def __init__(self, params):
         self.rename_values = _or_empty_dict(params.get("rename_values"))
         self.module = module
-        self.name = _generate_stage_name(module.name)
+        self.name = f'{module.name}-io_renamed'
         self.no_of_phases = module.no_of_phases
         self.takes = _switch_entries(module.takes, self.rename_takes)
         self.produces = _switch_entries(module.produces, self.rename_produces)
diff --git a/f4pga/common_modules/mkdirs.py b/f4pga/common_modules/mkdirs.py
index 8b3d10542..0b796ebc3 100644
--- a/f4pga/common_modules/mkdirs.py
+++ b/f4pga/common_modules/mkdirs.py
@@ -1,21 +1,14 @@
-#!/usr/bin/python3
+"""
+This module is used as a helper in a build chain to automate creating build directories.
+It's currently the only parametric module, meaning it can take user-provided input at an early stage in order to
+determine its take/produces I/O.
+This allows representing other configurable directories, such as a build directory, as dependencies and, by doing so,
+allows the dependency algorithm to lazily create the directories if they become necessary.
+"""

-# Symbiflow Stage Module
+from pathlib import Path
+from f4pga.module import Module, ModuleContext

-""" This module is used as a helper in a abuild chain to automate creating build
-directiores. It' currenty the only parametric module, meaning it can take
-user-provided input at an early stage in order todetermine its take/produces
-I/O. This allows other repesenting configurable directories, such as a build
-directory as dependencies and by doing so, allow the dependency algorithm to
-lazily create the directories if they become necessary. """
-
-# ----------------------------------------------------------------------------- #
-
-import os
-from f4pga.common import *
-from f4pga.module import *
-
-# ----------------------------------------------------------------------------- #

 class MkDirsModule(Module):
     deps_to_produce: 'dict[str, str]'
@@ -27,7 +20,7 @@
     def execute(self, ctx: ModuleContext):
         outputs = vars(ctx.outputs)
         for _, path in outputs.items():
             yield f'Creating directory {path}...'
- os.makedirs(path, exist_ok=True) + Path(path).mkdir(parents=True, exist_ok=True) def __init__(self, params): self.name = 'mkdirs' diff --git a/f4pga/common_modules/pack.py b/f4pga/common_modules/pack.py index 1aeec6919..703513ce8 100644 --- a/f4pga/common_modules/pack.py +++ b/f4pga/common_modules/pack.py @@ -1,54 +1,54 @@ -#!/usr/bin/python3 +from pathlib import Path +from os import remove as os_remove +from shutil import move as sh_mv -# Symbiflow Stage Module - -# ----------------------------------------------------------------------------- # - -import os -import re from f4pga.common import * -from f4pga.module import * +from f4pga.module import Module, ModuleContext -# ----------------------------------------------------------------------------- # DEFAULT_TIMING_RPT = 'pre_pack.report_timing.setup.rpt' DEFAULT_UTIL_RPT = 'packing_pin_util.rpt' + class PackModule(Module): def map_io(self, ctx: ModuleContext): - p = file_noext(ctx.takes.eblif) - build_dir = os.path.dirname(p) - + epath = Path(ctx.takes.eblif) + build_dir = epath.parent return { - 'net': p + '.net', - 'util_rpt': os.path.join(build_dir, DEFAULT_UTIL_RPT), - 'timing_rpt': os.path.join(build_dir, DEFAULT_TIMING_RPT) + 'net': str(epath.with_suffix('.net')), + 'util_rpt': str(build_dir / DEFAULT_UTIL_RPT), + 'timing_rpt': str(build_dir / DEFAULT_TIMING_RPT) } def execute(self, ctx: ModuleContext): - vpr_args = VprArgs(ctx.share, ctx.takes.eblif, ctx.values, - sdc_file=ctx.takes.sdc) - build_dir = os.path.dirname(ctx.outputs.net) - noisy_warnings(ctx.values.device) + build_dir = Path(ctx.outputs.net).parent yield 'Packing with VPR...' - vpr('pack', vpr_args, cwd=build_dir) + vpr( + 'pack', + VprArgs( + ctx.share, + ctx.takes.eblif, + ctx.values, + sdc_file=ctx.takes.sdc + ), + cwd=str(build_dir) + ) - og_log = os.path.join(build_dir, 'vpr_stdout.log') + og_log = str(build_dir / 'vpr_stdout.log') yield 'Moving/deleting files...' 
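        # VPR always writes its log as `vpr_stdout.log` in the build directory;
        # move it to the requested pack log path, or drop it if none was requested.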
if ctx.outputs.pack_log: - shutil.move(og_log, ctx.outputs.pack_log) + sh_mv(og_log, ctx.outputs.pack_log) else: - os.remove(og_log) + os_remove(og_log) if ctx.outputs.timing_rpt: - shutil.move(os.path.join(build_dir, DEFAULT_TIMING_RPT), - ctx.outputs.timing_rpt) + sh_mv(str(build_dir / DEFAULT_TIMING_RPT), ctx.outputs.timing_rpt) + if ctx.outputs.util_rpt: - shutil.move(os.path.join(build_dir, DEFAULT_UTIL_RPT), - ctx.outputs.util_rpt) + sh_mv(str(build_dir / DEFAULT_UTIL_RPT), ctx.outputs.util_rpt) def __init__(self, _): self.name = 'pack' diff --git a/f4pga/common_modules/place.py b/f4pga/common_modules/place.py index 1cefd100d..f7a078d94 100644 --- a/f4pga/common_modules/place.py +++ b/f4pga/common_modules/place.py @@ -1,34 +1,28 @@ -#!/usr/bin/python3 - -# Symbiflow Stage Module - -# ----------------------------------------------------------------------------- # - +from pathlib import Path import os +from shutil import move as sh_mv +from re import match as re_match from f4pga.common import * -from f4pga.module import * +from f4pga.module import Module, ModuleContext -# ----------------------------------------------------------------------------- # def default_output_name(place_constraints): p = place_constraints - m = re.match('(.*)\\.[^.]*$', place_constraints) + m = re_match('(.*)\\.[^.]*$', place_constraints) if m: - p = m.groups()[0] + '.place' - else: - p += '.place' - return p + return m.groups()[0] + '.place' + return f'{p}.place' + def place_constraints_file(ctx: ModuleContext): - dummy =- False p = ctx.takes.place_constraints - if not p: - p = ctx.takes.io_place - if not p: - dummy = True - p = file_noext(ctx.takes.eblif) + '.place' + if p: + return p, False + p = ctx.takes.io_place + if p: + return p, False + return f'{Path(ctx.takes.eblif).stem}.place', True - return p, dummy class PlaceModule(Module): def map_io(self, ctx: ModuleContext): @@ -45,7 +39,7 @@ def execute(self, ctx: ModuleContext): with open(place_constraints, 'wb') as f: f.write(b'') - build_dir = os.path.dirname(ctx.takes.eblif) + build_dir = str(Path(ctx.takes.eblif).parent) vpr_options = ['--fix_clusters', place_constraints] @@ -63,7 +57,7 @@ def execute(self, ctx: ModuleContext): # the ones in flow configuration. if ctx.is_output_explicit('place'): output_file = default_output_name(place_constraints) - shutil.move(output_file, ctx.outputs.place) + sh_mv(output_file, ctx.outputs.place) yield 'Saving log...' 
save_vpr_log('place.log', build_dir=build_dir) diff --git a/f4pga/common_modules/place_constraints.py b/f4pga/common_modules/place_constraints.py index 04495f2ea..2e38cc3be 100644 --- a/f4pga/common_modules/place_constraints.py +++ b/f4pga/common_modules/place_constraints.py @@ -1,24 +1,17 @@ -#!/usr/bin/python3 - -# Symbiflow Stage Module - -# ----------------------------------------------------------------------------- # - -import os +from pathlib import Path from f4pga.common import * -from f4pga.module import * +from f4pga.module import Module, ModuleContext -# ----------------------------------------------------------------------------- # class PlaceConstraintsModule(Module): def map_io(self, ctx: ModuleContext): return { - 'place_constraints': file_noext(ctx.takes.net) + '.preplace' + 'place_constraints': f'{Path(ctx.takes.net).stem!s}.preplace' } def execute(self, ctx: ModuleContext): - arch_dir = os.path.join(ctx.share, 'arch') - arch_def = os.path.join(arch_dir, ctx.values.device, 'arch.timing.xml') + arch_dir = str(Path(ctx.share) / 'arch') + arch_def = str(Path(arch_dir) / ctx.values.device / 'arch.timing.xml') database = sub('prjxray-config').decode().replace('\n', '') diff --git a/f4pga/common_modules/route.py b/f4pga/common_modules/route.py index 7ffb8ebbf..6fafa57b0 100644 --- a/f4pga/common_modules/route.py +++ b/f4pga/common_modules/route.py @@ -1,41 +1,41 @@ -#!/usr/bin/python3 +from pathlib import Path +from shutil import move as sh_mv -# Symbiflow Stage Module - -# ----------------------------------------------------------------------------- # - -import os -import shutil from f4pga.common import * -from f4pga.module import * +from f4pga.module import Module, ModuleContext + -# ----------------------------------------------------------------------------- # +def route_place_file(ctx: ModuleContext): + return str(Path(ctx.takes.eblif).with_suffix('.route')) -def route_place_file(eblif: str): - return file_noext(eblif) + '.route' class RouteModule(Module): def map_io(self, ctx: ModuleContext): return { - 'route': route_place_file(ctx.takes.eblif) + 'route': route_place_file(ctx) } def execute(self, ctx: ModuleContext): - build_dir = os.path.dirname(ctx.takes.eblif) + build_dir = str(Path(ctx.takes.eblif).parent) vpr_options = [] if ctx.values.vpr_options: vpr_options = options_dict_to_list(ctx.values.vpr_options) - - vprargs = VprArgs(ctx.share, ctx.takes.eblif, ctx.values, - sdc_file=ctx.takes.sdc) - yield 'Routing with VPR...' - vpr('route', vprargs, cwd=build_dir) + vpr( + 'route', + VprArgs( + ctx.share, + ctx.takes.eblif, + ctx.values, + sdc_file=ctx.takes.sdc + ), + cwd=build_dir + ) if ctx.is_output_explicit('route'): - shutil.move(route_place_file(ctx.takes.eblif), ctx.outputs.route) + sh_mv(route_place_file(ctx), ctx.outputs.route) yield 'Saving log...' 
save_vpr_log('route.log', build_dir=build_dir) diff --git a/f4pga/common_modules/synth.py b/f4pga/common_modules/synth.py index 48cac762a..a40d4ab57 100755 --- a/f4pga/common_modules/synth.py +++ b/f4pga/common_modules/synth.py @@ -1,17 +1,12 @@ -#!/usr/bin/python3 - -# Symbiflow Stage Module - -# ----------------------------------------------------------------------------- # - import os from f4pga.common import * -from f4pga.module import * +from f4pga.module import Module, ModuleContext -# ----------------------------------------------------------------------------- # -# Setup environmental variables for YOSYS TCL scripts def yosys_setup_tcl_env(tcl_env_def): + """ + Setup environmental variables for YOSYS TCL scripts. + """ env = {} for key, value in tcl_env_def.items(): if value is None: @@ -22,6 +17,7 @@ def yosys_setup_tcl_env(tcl_env_def): env[key] = v return env + def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None): # Set up environment for TCL weirdness optional = [] @@ -41,19 +37,15 @@ def yosys_synth(tcl, tcl_env, verilog_files=[], read_verilog_args=None, log=None verilog_files = [] # Execute YOSYS command - return sub(*(['yosys', '-p', tcl] + optional + verilog_files), - env=env) + return sub(*(['yosys', '-p', tcl] + optional + verilog_files), env=env) + def yosys_conv(tcl, tcl_env, synth_json): # Set up environment for TCL weirdness env = os.environ.copy() env.update(tcl_env) + return sub('yosys', '-p', f'read_json {synth_json}; tcl {tcl}', env=env) - # Execute YOSYS command - return sub('yosys', '-p', 'read_json ' + synth_json + '; tcl ' + tcl, - env=env) - -# ----------------------------------------------------------------------------- # class SynthModule(Module): extra_products: 'list[str]' diff --git a/f4pga/flow_config.py b/f4pga/flow_config.py index 43329f05b..49fb8dcd9 100644 --- a/f4pga/flow_config.py +++ b/f4pga/flow_config.py @@ -1,22 +1,21 @@ -import os -import json +from pathlib import Path +from copy import copy +from os import listdir as os_listdir +from json import dump as json_dump, load as json_load -from f4pga.common import file_noext, ResolutionEnv, deep +from f4pga.common import ResolutionEnv, deep from f4pga.stage import Stage -from copy import copy -_realpath_deep = deep(os.path.realpath) def open_flow_cfg(path: str) -> dict: - flow_cfg_json: str - with open(path, 'r') as flow_cfg_file: - flow_cfg_json = flow_cfg_file.read() - return json.loads(flow_cfg_json) + with Path(path).open('r') as rfptr: + return json_load(rfptr) + def save_flow_cfg(flow: dict, path: str): - flow_cfg_json = json.dumps(flow, indent=4) - with open(path, 'w') as flow_cfg_file: - flow_cfg_file.write(flow_cfg_json) + with Path(path).open('w') as wfptr: + json_dump(flow, wfptr, indent=4) + def _get_lazy_dict(parent: dict, name: str): d = parent.get(name) @@ -25,69 +24,96 @@ def _get_lazy_dict(parent: dict, name: str): parent[name] = d return d -def _get_ov_dict(dname: str, flow: dict, - platform: 'str | None' = None, stage: 'str | None' = None): - d: dict - if platform: - platform_dict: dict = flow[platform] - if stage: - stage_dict: dict = _get_lazy_dict(platform_dict, stage) - d = _get_lazy_dict(stage_dict, dname) - else: - d = _get_lazy_dict(platform_dict, dname) - else: - d = _get_lazy_dict(flow, dname) - - return d -def _get_dep_dict(flow: dict, - platform: 'str | None' = None, stage: 'str | None' = None): +def _get_ov_dict( + dname: str, + flow: dict, + platform: 'str | None' = None, + stage: 'str | None' = None +): + if not platform: + return 
_get_lazy_dict(flow, dname) + platform_dict: dict = flow[platform] + if stage: + stage_dict: dict = _get_lazy_dict(platform_dict, stage) + return _get_lazy_dict(stage_dict, dname) + return _get_lazy_dict(platform_dict, dname) + + +def _get_dep_dict( + flow: dict, + platform: 'str | None' = None, + stage: 'str | None' = None +): return _get_ov_dict('dependencies', flow, platform, stage) -def _get_vals_dict(flow: dict, - platform: 'str | None' = None, stage: 'str | None' = None): + +def _get_vals_dict( + flow: dict, + platform: 'str | None' = None, + stage: 'str | None' = None +): return _get_ov_dict('values', flow, platform, stage) -def _add_ov(ov_dict_getter, failstr_constr, flow_cfg: dict, name: str, - values: list, platform: 'str | None' = None, - stage: 'str | None' = None) -> bool: - d = ov_dict_getter(flow_cfg, platform, stage) - - deps = d.get(name) - if type(deps) is list: - deps += values - elif deps is None: - d[name] = values - else: - print(failstr_constr(name)) - return False +def _add_ov( + ov_dict_getter, + failstr_constr, + flow_cfg: dict, + name: str, + values: list, + platform: 'str | None' = None, + stage: 'str | None' = None +) -> bool: + d = ov_dict_getter(flow_cfg, platform, stage) + deps = d.get(name) + if type(deps) is list: + deps += values + return True + + if deps is None: + d[name] = values return True -def _rm_ov_by_values(ov_dict_getter, notset_str_constr, notlist_str_constr, - flow: dict, name: str, vals: list, - platform: 'str | None' = None, - stage: 'str | None' = None) -> bool: - values_to_remove = set(vals) + print(failstr_constr(name)) + return False + + +def _rm_ov_by_values( + ov_dict_getter, + notset_str_constr, + notlist_str_constr, + flow: dict, + name: str, + vals: list, + platform: 'str | None' = None, + stage: 'str | None' = None +) -> bool: d = ov_dict_getter(flow, platform, stage) vallist: list = d.get(name) if type(vallist) is list: - d[name] = [val for val in vallist if val not in values_to_remove] - elif type(vallist) is None: + d[name] = [val for val in vallist if val not in set(vals)] + return True + + if type(vallist) is None: print(notset_str_constr(name)) return False - else: - print(notlist_str_constr(name)) - return False - return True + print(notlist_str_constr(name)) + return False -def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr, - flow: dict, name: str, idcs: list, - platform: 'str | None' = None, - stage: 'str | None' = None) -> bool: +def _rm_ov_by_idx( + ov_dict_getter, + notset_str_constr, + notlist_str_constr, + flow: dict, + name: str, + idcs: list, + platform: 'str | None' = None, + stage: 'str | None' = None +) -> bool: idcs.sort(reverse=True) if len(idcs) == 0: @@ -103,17 +129,22 @@ def _rm_ov_by_idx(ov_dict_getter, notset_str_constr, notlist_str_constr, for idx in idcs: vallist.pop(idx) - elif vallist is None: + return True + + if vallist is None: print(notset_str_constr(name)) return False - else: - print(notlist_str_constr(name)) - return False - return True + print(notlist_str_constr(name)) + return False + -def _get_ovs_raw(dict_name: str, flow_cfg, - platform: 'str | None', stage: 'str | None'): +def _get_ovs_raw( + dict_name: str, + flow_cfg, + platform: 'str | None', + stage: 'str | None' +): vals = flow_cfg.get(dict_name) if vals is None: vals = {} @@ -128,48 +159,105 @@ def _get_ovs_raw(dict_name: str, flow_cfg, return vals -def _remove_dependencies_by_values(flow: dict, name: str, deps: list, - platform: 'str | None' = None, - stage: 'str | None' = None) -> bool: + +def 
_remove_dependencies_by_values( + flow: dict, + name: str, + deps: list, + platform: 'str | None' = None, + stage: 'str | None' = None +) -> bool: def notset_str_constr(dname): return f'Dependency `{dname}` is not set. Nothing to remove.' def notlist_str_constr(dname): return f'Dependency `{dname}` is not a list! Use unsetd instead.' - return _rm_ov_by_values(_get_dep_dict, notset_str_constr, notlist_str_constr, - flow, name, deps, platform, stage) - -def _remove_dependencies_by_idx(flow: dict, name: str, idcs: list, - platform: 'str | None' = None, - stage: 'str | None' = None) -> bool: + return _rm_ov_by_values( + _get_dep_dict, + notset_str_constr, + notlist_str_constr, + flow, + name, + deps, + platform, + stage + ) + + +def _remove_dependencies_by_idx( + flow: dict, + name: str, + idcs: list, + platform: 'str | None' = None, + stage: 'str | None' = None +) -> bool: def notset_str_constr(dname): return f'Dependency `{dname}` is not set. Nothing to remove.' def notlist_str_constr(dname): return f'Dependency `{dname}` is not a list! Use unsetd instead.' - return _rm_ov_by_idx(_get_dep_dict, notset_str_constr, notlist_str_constr, - flow, name, idcs, platform, stage) - -def _remove_values_by_values(flow: dict, name: str, deps: list, - platform: 'str | None' = None, - stage: 'str | None' = None) -> bool: + return _rm_ov_by_idx( + _get_dep_dict, + notset_str_constr, + notlist_str_constr, + flow, + name, + idcs, + platform, + stage + ) + + +def _remove_values_by_values( + flow: dict, + name: str, + deps: list, + platform: 'str | None' = None, + stage: 'str | None' = None +) -> bool: def notset_str_constr(vname): return f'Value `{vname}` is not set. Nothing to remove.' def notlist_str_constr(vname): return f'Value `{vname}` is not a list! Use unsetv instead.' - return _rm_ov_by_values(_get_vals_dict, notset_str_constr, notlist_str_constr, - flow, name, deps, platform, stage) - -def _remove_values_by_idx(flow: dict, name: str, idcs: list, - platform: 'str | None' = None, - stage: 'str | None' = None) -> bool: + return _rm_ov_by_values( + _get_vals_dict, + notset_str_constr, + notlist_str_constr, + flow, + name, + deps, + platform, + stage + ) + + +def _remove_values_by_idx( + flow: dict, + name: str, + idcs: list, + platform: 'str | None' = None, + stage: 'str | None' = None +) -> bool: def notset_str_constr(dname): return f'Dependency `{dname}` is not set. Nothing to remove.' def notlist_str_constr(dname): return f'Dependency `{dname}` is not a list! Use unsetv instead.' 
- return _rm_ov_by_idx(_get_vals_dict, notset_str_constr, notlist_str_constr, - flow, name, idcs, platform, stage) - -def unset_dependency(flow: dict, name: str, - platform: 'str | None', stage: 'str | None'): + return _rm_ov_by_idx( + _get_vals_dict, + notset_str_constr, + notlist_str_constr, + flow, + name, + idcs, + platform, + stage + ) + + +def unset_dependency( + flow: dict, + name: str, + platform: 'str | None', + stage: 'str | None' +): d = _get_dep_dict(flow, platform, stage) if d.get(name) is None: print(f'Dependency `{name}` is not set!') @@ -177,22 +265,26 @@ def unset_dependency(flow: dict, name: str, d.pop(name) return True + def verify_platform_name(platform: str, mypath: str): - for plat_def_filename in os.listdir(os.path.join(mypath, 'platforms')): - platform_name = file_noext(plat_def_filename) + for plat_def_filename in os_listdir(str(Path(mypath) / 'platforms')): + platform_name = str(Path(plat_def_filename).stem) if platform == platform_name: return True return False + def verify_stage(platform: str, stage: str, mypath: str): # TODO: Verify stage return True + def _is_kword(w: str): return \ (w == 'dependencies') | (w == 'values') | \ (w == 'default_platform') | (w == 'default_target') + class FlowDefinition: # stage name -> module path mapping stages: 'dict[str, Stage]' @@ -225,6 +317,7 @@ def get_stage_r_env(self, stage_name: 'str') -> ResolutionEnv: r_env.add_values(stage.value_overrides) return r_env + class ProjectFlowConfig: flow_cfg: dict # r_env: ResolutionEnv @@ -275,20 +368,26 @@ def get_stage_r_env(self, platform: str, stage: str) -> ResolutionEnv: return r_env - """ Get dependencies without value resolution applied """ def get_dependencies_raw(self, platform: 'str | None' = None): + """ + Get dependencies without value resolution applied. + """ return _get_ovs_raw('dependencies', self.flow_cfg, platform, None) - """ Get values without value resolution applied """ - def get_values_raw(self, platform: 'str | None' = None, - stage: 'str | None' = None): + def get_values_raw( + self, + platform: 'str | None' = None, + stage: 'str | None' = None + ): + """ + Get values without value resolution applied. 
+ """ return _get_ovs_raw('values', self.flow_cfg, platform, stage) def get_stage_value_overrides(self, platform: str, stage: str): stage_cfg = self.flow_cfg[platform].get(stage) if stage_cfg is None: return {} - stage_vals_ovds = stage_cfg.get('values') if stage_vals_ovds is None: return {} @@ -317,8 +416,7 @@ def __init__(self, project_config: ProjectFlowConfig, raw_project_deps = project_config.get_dependencies_raw(platform) - self.dependencies_explicit = \ - _realpath_deep(self.r_env.resolve(raw_project_deps)) + self.dependencies_explicit = deep(lambda p: str(Path(p).resolve()))(self.r_env.resolve(raw_project_deps)) for stage_name, stage in platform_def.stages.items(): project_val_ovds = \ @@ -349,12 +447,9 @@ def __init__(self, path: str, message: str): def __str__(self) -> str: return f'Error in config `{self.path}: {self.message}' + def open_project_flow_cfg(path: str) -> ProjectFlowConfig: cfg = ProjectFlowConfig(path) - - flow_cfg_json: str - with open(path, 'r') as flow_cfg_file: - flow_cfg_json = flow_cfg_file.read() - cfg.flow_cfg = json.loads(flow_cfg_json) - - return cfg \ No newline at end of file + with Path(path).open('r') as rfptr: + cfg.flow_cfg = json_load(rfptr) + return cfg diff --git a/f4pga/module.py b/f4pga/module.py index e4a25691c..58d8a4cfa 100644 --- a/f4pga/module.py +++ b/f4pga/module.py @@ -1,17 +1,22 @@ -# Here are the things necessary to write a symbiflow Module +""" +Here are the things necessary to write an F4PGA Module. +""" -import abc from types import SimpleNamespace -from f4pga.common import * -from colorama import Fore, Style +from abc import abstractmethod + +from f4pga.common import ( + decompose_depname, + ResolutionEnv +) + class Module: """ A `Module` is a wrapper for whatever tool is used in a flow. - Modules can request dependencies, values and are guranteed to have all the - required ones present when entering `exec` mode. - They also have to specify what dependencies they produce and create the files - for these dependencies. + Modules can request dependencies, values and are guranteed to have all the required ones present when entering + `exec` mode. + They also have to specify what dependencies they produce and create the files for these dependencies. """ no_of_phases: int @@ -21,16 +26,16 @@ class Module: values: 'list[str]' prod_meta: 'dict[str, str]' - @abc.abstractmethod + @abstractmethod def execute(self, ctx): """ - Executes module. Use yield to print a message informing about current - execution phase. + Executes module. + Use yield to print a message informing about current execution phase. `ctx` is `ModuleContext`. """ pass - @abc.abstractmethod + @abstractmethod def map_io(self, ctx) -> 'dict[str, ]': """ Returns paths for outputs derived from given inputs. @@ -44,48 +49,50 @@ def __init__(self, params: 'dict[str, ]'): self.name = '' self.prod_meta = {} + class ModuleContext: """ - A class for object holding mappings for dependencies and values as well as - other information needed during modules execution. + A class for object holding mappings for dependencies and values as well as other information needed during modules + execution. """ - share: str # Absolute path to Symbiflow's share directory - bin: str # Absolute path to Symbiflow's bin directory - takes: SimpleNamespace # Maps symbolic dependency names to relative - # paths. - produces: SimpleNamespace # Contains mappings for explicitely specified - # dependencies. 
Useful mostly for checking for - # on-demand optional outputs (such as logs) - # with `is_output_explicit` method. - outputs: SimpleNamespace # Contains mappings for all available outputs. - values: SimpleNamespace # Contains all available requested values. - r_env: ResolutionEnv # `ResolutionEnvironmet` object holding mappings - # for current scope. + share: str # Absolute path to Symbiflow's share directory + bin: str # Absolute path to Symbiflow's bin directory + takes: SimpleNamespace # Maps symbolic dependency names to relative paths. + produces: SimpleNamespace # Contains mappings for explicitely specified dependencies. + # Useful mostly for checking for on-demand optional outputs (such as logs) with + # `is_output_explicit` method. + outputs: SimpleNamespace # Contains mappings for all available outputs. + values: SimpleNamespace # Contains all available requested values. + r_env: ResolutionEnv # `ResolutionEnvironmet` object holding mappings for current scope. module_name: str # Name of the module. def is_output_explicit(self, name: str): - """ True if user has explicitely specified output's path. """ - o = getattr(self.produces, name) - return o is not None + """ + True if user has explicitely specified output's path. + """ + return getattr(self.produces, name) is not None def _getreqmaybe(self, obj, deps: 'list[str]', deps_cfg: 'dict[str, ]'): """ - Add attribute for a dependency or panic if a required dependency has not - been given to the module on its input. + Add attribute for a dependency or panic if a required dependency has not been given to the module on its input. """ - for name in deps: name, spec = decompose_depname(name) value = deps_cfg.get(name) if value is None and spec == 'req': - fatal(-1, f'Dependency `{name}` is required by module ' - f'`{self.module_name}` but wasn\'t provided') + fatal(-1, f'Dependency `{name}` is required by module `{self.module_name}` but wasn\'t provided') setattr(obj, name, self.r_env.resolve(value)) # `config` should be a dictionary given as modules input. - def __init__(self, module: Module, config: 'dict[str, ]', - r_env: ResolutionEnv, share: str, bin: str): + def __init__( + self, + module: Module, + config: 'dict[str, ]', + r_env: ResolutionEnv, + share: str, + bin: str + ): self.module_name = module.name self.takes = SimpleNamespace() self.produces = SimpleNamespace() @@ -122,6 +129,7 @@ def shallow_copy(self): return mycopy + class ModuleRuntimeException(Exception): info: str @@ -131,14 +139,15 @@ def __init__(self, info: str): def __str___(self): return self.info -def get_mod_metadata(module: Module): - """ Get descriptions for produced dependencies. """ +def get_mod_metadata(module: Module): + """ + Get descriptions for produced dependencies. + """ meta = {} has_meta = hasattr(module, 'prod_meta') for prod in module.produces: - prod = prod.replace('?', '') - prod = prod.replace('!', '') + prod = prod.replace('?', '').replace('!', '') if not has_meta: meta[prod] = '' continue diff --git a/f4pga/module_runner.py b/f4pga/module_runner.py index 33acf0e60..8be7ccd7c 100644 --- a/f4pga/module_runner.py +++ b/f4pga/module_runner.py @@ -1,14 +1,16 @@ -""" Dynamically import and run sfbuild modules """ +""" +Dynamically import and run F4PGA modules. 
+""" from contextlib import contextmanager -import importlib -import importlib.util -import os +import importlib.util as importlib_util +from pathlib import Path + +from colorama import Style + from f4pga.module import Module, ModuleContext, get_mod_metadata from f4pga.common import ResolutionEnv, deep, sfprint -from colorama import Fore, Style -_realpath_deep = deep(os.path.realpath) @contextmanager def _add_to_sys_path(path: str): @@ -20,17 +22,20 @@ def _add_to_sys_path(path: str): finally: sys.path = old_syspath + def import_module_from_path(path: str): - absolute_path = os.path.realpath(path) + absolute_path = str(Path(path).resolve()) with _add_to_sys_path(path): - spec = importlib.util.spec_from_file_location(absolute_path, absolute_path) - module = importlib.util.module_from_spec(spec) + spec = importlib_util.spec_from_file_location(absolute_path, absolute_path) + module = importlib_util.module_from_spec(spec) spec.loader.exec_module(module) return module + # Once imported a module will be added to that dict to avaid re-importing it preloaded_modules = {} + def get_module(path: str): global preloaded_modules @@ -41,10 +46,10 @@ def get_module(path: str): mod = import_module_from_path(path) preloaded_modules[path] = mod - # All sfbuild modules should expose a `ModuleClass` type/alias which is a - # class implementing a Module interface + # All F4PGA modules should expose a `ModuleClass` type/alias which is a class implementing a Module interface return mod.ModuleClass + class ModRunCtx: share: str bin: str @@ -58,6 +63,7 @@ def __init__(self, share: str, bin: str, config: 'dict[str, ]'): def make_r_env(self): return ResolutionEnv(self.config['values']) + class ModuleFailException(Exception): module: str mode: str @@ -69,8 +75,11 @@ def __init__(self, module: str, mode: str, e: Exception): self.e = e def __str__(self) -> str: - return f'ModuleFailException:\n Module `{self.module}` failed ' \ - f'MODE: \'{self.mode}\'\n\nException `{type(self.e)}`: {self.e}' + return f"""ModuleFailException: + Module `{self.module}` failed MODE: \'{self.mode}\' + Exception `{type(self.e)}`: {self.e} +""" + def module_io(module: Module): return { @@ -80,32 +89,41 @@ def module_io(module: Module): 'meta': get_mod_metadata(module) } + def module_map(module: Module, ctx: ModRunCtx): try: - mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share, - ctx.bin) + mod_ctx = ModuleContext( + module, + ctx.config, + ctx.make_r_env(), + ctx.share, + ctx.bin + ) except Exception as e: raise ModuleFailException(module.name, 'map', e) - return _realpath_deep(vars(mod_ctx.outputs)) + return deep(lambda p: str(Path(p).resolve()))(vars(mod_ctx.outputs)) + def module_exec(module: Module, ctx: ModRunCtx): try: - mod_ctx = ModuleContext(module, ctx.config, ctx.make_r_env(), ctx.share, - ctx.bin) + mod_ctx = ModuleContext( + module, + ctx.config, + ctx.make_r_env(), + ctx.share, + ctx.bin + ) except Exception as e: raise ModuleFailException(module.name, 'exec', e) - sfprint(1, 'Executing module ' - f'`{Style.BRIGHT + module.name + Style.RESET_ALL}`:') + sfprint(1, f'Executing module `{Style.BRIGHT + module.name + Style.RESET_ALL}`:') current_phase = 1 try: for phase_msg in module.execute(mod_ctx): - sfprint(1, f' {Style.BRIGHT}[{current_phase}/{module.no_of_phases}]' - f'{Style.RESET_ALL}: {phase_msg}') + sfprint(1, f' {Style.BRIGHT}[{current_phase}/{module.no_of_phases}] {Style.RESET_ALL}: {phase_msg}') current_phase += 1 except Exception as e: raise ModuleFailException(module.name, 'exec', e) - sfprint(1, 
f'Module `{Style.BRIGHT + module.name + Style.RESET_ALL}` ' - 'has finished its work!') \ No newline at end of file + sfprint(1, f'Module `{Style.BRIGHT + module.name + Style.RESET_ALL}` has finished its work!') diff --git a/f4pga/ugly.py b/f4pga/ugly.py deleted file mode 100644 index fdb0ec338..000000000 --- a/f4pga/ugly.py +++ /dev/null @@ -1,19 +0,0 @@ -""" The "ugly" module is dedicated for some *ugly* workarounds """ - -import os -from f4pga.common import sub as common_sub - -def noisy_warnings(): - """ Emit some noisy warnings """ - - os.environ['OUR_NOISY_WARNINGS'] = 'noisy_warnings.log' - return 'noisy_warnings.log' - -def generate_values(): - """ Generate initial values, available in configs """ - - return{ - 'prjxray_db': common_sub('prjxray-config').decode().replace('\n', ''), - 'python3': common_sub('which', 'python3').decode().replace('\n', ''), - 'noisyWarnings': noisy_warnings() - } From 9d169a44af9ee37829c174fb830d0082cfe03ea8 Mon Sep 17 00:00:00 2001 From: Unai Martinez-Corral Date: Fri, 18 Mar 2022 04:09:00 +0100 Subject: [PATCH 25/33] add tests with pytest Signed-off-by: Unai Martinez-Corral --- .github/workflows/Pipeline.yml | 7 ++---- test/requirements.txt | 1 + test/wrappers.py | 46 ++++++++++++++++++++++++++++++++++ 3 files changed, 49 insertions(+), 5 deletions(-) create mode 100644 test/requirements.txt create mode 100644 test/wrappers.py diff --git a/.github/workflows/Pipeline.yml b/.github/workflows/Pipeline.yml index 6cc03a277..396dcbc09 100644 --- a/.github/workflows/Pipeline.yml +++ b/.github/workflows/Pipeline.yml @@ -214,8 +214,5 @@ jobs: run: | . ./.github/scripts/activate.sh - for tool in place route synth write-fasm; do - echo "::group::Test f4pga-$tool" - "f4pga-$tool" || echo "Failing?" - echo "::endgroup::" - done; + pip3 install -r ./test/requirements.txt + pytest -vsrA --color=yes test/wrappers.py diff --git a/test/requirements.txt b/test/requirements.txt new file mode 100644 index 000000000..e079f8a60 --- /dev/null +++ b/test/requirements.txt @@ -0,0 +1 @@ +pytest diff --git a/test/wrappers.py b/test/wrappers.py new file mode 100644 index 000000000..ea18079cd --- /dev/null +++ b/test/wrappers.py @@ -0,0 +1,46 @@ +from pytest import mark +from sys import stdout, stderr + +from subprocess import check_call + + +@mark.xfail +@mark.parametrize("wrapper", ['place', 'route', 'synth', 'write-fasm']) +def test_wrapper(wrapper): + print(f"\n::group::Test {wrapper}") + stdout.flush() + stderr.flush() + try: + check_call(f"f4pga-{wrapper}") + finally: + print("\n::endgroup::") + + +@mark.xfail +@mark.parametrize( + "wrapper", + [ + 'symbiflow_generate_constraints', + 'symbiflow_pack', + 'symbiflow_place', + 'symbiflow_route', + 'symbiflow_synth', + 'symbiflow_write_bitstream', + 'symbiflow_write_fasm', + 'symbiflow_write_xml_rr_graph', + 'vpr_common', + 'symbiflow_analysis', + 'symbiflow_repack', + 'symbiflow_generate_bitstream', + 'symbiflow_generate_libfile', + 'ql_symbiflow' + ] +) +def test_shell_wrapper(wrapper): + print(f"\n::group::Test {wrapper}") + stdout.flush() + stderr.flush() + try: + check_call(f"{wrapper}") + finally: + print("\n::endgroup::") From 2291026181192b604e5eb44b29113c47b055f118 Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Fri, 6 May 2022 06:00:12 -0500 Subject: [PATCH 26/33] f4pga: Remove irrelevant old prototype code Signed-off-by: Krzysztof Boronski --- f4pga/wrappers/__init__.py | 28 ------- f4pga/wrappers/xc7/__init__.py | 113 ---------------------------- f4pga/wrappers/xc7/synth.py | 126 
------------------------------- f4pga/wrappers/xc7/vpr.py | 133 --------------------------------- 4 files changed, 400 deletions(-) delete mode 100644 f4pga/wrappers/__init__.py delete mode 100644 f4pga/wrappers/xc7/__init__.py delete mode 100755 f4pga/wrappers/xc7/synth.py delete mode 100644 f4pga/wrappers/xc7/vpr.py diff --git a/f4pga/wrappers/__init__.py b/f4pga/wrappers/__init__.py deleted file mode 100644 index 7c627bc54..000000000 --- a/f4pga/wrappers/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -from pathlib import Path -from os import environ -from sys import argv as sys_argv -from subprocess import run as subprocess_run - - -def run(*args): - """ - Execute subroutine - """ - out = subprocess_run(args, capture_output=True) - if out.returncode != 0: - raise(Exception(out.returncode)) - return out.stdout - - -def noisy_warnings(device): - """ - Emit some noisy warnings - """ - environ['OUR_NOISY_WARNINGS'] = f'noisy_warnings-{device}_pack.log' - - -def my_path(): - """ - Get current PWD - """ - return str(Path(sys_argv[0]).resolve().parent) diff --git a/f4pga/wrappers/xc7/__init__.py b/f4pga/wrappers/xc7/__init__.py deleted file mode 100644 index 7a9cf1473..000000000 --- a/f4pga/wrappers/xc7/__init__.py +++ /dev/null @@ -1,113 +0,0 @@ -from pathlib import Path -from re import search as re_search - -from f4pga.wrappers import ( - my_path, - noisy_warnings, - run -) - -from f4pga.wrappers.xc7.vpr import ( - save_vpr_log, - setup_vpr_arg_parser, - VprArgs, - vpr -) - - -def place(): - parser = setup_vpr_arg_parser() - parser.add_argument( - '-n', - '--net', - nargs='+', - metavar='', - type=str, - help='NET filename' - ) - args = parser.parse_args() - - vprargs = VprArgs(my_path(), args) + [ - '--fix_clusters', - 'constraints.place', - '--place' - ] - vprargs.export() - - if not args.net: - print('Please provide NET filename') - exit(1) - - noisy_warnings() - - print('Generating constraints...\n') - - run( - 'symbiflow_generate_constraints', - args.eblif, - args.net, - args.part, - vprargs.arch_def, - args.pcf - ) - - vpr(vprargs) - - save_vpr_log('place.log') - - -def route(): - args = setup_vpr_arg_parser().parse_args() - - vprargs = VprArgs(my_path(), args) - vprargs.export() - - noisy_warnings(args.device) - - vprargs.optional += '--route' - - print('Routing...') - vpr(vprargs) - - save_vpr_log('route.log') - - -def write_fasm(): - vprargs = VprArgs( - my_path(), - setup_vpr_arg_parser().parse_args() - ) - - if vprargs.eblif is None: - raise(Exception("Argument EBLIF is required!")) - - top_ext_match = re_search('.*\\.[^.]*', vprargs.eblif) - top = top[:top_ext_match.pos] if top_ext_match else vprargs.eblif - - fasm_extra = top + '_fasm_extra.fasm' - - noisy_warnings() - - run( - 'genfasm', - vprargs.arch_def, - vprargs.eblif, - '--device', vprargs.device_name, - vprargs.vpr_options, - '--read_rr_graph', vprargs.rr_graph - ) - - print(f'FASM extra: {fasm_extra}\n') - - # Concatenate top.fasm with extra.fasm if necessary - if Path(fasm_extra).is_file(): - print('writing final fasm') - with open(top + '.fasm', 'r+<') as top_file, open(fasm_extra) as extra_file: - cat = top_file.read() - cat += '\n' - cat += extra_file.read() - top_file.seek(0) - top_file.write(cat) - top_file.truncate() - - save_vpr_log('fasm.log') diff --git a/f4pga/wrappers/xc7/synth.py b/f4pga/wrappers/xc7/synth.py deleted file mode 100755 index f209c24eb..000000000 --- a/f4pga/wrappers/xc7/synth.py +++ /dev/null @@ -1,126 +0,0 @@ -from pathlib import Path -from sys import argv as sys_argv -from os import environ 
-from argparse import ArgumentParser -from f4pga.wrappers import run - - -def arg_parser(): - parser = ArgumentParser(description="Parse flags") - - parser.add_argument( - '-t', - '--top', - nargs=1, - metavar='', - type=str, - help='Top module name' - ) - - parser.add_argument( - '-v', - '--verilog', - nargs='+', - metavar='', - type=str, - help='Verilog file list' - ) - - parser.add_argument( - '-x', - '--xdc', - nargs='+', - metavar='', - type=str, - help='XDC file list' - ) - - parser.add_argument( - '-d', - '--device', - nargs=1, - metavar='', - type=str, - help='Device type (e.g. artix7)' - ) - - parser.add_argument( - '-p', - '--part', - nargs=1, - metavar='', - type=str, - help='Part name' - ) - - return parser.parse_args() - - -def main(): - share_dir_path = (Path(sys_argv[0]).resolve().parent / '../share/symbiflow').resolve() - utils_path = share_dir_path / 'scripts' - - environ['SHARE_DIR_PATH'] = str(share_dir_path) - environ['TECHMAP_PATH'] = str(share_dir_path / 'techmaps/xc7_vpr/techmap') - environ['UTILS_PATH'] = str(utils_path) - - args = arg_parser() - - database_dir = environ.get('DATABASE_DIR', str(run('prjxray-config'))) - environ['DATABASE_DIR'] = database_dir - - # TODO: is this crossplatform??? - if 'PYTHON3' not in environ: - environ['PYTHON3'] = run(['which', 'python3']) - - if not args.verilog: - raise(Exception('Please provide at least one Verilog file\n')) - - if not args.top: - raise(Exception('Top module must be specified\n')) - - if not args.device: - raise(Exception('Device parameter required\n')) - - if not args.part: - raise(Exception('Part parameter required\n')) - - out_json = f"{args.top}.json" - synth_json = f"{args.top}_io.json" - log = f"{args.top}_synth.log" - - environ['TOP'] = args.top - environ['OUT_JSON'] = out_json - environ['OUT_SDC'] = f"{args.top}.sdc" - environ['SYNTH_JSON'] = synth_json - environ['OUT_SYNTH_V'] = f"{args.top}_synth.v" - environ['OUT_EBLIF'] = f"{args.top}.eblif" - environ['PART_JSON'] = str(Path(database_dir) / f"{args.device}/{args.part}/part.json") - environ['OUT_FASM_EXTRA'] = args.top + '_fasm_extra.fasm' - - if args.xdc: - environ['INPUT_XDC_FILES'] = ' '.join(args.xdc) - - run( - 'yosys', - '-p', - f'\"tcl {(utils_path / "xc7/synth.tcl")!s}\"', - '-l', - 'log', - ' '.join(args.verilog) - ) - - run( - 'python3', - str(utils_path / 'split_inouts.py'), - '-i', - out_json, - '-o', - synth_json - ) - - run( - 'yosys', - '-p', - f'\"read_json {synth_json}; tcl {(utils_path / "xc7/conv.tcl")!s}\"' - ) diff --git a/f4pga/wrappers/xc7/vpr.py b/f4pga/wrappers/xc7/vpr.py deleted file mode 100644 index 7a54e1b5d..000000000 --- a/f4pga/wrappers/xc7/vpr.py +++ /dev/null @@ -1,133 +0,0 @@ -from typing import List -from pathlib import Path -from argparse import ArgumentParser -from os import environ -from shutil import move as sh_mv - -from f4pga.wrappers import run - -class VprArgs: - arch_dir: Path - arch_def: Path - lookahead: Path - rr_graph: Path - rr_graph_xml: Path - place_delay: Path - device_name: Path - eblif: str - vpr_options: str - optional: List[str] - - def __init__(self, mypath, args): - self.arch_dir = (Path(mypath) / '../share/symbiflow/arch' / args.device).resolve() - self.arch_def = self.arch_dir / 'arch.timing.xml' - filename = f'rr_graph_{args.device}' - self.lookahead = self.arch_dir / f'{filename}.lookahead.bin' - self.rr_graph = self.arch_dir / f'{filename}.rr_graph.real.bin' - self.rr_graph_xml = self.arch_dir / f'{filename}.rr_graph.real.xml' - self.place_delay = self.arch_dir / 
f'{filename}.place_delay.bin' - self.device_name = args.device.replace('_', '-') - self.eblif = args.eblif - self.vpr_options = args.vpr_options - self.optional = ['--sdc_file', args.sdc] if args.sdc else [] - - def export(self): - environ['ARCH_DIR'] = str(self.arch_dir) - environ['ARCH_DEF'] = str(self.arch_def) - environ['LOOKAHEAD'] = str(self.lookahead) - environ['RR_GRAPH'] = str(self.rr_graph) - environ['RR_GRAPH_XML'] = str(self.rr_graph_xml) - environ['PLACE_DELAY'] = str(self.place_delay) - environ['DEVICE_NAME'] = str(self.device_name) - - -def setup_vpr_arg_parser(): - parser = ArgumentParser(description="Parse flags") - - parser.add_argument( - '-d', - '--device', - nargs=1, - metavar='', - type=str, - help='Device type (e.g. artix7)', - default='artix7' - ) - - parser.add_argument( - '-e', - '--eblif', - nargs=1, - metavar='', - type=str, - help='EBLIF filename' - ) - - parser.add_argument( - '-p', - '--pcf', - nargs=1, - metavar='', - type=str, - help='PCF filename' - ) - - parser.add_argument( - '-P', - '--part', - nargs=1, - metavar='', - type=str, - help='Part name' - ) - - parser.add_argument( - '-s', - '--sdc', - nargs=1, - metavar='', - type=str, - help='SDC file' - ) - - parser.add_argument( - '-a', - '--vpr_options', - metavar='', - type=str, - help='Additional VPR options' - ) - - parser.add_argument( - 'additional_vpr_args', - nargs='*', - metavar='', - type=str, - help='Additional arguments for vpr command' - ) - - return parser - - -def vpr(vprargs: VprArgs): - """ - Execute `vpr` - """ - return run( - 'vpr', - vprargs.arch_def, - vprargs.eblif, - '--device', vprargs.device_name, - vprargs.vpr_options, - '--read_rr_graph', vprargs.rr_graph, - '--read_router_lookahead', vprargs.lookahead, - 'read_placement_delay_lookup', vprargs.place_delay, - *vprargs.optional - ) - - -def save_vpr_log(filename): - """ - Save VPR log. 
- """ - sh_mv('vpr_stdout.log', filename) From eb195d2135ae419a573e03487b7b0924a5f6d30a Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Fri, 6 May 2022 06:44:11 -0500 Subject: [PATCH 27/33] f4pga: Remove some dead code and refactor flow_config.py Signed-off-by: Krzysztof Boronski --- f4pga/flow_config.py | 269 ++----------------------------------------- 1 file changed, 8 insertions(+), 261 deletions(-) diff --git a/f4pga/flow_config.py b/f4pga/flow_config.py index 49fb8dcd9..622f2d1e5 100644 --- a/f4pga/flow_config.py +++ b/f4pga/flow_config.py @@ -11,134 +11,6 @@ def open_flow_cfg(path: str) -> dict: with Path(path).open('r') as rfptr: return json_load(rfptr) - -def save_flow_cfg(flow: dict, path: str): - with Path(path).open('w') as wfptr: - json_dump(flow, wfptr, indent=4) - - -def _get_lazy_dict(parent: dict, name: str): - d = parent.get(name) - if d is None: - d = {} - parent[name] = d - return d - - -def _get_ov_dict( - dname: str, - flow: dict, - platform: 'str | None' = None, - stage: 'str | None' = None -): - if not platform: - return _get_lazy_dict(flow, dname) - platform_dict: dict = flow[platform] - if stage: - stage_dict: dict = _get_lazy_dict(platform_dict, stage) - return _get_lazy_dict(stage_dict, dname) - return _get_lazy_dict(platform_dict, dname) - - -def _get_dep_dict( - flow: dict, - platform: 'str | None' = None, - stage: 'str | None' = None -): - return _get_ov_dict('dependencies', flow, platform, stage) - - -def _get_vals_dict( - flow: dict, - platform: 'str | None' = None, - stage: 'str | None' = None -): - return _get_ov_dict('values', flow, platform, stage) - - -def _add_ov( - ov_dict_getter, - failstr_constr, - flow_cfg: dict, - name: str, - values: list, - platform: 'str | None' = None, - stage: 'str | None' = None -) -> bool: - d = ov_dict_getter(flow_cfg, platform, stage) - deps = d.get(name) - if type(deps) is list: - deps += values - return True - - if deps is None: - d[name] = values - return True - - print(failstr_constr(name)) - return False - - -def _rm_ov_by_values( - ov_dict_getter, - notset_str_constr, - notlist_str_constr, - flow: dict, - name: str, - vals: list, - platform: 'str | None' = None, - stage: 'str | None' = None -) -> bool: - d = ov_dict_getter(flow, platform, stage) - - vallist: list = d.get(name) - if type(vallist) is list: - d[name] = [val for val in vallist if val not in set(vals)] - return True - - if type(vallist) is None: - print(notset_str_constr(name)) - return False - - print(notlist_str_constr(name)) - return False - - -def _rm_ov_by_idx( - ov_dict_getter, - notset_str_constr, - notlist_str_constr, - flow: dict, - name: str, - idcs: list, - platform: 'str | None' = None, - stage: 'str | None' = None -) -> bool: - idcs.sort(reverse=True) - - if len(idcs) == 0: - print(f'Index list is emtpy!') - return False - - d = ov_dict_getter(flow, platform, stage) - vallist: list = d.get(name) - if type(vallist) is list: - if idcs[0] >= len(vallist) or idcs[len(idcs) - 1] < 0: - print(f'Index out of range (max: {len(vallist)}!') - return False - - for idx in idcs: - vallist.pop(idx) - return True - - if vallist is None: - print(notset_str_constr(name)) - return False - - print(notlist_str_constr(name)) - return False - - def _get_ovs_raw( dict_name: str, flow_cfg, @@ -159,113 +31,6 @@ def _get_ovs_raw( return vals - -def _remove_dependencies_by_values( - flow: dict, - name: str, - deps: list, - platform: 'str | None' = None, - stage: 'str | None' = None -) -> bool: - def notset_str_constr(dname): - return f'Dependency `{dname}` is 
not set. Nothing to remove.' - def notlist_str_constr(dname): - return f'Dependency `{dname}` is not a list! Use unsetd instead.' - return _rm_ov_by_values( - _get_dep_dict, - notset_str_constr, - notlist_str_constr, - flow, - name, - deps, - platform, - stage - ) - - -def _remove_dependencies_by_idx( - flow: dict, - name: str, - idcs: list, - platform: 'str | None' = None, - stage: 'str | None' = None -) -> bool: - def notset_str_constr(dname): - return f'Dependency `{dname}` is not set. Nothing to remove.' - def notlist_str_constr(dname): - return f'Dependency `{dname}` is not a list! Use unsetd instead.' - return _rm_ov_by_idx( - _get_dep_dict, - notset_str_constr, - notlist_str_constr, - flow, - name, - idcs, - platform, - stage - ) - - -def _remove_values_by_values( - flow: dict, - name: str, - deps: list, - platform: 'str | None' = None, - stage: 'str | None' = None -) -> bool: - def notset_str_constr(vname): - return f'Value `{vname}` is not set. Nothing to remove.' - def notlist_str_constr(vname): - return f'Value `{vname}` is not a list! Use unsetv instead.' - return _rm_ov_by_values( - _get_vals_dict, - notset_str_constr, - notlist_str_constr, - flow, - name, - deps, - platform, - stage - ) - - -def _remove_values_by_idx( - flow: dict, - name: str, - idcs: list, - platform: 'str | None' = None, - stage: 'str | None' = None -) -> bool: - def notset_str_constr(dname): - return f'Dependency `{dname}` is not set. Nothing to remove.' - def notlist_str_constr(dname): - return f'Dependency `{dname}` is not a list! Use unsetv instead.' - return _rm_ov_by_idx( - _get_vals_dict, - notset_str_constr, - notlist_str_constr, - flow, - name, - idcs, - platform, - stage - ) - - -def unset_dependency( - flow: dict, - name: str, - platform: 'str | None', - stage: 'str | None' -): - d = _get_dep_dict(flow, platform, stage) - if d.get(name) is None: - print(f'Dependency `{name}` is not set!') - return False - d.pop(name) - return True - - def verify_platform_name(platform: str, mypath: str): for plat_def_filename in os_listdir(str(Path(mypath) / 'platforms')): platform_name = str(Path(plat_def_filename).stem) @@ -280,14 +45,17 @@ def verify_stage(platform: str, stage: str, mypath: str): def _is_kword(w: str): - return \ - (w == 'dependencies') | (w == 'values') | \ - (w == 'default_platform') | (w == 'default_target') + kwords = { + 'dependencies', + 'values', + 'default_platform', + 'default_target' + } + return w in kwords class FlowDefinition: - # stage name -> module path mapping - stages: 'dict[str, Stage]' + stages: 'dict[str, Stage]' # stage name -> module path mapping r_env: ResolutionEnv def __init__(self, flow_def: dict, r_env: ResolutionEnv): @@ -320,38 +88,17 @@ def get_stage_r_env(self, stage_name: 'str') -> ResolutionEnv: class ProjectFlowConfig: flow_cfg: dict - # r_env: ResolutionEnv path: str - # platform_r_envs: 'dict[str, ResolutionEnv]' def __init__(self, path: str): self.flow_cfg = {} self.path = copy(path) - # self.r_env = ResolutionEnv({}) - # self.platform_r_envs = {} def platforms(self): for platform, _ in self.flow_cfg.items(): if not _is_kword(platform): yield platform - def add_platform(self, device: str) -> bool: - d = self.flow_cfg.get(device) - if d: - print(f'Device {device} already exists') - return False - - self.flow_cfg[device] = {} - return True - - def set_default_platform(self, device: str) -> bool: - self.flow_cfg['default_platform'] = device - return True - - def set_default_target(self, platform: str, target: str) -> bool: - 
self.flow_cfg[platform]['default_target'] = target - return True - def get_default_platform(self) -> 'str | None': return self.flow_cfg.get('default_platform') From 1e82b22bb57bfdd1eb70375f8fc4ebebc37a8f82 Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Fri, 6 May 2022 06:46:50 -0500 Subject: [PATCH 28/33] f4pga: Update f4pga/setupy.py to not install removed code Signed-off-by: Krzysztof Boronski --- f4pga/setup.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/f4pga/setup.py b/f4pga/setup.py index 1c96d23ef..320dceb1d 100644 --- a/f4pga/setup.py +++ b/f4pga/setup.py @@ -79,9 +79,7 @@ def get_requirements(file: Path) -> List[str]: packages=[ "f4pga", "f4pga.common_modules", - "f4pga.wrappers.sh", - "f4pga.wrappers", - "f4pga.wrappers.xc7" + "f4pga.wrappers.sh" ], package_dir={"f4pga": "."}, package_data={ @@ -93,11 +91,7 @@ def get_requirements(file: Path) -> List[str]: install_requires=list(set(get_requirements(requirementsFile))), entry_points={ "console_scripts": [ - "f4pga = f4pga.__init__:main", - "f4pga-place = f4pga.wrappers.xc7.__init__:place", - "f4pga-route = f4pga.wrappers.xc7.__init__:route", - "f4pga-synth = f4pga.wrappers.xc7.synth:main", - "f4pga-write-fasm = f4pga.wrappers.xc7.__init__:write_fasm", + "f4pga = f4pga.__init__:main" ] + wrapper_entrypoints }, ) From c763733b8be3d6cfbf95e336319d3cbcd482ca1e Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Fri, 6 May 2022 10:58:44 -0500 Subject: [PATCH 29/33] Update docs and replace some references to sfbuild/symbiflow with f4pga Signed-off-by: Krzysztof Boronski --- docs/f4pga/DevNotes.md | 142 +++++-------------- docs/f4pga/Usage.md | 19 +-- docs/f4pga/browse_pydoc.sh | 2 +- docs/f4pga/modules/fasm.md | 18 +++ docs/f4pga/modules/generic_script_wrapper.md | 2 +- docs/f4pga/modules/index.md | 69 +++++---- docs/f4pga/modules/io_rename.md | 4 + docs/f4pga/modules/mkdirs.md | 2 +- docs/f4pga/modules/pack.md | 7 + docs/f4pga/modules/place.md | 7 + docs/f4pga/modules/place_constraints.md | 11 ++ docs/f4pga/modules/route.md | 7 + docs/f4pga/modules/synth.md | 8 +- f4pga/__init__.py | 4 +- f4pga/argparser.py | 4 +- f4pga/module.py | 4 +- 16 files changed, 154 insertions(+), 156 deletions(-) create mode 100644 docs/f4pga/modules/fasm.md create mode 100644 docs/f4pga/modules/pack.md create mode 100644 docs/f4pga/modules/place.md create mode 100644 docs/f4pga/modules/place_constraints.md create mode 100644 docs/f4pga/modules/route.md diff --git a/docs/f4pga/DevNotes.md b/docs/f4pga/DevNotes.md index 87678f43c..fabce0827 100644 --- a/docs/f4pga/DevNotes.md +++ b/docs/f4pga/DevNotes.md @@ -1,38 +1,45 @@ # Developer's notes +##### Last update: 2022-05-06 -## Project's structure - -The main script is in the `sfbuild.py` file. -`sf_cache.py` contains code needed for tracking modifications in the project. -`sf_ugly` contains some ugly workarounds. - -There a are two python modules which are shared by the code of `sfbuild.py` and -_sfbuild modules_: `sf_common` and `sf_module`. - -_sfbuild modules_ are extensions to the build system that wrap tools to be used -within _sfbuild_ and currently they are standalone executable scripts. All -_sfbuild modules_ are single python scripts located under directories that -follow `sf_*_modules/` pattern. So currently those are: - - * `sf_common_modules` - modules which can be shared by multiple platforms. - * `sf_xc7_modules` - modules specific to xc7 flows. - * `sf_quicklogic_modules` - modules specific to Quiclogic flows. 
+
+:::{warning}
+These notes are provided as-is and they shouldn't be treated as a full-blown accurate
+documentation, but rather as a helpful resource for those who want to get involved with
+development of _f4pga_. These are not updated regularly.
 
-There's also a `docs` directory which you are probably aware of if you are reading
-this. All the documentation regarding sfbuild goes here.
+For more detailed, up-to-date information about the code, refer to the pydoc documentation.
+:::
 
-`platforms` direcotory contains JSON files with _platform flow definitions_.
-Names of those files must follow `platform_name.json` pattern.
+## Project's structure
 
-## Differnt subsystems and where to find them?
+* `__init__.py` contains the logic and entrypoint of the build system
+* `argparser.py` contains boring code for the CLI interface
+* `cache.py` contains code needed for tracking modifications in the project.
+* `common.py` contains code shared by the main utility and the modules
+* `flow_config.py` contains code for reading and accessing flow definitions and configurations
+* `module_inspector.py` contains utilities for inspecting I/O of modules
+* `module_runner.py` contains code required to load modules at runtime
+* `module.py` contains definitions required for writing and using f4pga modules
+* `part_db.json` contains mappings from part names to platform names
+* `setup.py` contains a package installation script
+* `stage.py` contains classes relevant to stage representation
+* `modules` contains loadable modules
+* `platforms` contains platform flow definitions
+
+:::{important}
+Throughout the codebase _f4pga_ (the tool) is often referenced as _sfbuild_.
+Similarly, _F4PGA_ (the toolchain) might get called _Symbiflow_.
+This is due to the project being written back when _F4PGA_ was called _Symbiflow_.
+:::
+
+## Different subsystems and where to find them?
 
 ### Building and dependency resolution
 
-All the code regarding dependency resolution is located in `sfbuild.py` file.
+All the code regarding dependency resolution is located in `__init__.py` file.
 Take a look at the `Flow` class.
 
 Most of the work is done in `Flow._resolve_dependencies` method. Basically it
-performs a _DFS_ with _stages_ (instances of _sfbuild modules_) as its nodes
+performs a _DFS_ with _stages_ (instances of _f4pga modules_) as its nodes
 which are linked using symbolic names of dependencies on inputs and outputs.
 It queries the modules for information regarding i/o (most importantly the
 paths on which they are going to produce outputs), checks whether
@@ -54,27 +61,16 @@
 to individual stages.
 
 Keeping track of status of each file is done using `SymbiCache` class, which
 is defined in `sf_cache.py` file. `SymbiCache` is used mostly inside
 `Flow`'s methods.
 
-### Module's internals and API
-
-`sf_module` contains everything that is necessary to write a module.
-Prticularly the `Module` and `ModuleContext` classes
-The `do_module` function currently servers as to create an instance of some
-`Module`'s subtype and provide a _CLI_ interface for it.
-
-The _CLI_ interface however, is not meant to be used by an end-user, especially
-given that it reads JSON data from _stdin_. A wrapper for interfacing with modules
-exists in `sfbuild.py` and it's called `_run_module`.
-
 ### Internal environmental variable system
 
-_sfbuild_ exposes some data to the user as well as reads some using internal
+_f4pga_ exposes some data to the user as well as reads some using internal
 environmental variables.
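In short, this resolution is a scoped string substitution over the configuration values. A minimal sketch of the idea follows; the class below is a simplified stand-in written for illustration only — the real `ResolutionEnv` in `f4pga/common.py` also resolves values nested in lists and dicts and may differ in details:

```python
# Illustrative-only approximation of `${variable_name}` resolution.
# Assumption: unresolved variables are left untouched; the actual
# ResolutionEnv in f4pga/common.py may behave differently.
import re


class MiniResolutionEnv:
    def __init__(self, values: dict):
        self.values = dict(values)

    def add_values(self, values: dict):
        # Later scopes (e.g. stage-specific values) override earlier ones.
        self.values.update(values)

    def resolve(self, s: str) -> str:
        # Substitute each `${name}` with the value bound in the current scope.
        return re.sub(
            r'\$\{([^}]*)\}',
            lambda m: str(self.values.get(m.group(1), m.group(0))),
            s,
        )


r_env = MiniResolutionEnv({'build_dir': 'build/eos-s3'})
assert r_env.resolve('${build_dir}/synth.log') == 'build/eos-s3/synth.log'
```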
 These can be referenced by users in _platform flow definitions_ and
 _project flow configurations_ using the `${variable_name}` syntax when
 defining values. They can also be read inside
-_sfbuild modules_ by accesing the `ctx.values` namespace.
+_f4pga modules_ by accessing the `ctx.values` namespace.
 
 The core of tis system is the `ResolutionEnvironemt` class which can be found
-inside the `sf_common` module.
+inside the `common` module.
 
 ### Installation
 
 Check `CMakeLists.txt`.
 
 ## TODO:
 
-Therea re a couple things that need some work:
-
-### Urgent
-
-* Full support for Quicklogic platforms.
-* Testing XC7 projects with more sophisticated setups and PCF flows.
-
-### Important
-
-* Fix and refactor overloading mechanism in _platform flow definitions_ and
-  _platform flow configurations_. Values in the global `values` dict should
-  be overloaded by those in `values` dict under `module_options.stage_name`
-  inside _platform flow definition_. Values in `platform flow configuration`
-  should be imported from `platform flow definition` and then overloaded by
-  entries in `values`, `platform_name.values`,
-  `platform_name.stages.stage_name.values` dicts respectively.
-
 * Define a clear specification for entries in _platform flow definitions_ and
   _platform flow configurations_. Which environmental variables can be
   accessed where, and when?
@@ -110,64 +89,15 @@
 
 * Make commenting style consistent
 
-* Write more docs
-
-### Not very important
+* Document writing flow definitions
 
 * Extend the metadata system for modules, perhaps make it easier to use.
 
 * Add missing metadata for module targets.
 
-### Maybe possible in the future
-
-* Generate platform defintions using CMake.
+* (_suggestion_) Generate platform definitions using CMake.
 
 ### Out of the current scope
 
 * Change interfaces of some internal python scripts. This could lead to
   possibly merging some modules for XC7 and Quicklogic into one common
   module.
-
-## Quicklogic
-
-So far I've been trying to bring support to _EOS-S3_ platform with mixed results.
-Some parts of upstream Symbiflow aren't there yet. The Quicklogic scripts are
-incomplete.
-
-The _k4n8_ family remains a mystery to me. There's zero information about any
-other familiar that _PP3_ and _PP2_. Neither could I find example projects for that.
-Symbiflow's website mentions that only briefly. Yosys complains about `_DLATCH_N_`
-not being supported when I tried synthesisng anything. Possibly related to the fact
-that there's no equivalent of `pp3_latches_map.v` for `k4n8/umc22` in
-[Yosys](https://github.com/YosysHQ/yosys/tree/master/techlibs/quicklogic).
-
-**UPDATE**: Finally got the ioplace stage to work. Pulling the Quicklogic fork was
-necessary in order to progress. The Quicklogic EOS-S3 development is now moved into
-`eos-s3` branch of my fork.
-Additionally The `chandalar.pcf` file in _symbiflow-examples_ seemed to be faulty.
-The '()' parenthesis should be replaced by '[]' brackets.
-I also tried to synthesize the `iir` project from `tool-perf`, but **VPR** seems
-to be unable to fit it (at least on my installation of Symbiflow which at this point
-is a bit old and heavily modified).
-
-Here's a flow configuration I've used for `btn_counter` on `eos-s3`:
-
-```json
-{
-    "dependencies": {
-        "sources": ["btn_counter.v"],
-        "synth_log": "${build_dir}/synth.log",
-        "pack_log": "${build_dir}/pack.log"
-    },
-    "values": {
-        "top": "top",
-        "build_dir": "build/eos-s3"
-    },
-
-    "ql-eos-s3": {
-        "dependencies": {
-            "pcf": "chandalar.pcf",
-            "build_dir": "${build_dir}"
-        }
-    }
-}
-```
\ No newline at end of file
diff --git a/docs/f4pga/Usage.md b/docs/f4pga/Usage.md
index 66a8d55b5..e666f9eb8 100644
--- a/docs/f4pga/Usage.md
+++ b/docs/f4pga/Usage.md
@@ -77,7 +77,8 @@ All *dependencies* are tracked by a modification tracking system which stores ha
 When F4PGA constructs a *flow*, it will try to omit execution of modules which would receive the same data on their
 input.
 
 There is a strong _assumption_ there that a *module*'s output remains unchanged if the input configuration isn't
-change, ie. *modules* are deterministic.
+changed, ie. *modules* are deterministic. This might not be true for some tools, and in case you really want to re-run
+a stage, there's a `--nocache` option that treats the `.symbicache` file as if it was empty.
 
 ### Resolution
 
@@ -247,24 +248,21 @@ Project status:
     [N] sources: ['counter.v']
     [O] xdc: ['arty.xdc']
 
-F4PGA: DONE
+f4pga: DONE
 ```
 
 The letters in the boxes describe the status of a dependency which's name is next to the box.
 
 * **X** - dependency unresolved.
+
   Dependency is not present or cannot be produced.
   This isn't always a bad sign. Some dependencies are not required to, such as `pcf`.
 
-* **U** - dependency unreachable.
-  The dependency has a module that could produce it, but the module's dependencies are unresolved.
-  This doesn't say whether the dependency was necessary or not.
-
 * **O** - dependency present, unchanged.
+
   This dependency is already built and is confirmed to stay unchanged during flow execution.
 
 * **N** - dependency present, new/changed.
+
   This dependency is already present on the persistent storage, but it was either missing earlier, or its content
-  changed from the last time.
+  changed since the last time it was used.
 
 :::{warning}
 It won't continue to be reported as "**N**" after a successful build of any target.
 :::
 
@@ -292,11 +290,8 @@ Additional info about a dependency will be displayed next to its name after a co
 * In case of unresolved dependencies (**X**), which are never produced by any module, a text sying "`MISSING`" will be
   displayed.
 
-* In case of unreachable dependencies, a name of such module that could produce them will be displayed followed by
-  `-> ???`.
-
 In the example above file `counter.v` has been modified and is now marked as "**N**".
-This couses a bunch of other dependencies to be reqbuilt ("**R**").
+This causes a bunch of other dependencies to be rebuilt ("**R**").
 `build_dir` and `xdc` were already present, so they are marked as "**O**".
 
 ## Common targets and values
 
@@ -328,7 +323,7 @@
 | Value name | type | Description |
 |------------|------|-------------|
-| `shareDir` | `string` | Path to symbiflow's installation "share" directory |
+| `shareDir` | `string` | Path to f4pga's installation "share" directory |
 | `python3` | `string` | Path to Python 3 executable |
 | `noisyWarnings` | `string` | Path to noisy warnings log (should be deprecated) |
 | `prjxray_db` | `string` | Path to Project X-Ray database |
diff --git a/docs/f4pga/browse_pydoc.sh b/docs/f4pga/browse_pydoc.sh
index d82dd669c..5e96b9950 100755
--- a/docs/f4pga/browse_pydoc.sh
+++ b/docs/f4pga/browse_pydoc.sh
@@ -2,6 +2,6 @@
 MY_DIR=`dirname $0`
 SFBUILD_DIR=${MY_DIR}/../../f4pga
-SFBUILD_PY=${SFBUILD_DIR}/sfbuild.py
+SFBUILD_PY=${SFBUILD_DIR}/__init__.py
 
 PYTHONPATH=${SFBUILD_DIR} pydoc -b
diff --git a/docs/f4pga/modules/fasm.md b/docs/f4pga/modules/fasm.md
new file mode 100644
index 000000000..94cdcd316
--- /dev/null
+++ b/docs/f4pga/modules/fasm.md
@@ -0,0 +1,18 @@
+# fasm
+
+The _fasm_ module generates FPGA assembly using `genfasm` (VPR-only).
+
+The module should guarantee the following outputs:
+ * `fasm`
+
+For detailed information about these targets, please refer to
+`docs/common targets and variables.md`
+
+The setup of the `fasm` module follows these specifications:
+
+## Values
+
+The `fasm` module accepts the following values:
+
+* `pnr_corner` (string, optional): PnR corner to use. Relevant only for Quicklogic's
+  eFPGAs.
\ No newline at end of file
diff --git a/docs/f4pga/modules/generic_script_wrapper.md b/docs/f4pga/modules/generic_script_wrapper.md
index 32c2a5085..207a2df6c 100644
--- a/docs/f4pga/modules/generic_script_wrapper.md
+++ b/docs/f4pga/modules/generic_script_wrapper.md
@@ -1,6 +1,6 @@
 # generic_script_wrapper
 
-This module provides a way to integrate an external command into an sfbuild flow.
+This module provides a way to integrate an external command into an f4pga flow.
 Its inputs and outputs are fully defined by the author of flow definition.
 
 ## Parameters
diff --git a/docs/f4pga/modules/index.md b/docs/f4pga/modules/index.md
index dd0c0e1f4..089d494da 100644
--- a/docs/f4pga/modules/index.md
+++ b/docs/f4pga/modules/index.md
@@ -3,20 +3,21 @@
 ## Interface
 
 This document contains all the information needed to configure modules for
-your _**sfbuild**_ project as well as some info about the API used to write
+your _**f4pga**_ project as well as some info about the API used to write
 modules.
 
 ### Configuration interface:
 
-Modules are configured through an internal API by _**sfbuild**_.
+Modules are configured through an internal API by _**f4pga**_.
 The basic requirement for a module script is to expose a class with `Module`
 interface.
 
-_**sfbuild**_ reads configuration from two different places:
-**platform's flow definition** file and **project's flow configuration** file.
+_**f4pga**_ reads its configuration from two different sources:
+**platform's flow definition**, which is a file that usually comes bundled with f4pga
+and **project's flow configuration**, which is a set of configuration options provided by the user
+through a JSON file or CLI interface.
 
-The files, as described in "_Getting Started_" document, contain _JSON_ serialized
-data. And they contain snippets of _module configurations_
+Those sources contain snippets of _module configurations_.
 
 A _module configuration_ is a structure with the following fields:
 
 * `takes` - a dictionary that contains keys which are names of the dependencies that are requested by the module.
   The values are paths to those dependencies.
   They can be either singular strings or lists of strings.
 
-* `produces` = a dictionary that contains keys which are names of the dependencies produced by the module.
+* `produces` - a dictionary that contains keys which are names of the dependencies produced by the module.
   The values are requested filenames for the files generated by the module.
   They can be either singular strings or lists of strings.
 
 * `values` - a dictionary that contains other values used to configure the module.
   The keys are value's names and the values can have any type.
 
-* `platform` - Platform's name. This is a string.
 
 ### Platform-level configuration
 
@@ -40,25 +39,29 @@
 In case of **platform's flow definition**, a `values` dictionary can be defined
 globally and the values defined there will be passed to every module's config.
 
 Those values can be overriden per-module through `module_options` dictionary.
 
-Parameters used during module's contruction can also be defined in `module_options`
+Parameters used during module's construction can also be defined in `module_options`
 as `params` (those are not a part of _module configuration_, instead they are used
 during the actual construction of a module instance, before it declares any of its
-input/outputs etc.)
+input/outputs etc. This is typically used to achieve some parametrization over module's
+I/O).
 
-Defining dictionaries for `takes` and `produces` is disallowed within
+Defining dictionaries for `takes` and `produces` is currently disallowed within
 **platform's flow definition**.
 
-For a detailed look on the concepts described here, please have a look at
-`sfbuild/platforms/xc7a50t`
+For examples of the **platform's flow definitions** described here, please have a look at
+the `f4pga/platforms/` directory. It contains **platform flow definitions** that come bundled
+with f4pga.
 
 ### Project-level configuration
 
+This section describes **project's flow configuration**.
+
 Similarly to **platform's flow definition**, `values` dict can be provided.
 The values provided there will overwrite the values from
 **platform's flow definition** in case of a collision.
 
 Unlike **platform's flow definition**, **project's flow configuration** may contain
-`dependencies` dict. This dictionary would be used to map saymbolic dependency
+`dependencies` dict. This dictionary would be used to map symbolic dependency
 names to actual paths. Most dependencies can have their paths resolved implicitly
 without the need to provide explicit paths, which is a mechanism that is described
 in a later section of this document. However some dependencies must be provided
@@ -71,24 +74,34 @@ dependencies, which won't be produced unless the user explicitelly provides a pa
 for them.
 
 **project's flow configuration** cannot specify `params` for modules and does not
-use `module_options` dictionary.
+use `module_options` dictionary. Nor can it instantiate any extra stages.
 
-Any entry with a key other than `dependencies` or `values` is treated as a
-platform name. Thise entries are necessaery to enable support for a given platform.
+Any entry, with a couple of _exceptions*_, is treated as a platform name.
+Enabling support for a given platform within a **project's flow configuration** file
+requires having an entry for that platform.
 
 Each of those entries may contain `dependencies`, `values` fields which will
 overload the `dependecies` and `values` defined in a global scope of
 **project's flow configuration**.
 Any other field under those platform entries is treated as a
 _stage-specific-configuration_.
The key is a name of a stage within a flow for the specified platform and the values are dicts which may contain `dependencies` and `values` fields that overload `dependencies` and `values` -repespectively, locally for the stage. +repespectively, locally for the stage. Additionally a `default_target` field can be +provided to specify a default target to built when the user does not specify it throgh +a CLI inteface. + +The afromentioned _*exceptions_ are: + +* `dependencies` - dependencies shared by all platforms. +* `values` - values shared by all platforms +* `default_platform` - default platform to chose in case it doesn't get specified + by the user ### Internal environmental variables -It's very usefule to be able to refer to some data within +It's very useful to be able to refer to some data within **platform's flow definition** and **project's flow configuration** to -either avoid redundant definitions or to access results of certain operations. -_**sfbuild**_ allows doing that by using a special syntax for accessing internal +either avoid redundant definitions or to store and access results of certain operations. +_**f4pga**_ allows doing that by using a special syntax for accessing internal environmental variables. The syntax is `${variable_name}`. Any string value within @@ -148,7 +161,8 @@ categories: (more on that later). * **built-in references** - there are a couple of built-in variables which are very handy: - * `shareDir` - path to symbiflow's _share_ directory. + * `shareDir` - path to f4pga's _share_ directory. + * `binDir` - path to f4pga's _bin_ directory. * `prjxray_db` - Project X-Ray database path. * `python3` - path to Python 3 interpreter. * `noisyWarnings` - (this one should probably get removed) @@ -166,7 +180,7 @@ The class should implement the following methods: is a dict with optional parameter for the module. Each module script should expose the class by defining it's name/type alias as -`ModuleClass`. sfbuild tries to access a `ModuleClass` attribute within a package +`ModuleClass`. f4pga tries to access a `ModuleClass` attribute within a package when instantiating a module. ### Module's execution modes @@ -224,7 +238,7 @@ set: by the module. * `values` - a list of names given to the variables used withing the module * `prod_meta` - A dictionary which maps product names to descriptions of these - products. + products. Those entries are optional and can be skipped. #### Qualifiers/decorators @@ -250,8 +264,13 @@ is implemented at the moment. It might be removed in the future. ## Common modules ```{toctree} +fasm generic_script_wrapper io_rename mkdirs +pack +place +place_constraints +route synth ``` diff --git a/docs/f4pga/modules/io_rename.md b/docs/f4pga/modules/io_rename.md index 4e994d594..e84730a1e 100644 --- a/docs/f4pga/modules/io_rename.md +++ b/docs/f4pga/modules/io_rename.md @@ -19,3 +19,7 @@ Not specifying a mapping for a given entry will leave it with its original name. ## Values All values specified for this modules will be accessible by tyhe wrapped module. + +## Extra notes + +This module might be removed in the future in favor of a native renaming support. diff --git a/docs/f4pga/modules/mkdirs.md b/docs/f4pga/modules/mkdirs.md index 5e778b808..bcdafbae2 100644 --- a/docs/f4pga/modules/mkdirs.md +++ b/docs/f4pga/modules/mkdirs.md @@ -1,4 +1,4 @@ -# io_rename +# mkdirs This modules creates directiories specified by the author of flow definition as its targets.. 
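For illustration, here is a hedged sketch of how the `mkdirs` module documented above might be parameterized inside a **platform's flow definition**. It uses only names that appear in these pages (`values`, `module_options`, `params`, the `${...}` reference syntax, and a `build_dir` target); the stage name `mk_build_dir` and the surrounding schema are assumptions for the sake of the example, not a verbatim excerpt from a bundled platform file:

```json
{
  "values": {
    "device": "xc7a50t_test"
  },
  "module_options": {
    "mk_build_dir": {
      "params": {
        "build_dir": "build/${device}"
      }
    }
  }
}
```

With `params` shaped like this, each key (here `build_dir`) would name a separate directory dependency and its value the path to create, matching the mkdirs contract described above.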
diff --git a/docs/f4pga/modules/pack.md b/docs/f4pga/modules/pack.md new file mode 100644 index 000000000..c3b05bd35 --- /dev/null +++ b/docs/f4pga/modules/pack.md @@ -0,0 +1,7 @@ +# pack + +:::{warning} +this page is under construction +::: + +Pack circuit with VPR. diff --git a/docs/f4pga/modules/place.md b/docs/f4pga/modules/place.md new file mode 100644 index 000000000..6a7d2fe7a --- /dev/null +++ b/docs/f4pga/modules/place.md @@ -0,0 +1,7 @@ +# place + +:::{warning} +this page is under construction +::: + +Place cells with VPR. diff --git a/docs/f4pga/modules/place_constraints.md b/docs/f4pga/modules/place_constraints.md new file mode 100644 index 000000000..2b35fdc36 --- /dev/null +++ b/docs/f4pga/modules/place_constraints.md @@ -0,0 +1,11 @@ +# place_constraints + +:::{warning} +this page is under construction +::: + +Move cell placement to satisfy constraints imposed by an architecture. (VPR-only) + +:::{note} +This will be deprecated once VPR constraint system supports this functionality natively. +::: diff --git a/docs/f4pga/modules/route.md b/docs/f4pga/modules/route.md new file mode 100644 index 000000000..40f4a38cc --- /dev/null +++ b/docs/f4pga/modules/route.md @@ -0,0 +1,7 @@ +# route + +:::{warning} +this page is under construction +::: + +Route a design with VPR. diff --git a/docs/f4pga/modules/synth.md b/docs/f4pga/modules/synth.md index 5478816c0..3910b007a 100644 --- a/docs/f4pga/modules/synth.md +++ b/docs/f4pga/modules/synth.md @@ -20,17 +20,17 @@ will be generated upon a successful YOSYS run. The setup of the synth module follows the following specifications: -## Parameters: +## Parameters The `params` section of a stage configuration may contain a `produces` list. The list should specify additional targets that will be generated -(`?` qualifier is allowedd). +(`?` qualifier is allowed). -## Values: +## Values The `synth` module requires the following values: -* `tcl_scripts` (string, required ): A path to a directory containing `synth.tcl` +* `tcl_scripts` (string, required): A path to a directory containing `synth.tcl` and `conv.tcl` scripts that wiull be used by YOSYS. * `read_verilog_args` (list[string | number], optional) - If specified, the verilog sources will be read using the `read_verilog` procedure with options contained in diff --git a/f4pga/__init__.py b/f4pga/__init__.py index 225998009..da1e0fa95 100755 --- a/f4pga/__init__.py +++ b/f4pga/__init__.py @@ -458,8 +458,8 @@ def sfbuild_fail(): sfbuild_done_str = Style.BRIGHT + Fore.RED + 'FAILED' def sfbuild_done(): - sfprint(1, f'sfbuild: {sfbuild_done_str}' - f'{Style.RESET_ALL + Fore.RESET}') + sfprint(1, f'f4pga: {sfbuild_done_str}' + f'{Style.RESET_ALL + Fore.RESET}') exit(0) def setup_resolution_env(): diff --git a/f4pga/argparser.py b/f4pga/argparser.py index 831208bd6..eba722f9e 100644 --- a/f4pga/argparser.py +++ b/f4pga/argparser.py @@ -102,7 +102,7 @@ def _setup_build_parser(parser: ArgumentParser): metavar='', type=str, help='Specify stage inputs explicitely. This might be required if some files got renamed or deleted and ' - 'symbiflow is unable to deduce the flow that lead to dependencies required by the requested stage' + 'f4pga is unable to deduce the flow that lead to dependencies required by the requested stage' ) @@ -130,7 +130,7 @@ def setup_argparser(): """ Set up argument parser for the program. 
""" - parser = ArgumentParser(description='SymbiFlow Build System') + parser = ArgumentParser(description='F4PGA Build System') parser.add_argument( '-v', diff --git a/f4pga/module.py b/f4pga/module.py index 58d8a4cfa..0ff006b85 100644 --- a/f4pga/module.py +++ b/f4pga/module.py @@ -56,8 +56,8 @@ class ModuleContext: execution. """ - share: str # Absolute path to Symbiflow's share directory - bin: str # Absolute path to Symbiflow's bin directory + share: str # Absolute path to F4PGA's share directory + bin: str # Absolute path to F4PGA's bin directory takes: SimpleNamespace # Maps symbolic dependency names to relative paths. produces: SimpleNamespace # Contains mappings for explicitely specified dependencies. # Useful mostly for checking for on-demand optional outputs (such as logs) with From ea14047a2916b1247644ba71700c444030ee9281 Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Fri, 6 May 2022 12:41:45 -0500 Subject: [PATCH 30/33] f4pga: remove unused arguments Signed-off-by: Krzysztof Boronski --- f4pga/argparser.py | 29 +---------------------------- 1 file changed, 1 insertion(+), 28 deletions(-) diff --git a/f4pga/argparser.py b/f4pga/argparser.py index eba722f9e..55c132a01 100644 --- a/f4pga/argparser.py +++ b/f4pga/argparser.py @@ -58,13 +58,6 @@ def _setup_build_parser(parser: ArgumentParser): help='Display info about stage' ) - parser.add_argument( - '-r', - '--requirements', - action='store_true', - help='Display info about project\'s requirements.' - ) - parser.add_argument( '-p', '--part', @@ -86,33 +79,13 @@ def _setup_build_parser(parser: ArgumentParser): default=[] ) - # Currently unsupported - parser.add_argument( - '-M', - '--moduleinfo', - nargs=1, - metavar='module_name_or_path', - help='Display info about module. Requires `-p` option in case of module name' - ) - - parser.add_argument( - '-T', - '--take_explicit_paths', - nargs='+', - metavar='', - type=str, - help='Specify stage inputs explicitely. This might be required if some files got renamed or deleted and ' - 'f4pga is unable to deduce the flow that lead to dependencies required by the requested stage' - ) - - def _setup_show_dep_parser(parser: ArgumentParser): parser.add_argument( '-p', '--platform', metavar='platform_name', type=str, - help='Name of the platform (use to display platform-specific values.' + help='Name of the platform (use to display platform-specific values.)' ) parser.add_argument( From 100367b13460052e268ba6bab9e59f40fe0b280b Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Fri, 6 May 2022 12:42:36 -0500 Subject: [PATCH 31/33] f4pga: Update usage commands and fix small errors in docs Signed-off-by: Krzysztof Boronski --- docs/f4pga/Usage.md | 93 +++++++++++++++++++++++++++++++------ docs/f4pga/modules/index.md | 2 + 2 files changed, 82 insertions(+), 13 deletions(-) diff --git a/docs/f4pga/Usage.md b/docs/f4pga/Usage.md index e666f9eb8..08a3f84e8 100644 --- a/docs/f4pga/Usage.md +++ b/docs/f4pga/Usage.md @@ -11,10 +11,9 @@ To get started with a project that already uses `f4pga`, go to the project's dir generate a bitstream: ```bash -$ f4pga flow.json -p platform_name -t bitstream +$ f4pga build -f flow.json ``` -Substitute `platform_name` by the name of the target platform (eg. `x7a50t`). `flow.json` should be a *project flow configuration* file included with the project. If you are unsure if you got the right file, you can check an example of the contents of such file shown in the *Build a target* section below. 
@@ -123,10 +122,12 @@ Typically *projects flow configuration* will be used to resolve dependencies for ## Build a target +### Using flow configuration file + To build a *target* `target_name`, use the following command: ```bash -$ f4pga flow.json -p platform_device_name -t target_name +$ f4pga build -f flow.json -p platform_device_name -t target_name ``` where `flow.json` is a path to *projects flow configuration*. @@ -134,14 +135,18 @@ For example, let's consider the following *projects flow configuration (flow.jso ```json { + "default_platform": "xc7a50t", "dependencies": { "sources": ["counter.v"], "xdc": ["arty.xdc"], "synth_log": "synth.log", "pack_log": "pack.log", + }, + "values": { "top": "top" }, "xc7a50t": { + "default_target": "bitstream", "dependencies": { "build_dir": "build/arty_35" } @@ -166,9 +171,43 @@ With this flow configuration, you can build a bitstream for arty_35 using the following command: ``` -$ f4pga flow.json -p x7a50t -t bitstream +$ f4pga build -f flow.json -p XC7A35TCSG324-1 -t bitstream ``` + +Because we have `default_platform` defined, we can skip the `--platform` or `--part` argument. +We can also skip the `--target` argument because we have a `default_target` defined for the +chosen platform. This will default to the `bitstream` target of the `xc7a50t` platform: + +``` +$ f4pga build -f flow.json ``` + +### Using Command-Line Interface + +Alternatively, you can use the CLI to pass the configuration without creating a flow file: + +``` +$ f4pga build -p XC7A35TCSG324-1 -Dsources=[counter.v] -Dxdc=[arty.xdc] -Dsynth_log=synth.log -Dpack_log=pack.log -Dbuild_dir=build/arty_35 -Vtop=top -t bitstream +``` + +CLI flow configuration can be used alongside a flow configuration file and will override +conflicting dependencies/values from the file. + +CLI configuration follows the following format: + +`<name>=<expression>` + +`<name>` is the name of a dependency or value, optionally prefixed by a stage +name and a dot (`.`). Using the notation with a stage name sets the dependency/value only for the +specified stage. + +`<expression>` is a form of defining a dependency path or a value. Characters are interpreted +as strings unless they follow one of the following formats: +* `[item1,item2,item3,...]` - this is a list of strings +* `{key1:value1,key2:value2,key3:value3,...}` - this is a dictionary + +Nesting structures is curently unsupported in CLI. + ### Pretend mode You can also add a `--pretend` (`-P`) option if you just want to see the results of dependency resolution for a specified target without building it. @@ -189,7 +228,7 @@ _on-demand_ is currently displayed. Example: ```bash -$ f4pga flow.json -p x7a50t -i +$ f4pga -v build flow.json --platform x7a50t -i ``` ``` @@ -213,20 +252,48 @@ Platform dependencies/targets: This is only a snippet of the entire output. ::: -### Summary of all available options +### Summary of global options + +| long | short | arguments | description | +|-----------|:-----:|--------------------------|----------------------------------------------------------------------------| +| --verobse | -v | - | Constrol verbosity level. 0 for no verbose output. 
2 for maximum verbisity | +| --silent | -s | - | Surpress any output | + +### Summary of all available subcommands + +| name | description | +|---------|-----------------------------| +| build | Build a project | +| showd | Print value of a dependency | + +### Summary of all options available for `build` subcommand + +| long | short | arguments | description | +|-------------|:-----:|--------------------------|---------------------------------------------------------| +| --flow | -f | flow configuration file | Use flow configuration file | +| --platform | | platform name | Specify target platform name (eg. x7a100t) | +| --part | -p | part name | Speify target platform by part name | +| --target | -t | target dependency name | Specify target to produce | +| --info | -i | - | Display information about available targets | +| --pretend | -P | - | Resolve dependencies without executing the flow | +| --nocache | | - | Do not perform incremental build (do a full build) | +| --stageinfo | -S | stage name | Display information about a specified stage | +| --dep | -D | dependency_name=pathexpr | Add a dependency to configuration | +| --val | -V | value_name=valueexpr | Add a value to configuration | + +### Summary of all options available for `showd` subcommand -| long | short | arguments | description | |------------|:-----:|------------------------|-------------------------------------------------| -| --platform | -p | device name | Specify target device name (eg. x7a100t) | -| --target | -t | target dependency name | Specify target to produce | -| --info | -i | - | Display information about available targets | -| --pretend | -P | - | Resolve dependencies without executing the flow | +| long | short | arguments | description | +|-------------|:-----:|--------------------------|--------------------------------------------------------------------------| +| --flow | -f | flow configuration file | Use flow configuration file | +| --platform | -p | platform name | Specify target platform name (to display platform-specific dependencies) | +| --stage | -s | stage name | Specify stage name (to display stage-specific dependencies) | ### Dependency resolution display F4PGA displays some information about dependencies when requesting a target. -Here's an example of a possible output when trying to build `bitstream` target: +Here's an example of a possible output when trying to build the `bitstream` target (use `-P`): ``` F4PGA Build System diff --git a/docs/f4pga/modules/index.md index 089d494da..262a76ff6 100644 --- a/docs/f4pga/modules/index.md +++ b/docs/f4pga/modules/index.md @@ -96,6 +96,8 @@ The afromentioned _*exceptions_ are: * `default_platform` - default platform to chose in case it doesn't get specified by the user +Those apply only to the flow configuration file. 
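To make the shape described above concrete, here is a hypothetical **project's flow configuration** skeleton assembled only from fields documented in this section (`default_platform`, shared `dependencies` and `values`, a platform entry with `default_target`, and a stage-specific override); the platform and stage names are illustrative examples borrowed from the Usage guide:

```json
{
  "default_platform": "xc7a50t",
  "dependencies": { "sources": ["counter.v"] },
  "values": { "top": "top" },
  "xc7a50t": {
    "default_target": "bitstream",
    "dependencies": { "build_dir": "build/arty_35" },
    "synth": {
      "values": { "read_verilog_args": [] }
    }
  }
}
```

Here the `xc7a50t` entry enables that platform, overloads the global `dependencies`, and locally overrides `values` for a hypothetical `synth` stage.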
+ ### Internal environmental variables It's very useful to be able to refer to some data within From 3f5202d2ba2843270813877163fa9f0da7abf6bd Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Wed, 11 May 2022 07:50:07 -0500 Subject: [PATCH 32/33] f4pga docs: Updated platform support list Signed-off-by: Krzysztof Boronski --- docs/f4pga/Usage.md | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/f4pga/Usage.md b/docs/f4pga/Usage.md index 08a3f84e8..cb877f88a 100644 --- a/docs/f4pga/Usage.md +++ b/docs/f4pga/Usage.md @@ -35,11 +35,13 @@ If you want to create a new project, it's highly recommended that you read this It works by wrapping the necessary tools in Python, which are called *f4pga modules*. Modules are then referenced in *platform flow definition* files, together with configuration specific for a given platform. -Flow definition files for the following platforms are included as a part of `f4pga`: +Flow definition files for the following platforms are included as a part of _f4pga_: -* x7a50t -* x7a100t -* x7a200t (_soon_) +* **AMD Xilinx x7a50t** (and architecturally equivalent devices, such as x7a35t) +* **AMD Xilinx x7a100t** +* **AMD Xilinx x7a200t** +* **Quicklogic EOS-S3** (currently unsupported, provided only for development purposes) +* **Quicklogic K4N8** (currently unsupported, provided only for development purposes) You can also write your own *platform flow definition* file if you want to bring support for a different device. From 080a59884a441f39c8be83dd67e5dae496661296 Mon Sep 17 00:00:00 2001 From: Krzysztof Boronski Date: Wed, 11 May 2022 08:14:50 -0500 Subject: [PATCH 33/33] Spelling corrections Signed-off-by: Krzysztof Boronski --- docs/f4pga/DevNotes.md | 20 ++--- docs/f4pga/Usage.md | 88 ++++++++++---------- docs/f4pga/modules/fasm.md | 4 +- docs/f4pga/modules/generic_script_wrapper.md | 4 +- docs/f4pga/modules/index.md | 36 ++++---- docs/f4pga/modules/io_rename.md | 4 +- docs/f4pga/modules/mkdirs.md | 4 +- docs/f4pga/modules/synth.md | 10 +-- 8 files changed, 85 insertions(+), 85 deletions(-) diff --git a/docs/f4pga/DevNotes.md b/docs/f4pga/DevNotes.md index fabce0827..de2ea74a0 100644 --- a/docs/f4pga/DevNotes.md +++ b/docs/f4pga/DevNotes.md @@ -11,17 +11,17 @@ For more detailed, up-to-date information about the code, refer to the pydoc doc ## Project's structure -* `__init__.py` contains the logic and entrypoint of the build system +* `__init__.py` contains the logic and entry point of the build system * `argparser.py` contains boring code for CLI interface * `cache.py` contains code needed for tracking modifications in the project. 
* `common.py` contains code shared by the main utility and the modules * `flow_config.py` contains code for reading and accessing flow definitions and configurations -* `module_inspector.py` contains utilities for inpecting I/O of modules -* `module_runner.py` contains code required to load modules at runtime +* `module_inspector.py` contains utilities for inspecting I/O of modules +* `module_runner.py` contains code required to load modules at run-time * `module.py` contains definitions required for writing and using f4pga modules * `part_db.json` contains mappings from part names to platform names * `setup.py` contains a package installation script -* `stage.py` contains classes relevant to stage represetation +* `stage.py` contains classes relevant to stage representation * `modules` contains loadable modules * `platforms` contains platform flow definitions @@ -51,7 +51,7 @@ _DFS_ approach to invoke modules and check their inputs and outputs. ### Modification tracking Modification tracking is done by taking, comparing and keeping track of `adler32` -hashes of all dependencies. Each dependency has a set of hashes associted with it. +hashes of all dependencies. Each dependency has a set of hashes associated with it. The reason for having multiple hashes is that a dependency may have multiple "_consumers_", ie. _stages_ which take it as input. Each hash is associated with particular consumer. This is necessary, because the system tries to avoid rebuilds @@ -67,9 +67,9 @@ _f4pga_ exposes some data to the user as well as reads some using internal environmental variables. These can be referenced by users in _platform flow definitions_ and _project flow configurations_ using the `${variable_name}` syntax when defining values. They can also be read inside -_f4pga modules_ by accesing the `ctx.values` namespace. +_f4pga modules_ by accessing the `ctx.values` namespace. -The core of tis system is the `ResolutionEnvironemt` class which can be found +The core of its system is the `ResolutionEnvironemt` class which can be found inside the `common` module. ### Installation @@ -84,18 +84,18 @@ Check `CMakeLists.txt`. * Force "_on-demand_" outputs if they are required by another stage. This may require redesigning the "on-demand" feature, which currently works - by producing a dependency if and only if the user explicitely provides the + by producing a dependency if and only if the user explicitly provides the path. Otherwise the path is unknown. * Make commenting style consistent -* Document writing flow defintions +* Document writing flow definitions * Extend the metadata system for modules, perhaps make it easier to use. * Add missing metadata for module targets. -* (_suggestion_) Generate platform defintions using CMake. +* (_suggestion_) Generate platform definitions using CMake. ### Out of the current scope diff --git a/docs/f4pga/Usage.md b/docs/f4pga/Usage.md index cb877f88a..163145fb7 100644 --- a/docs/f4pga/Usage.md +++ b/docs/f4pga/Usage.md @@ -31,7 +31,7 @@ If you want to create a new project, it's highly recommended that you read this ### f4pga -`f4pga` is a modular build system designed to handle various _Verilog-to-bitsream_ flows for FPGAs. +`f4pga` is a modular build system designed to handle various _Verilog-to-bitstream_ flows for FPGAs. It works by wrapping the necessary tools in Python, which are called *f4pga modules*. Modules are then referenced in *platform flow definition* files, together with configuration specific for a given platform. 
@@ -55,7 +55,7 @@ _modules_ and f4pga _modules_) is a Python script that wraps a tool used within The main purpose of the wrappers is to provide a unified interface for `f4pga` to use and to configure the tool, as well as provide information about files required and produced by the tool. -### Dependecies +### Dependencies A *dependency* is any file, directory or a list of such that a *module* takes as its input or produces on its output. @@ -83,10 +83,10 @@ a stage, there's a `--nocache` option that treats the `.symbicache` file as if i ### Resolution -A *dependency* is said to be *resolved* if it meets one of the following critereia: +A *dependency* is said to be *resolved* if it meets one of the following criteria: * it exists on persistent storage and its hash matches the one stored in .symbicache -* there exists such *flow* that all of the dependieces of its modules are *resolved* and it produces the *dependency* in +* there exists such *flow* that all of the dependencies of its modules are *resolved* and it produces the *dependency* in question. ### Platform's flow definition @@ -99,12 +99,12 @@ the modules can reference. In case of some modules it may also define a set of parameters used during their construction. `mkdirs` module uses that to allow production of of multiple directories as separate dependencies. This however is an experimental feature which possibly will be removed in favor of having multiple instances of the same -module with renameable ouputs. +module with renameable outputs. Not all *dependencies** have to be *resolved* at this stage, a *platform's flow definition* for example won't be able to provide a list of source files needed in a *flow*. -### Projects's flow configuration +### Project's flow configuration Similarly to *platform flow definition*, *Projects flow configuration* is a _JSON_ that is used to configure *modules*. There are however a couple differences here and there. @@ -157,10 +157,10 @@ For example, let's consider the following *projects flow configuration (flow.jso ``` It specifies list of paths to Verilog source files as `sources` dependency. -Similarily it also provides an `XDC` file with constrains (`xdc` dependency). +Similarly, it also provides an `XDC` file with constraints (`xdc` dependency). It also names a path for synthesis and logs (`synth_log`, `pack_log`). -These two are optional on-demand outputs, meaning they won't be produces unless their paths are explicitely set. +These two are optional on-demand outputs, meaning they won't be produced unless their paths are explicitly set. `top` value is set to in order to specify the name of top Verilog module, which is required during synthesis. @@ -208,20 +208,20 @@ as strings unless they follow one of the following formats: * `[item1,item2,item3,...]` - this is a list of strings * `{key1:value1,key2:value2,key3:value3,...}` - this is a dictionary -Nesting structures is curently unsupported in CLI. +Nesting structures is currently unsupported in CLI. ### Pretend mode You can also add a `--pretend` (`-P`) option if you just want to see the results of dependency resolution for a specified target without building it. -This is useful when you just want to know what files will be generated and where wilh they be stored. +This is useful when you just want to know what files will be generated and where they will be stored. ### Info mode Modules have the ability to include description to the dependencies they produce. 
-Running `f4pga` with `--info` (`-i`) flag allows youn to see descriptions of these dependencies. -This option doesn't require a target to be specified, but you still have to provuide a flow configuration and platform +Running `f4pga` with the `--info` (`-i`) flag allows you to see descriptions of these dependencies. +This option doesn't require a target to be specified, but you still have to provide a flow configuration and platform name. This is still an experimental option, most targets currently lack descriptions and no information whether the output is @@ -258,23 +258,23 @@ This is only a snippet of the entire output. | long | short | arguments | description | |-----------|:-----:|--------------------------|----------------------------------------------------------------------------| -| --verobse | -v | - | Constrol verbosity level. 0 for no verbose output. 2 for maximum verbisity | -| --silent | -s | - | Surpress any output | +| --verbose | -v | - | Control verbosity level. 0 for no verbose output. 2 for maximum verbosity | +| --silent | -s | - | Suppress any output | -### Summary of all available subcommands +### Summary of all available sub-commands | name | description | |---------|-----------------------------| | build | Build a project | | showd | Print value of a dependency | -### Summary of all options available for `build` subcommand +### Summary of all options available for `build` sub-command | long | short | arguments | description | |-------------|:-----:|--------------------------|---------------------------------------------------------| | --flow | -f | flow configuration file | Use flow configuration file | | --platform | | platform name | Specify target platform name (eg. x7a100t) | -| --part | -p | part name | Speify target platform by part name | +| --part | -p | part name | Specify target platform by part name | | --target | -t | target dependency name | Specify target to produce | | --info | -i | - | Display information about available targets | | --pretend | -P | - | Resolve dependencies without executing the flow | @@ -283,7 +283,7 @@ This is only a snippet of the entire output. -### Summary of all options available for `showd` subcommand +### Summary of all options available for `showd` sub-command | long | short | arguments | description | |-------------|:-----:|--------------------------|--------------------------------------------------------------------------| | --flow | -f | flow configuration file | Use flow configuration file | @@ -320,7 +320,7 @@ Project status: f4pga: DONE ``` -The letters in the boxes describe the status of a dependency which's name is next to the box. +The letters in the boxes describe the status of a dependency whose name is next to the box. * **X** - dependency unresolved. Dependency is not present or cannot be produced. @@ -339,11 +339,11 @@ The letters in the boxes describe the status of a dependency which's name is nex This should be fixed in the future. ::: - * **S** - depenendency not present, resolved. + * **S** - dependency not present, resolved. This dependency is not currently available on the persistent storage, however it will be produced within flow's execution. - * **R** - depenendency present, resolved, requires rebuild. + * **R** - dependency present, resolved, requires rebuild. This dependency is currently available on the persistent storage, however it has to be rebuilt due to the changes in the project. 
@@ -356,11 +356,11 @@ Additional info about a dependency will be displayed next to its name after a co * In case of dependencies which do not require execution of any modules, only a path or list of paths to file(s)/directory(ies) that will be displayed. -* In case of unresolved dependencies (**X**), which are never produced by any module, a text sying "`MISSING`" will be +* In case of unresolved dependencies (**X**), which are never produced by any module, a text saying "`MISSING`" will be displayed. In the example above file `counter.v` has been modified and is now marked as "**N**". -This causes a bunch of other dependencies to be reqbuilt ("**R**"). +This causes a bunch of other dependencies to be rebuilt ("**R**"). `build_dir` and `xdc` were already present, so they are marked as "**O**". ## Common targets and values @@ -379,31 +379,31 @@ Below are lists of the target and value names along with their meanings. ### Available in most flows -| Target name | list | Description | -|-------------|:----:|-------------| -| `eblif` | no | Extended blif file | -| `bitstream` | no | Bitstream | -| `net` | no | Netlist | -| `fasm` | no | Final FPGA Assembly | -| `fasm_extra` | no | Additional FPGA assembly that may be generated during synthesis | -| `build_dir` | no | A directory to put the output files in | +| Target name | list | Description | +|--------------|:----:|-----------------------------------------------------------------| +| `eblif` | no | Extended blif file | +| `bitstream` | no | Bitstream | +| `net` | no | Netlist | +| `fasm` | no | Final FPGA Assembly | +| `fasm_extra` | no | Additional FPGA assembly that may be generated during synthesis | +| `build_dir` | no | A directory to put the output files in | ### Built-in values -| Value name | type | Description | -|------------|------|-------------| -| `shareDir` | `string` | Path to f4pga's installation "share" directory | -| `python3` | `string` | Path to Python 3 executable | +| Value name | type | Description | +|-----------------|----------|---------------------------------------------------| +| `shareDir` | `string` | Path to f4pga's installation "share" directory | +| `python3` | `string` | Path to Python 3 executable | | `noisyWarnings` | `string` | Path to noisy warnings log (should be deprecated) | -| `prjxray_db` | `string` | Path to Project X-Ray database | +| `prjxray_db` | `string` | Path to Project X-Ray database | ### Used in flow definitions -| Value name | type | Description | -|------------|------|-------------| -| `top` | `string` | Top module name | -| `build_dir` | `string` | Path to build directory (should be optional) | -| `device` | `string` | Name of the device | -| `vpr_options` | `dict[string -> string \| number]` | Named ptions passed to VPR. No `--` prefix included. | -| `part_name` | `string` | Name of the chip used. The distinction between `device` and `part_name` is ambiguous at the moment and should be addressed in the future. | -| `arch_def` | `string` | Path to an XML file containing architecture definition. | +| Value name | type | Description | +|---------------|------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------| +| `top` | `string` | Top module name | +| `build_dir` | `string` | Path to build directory (should be optional) | +| `device` | `string` | Name of the device | +| `vpr_options` | `dict[string -> string \| number]` | Named options passed to VPR. 
No `--` prefix included. | +| `part_name` | `string` | Name of the chip used. The distinction between `device` and `part_name` is ambiguous at the moment and should be addressed in the future. | +| `arch_def` | `string` | Path to an XML file containing architecture definition. | diff --git a/docs/f4pga/modules/fasm.md b/docs/f4pga/modules/fasm.md index 94cdcd316..2395c6324 100644 --- a/docs/f4pga/modules/fasm.md +++ b/docs/f4pga/modules/fasm.md @@ -1,6 +1,6 @@ # fasm -The _fasm_ module generates FPGA assebly using `genfasm` (VPR-only). +The _fasm_ module generates FPGA assembly using `genfasm` (VPR-only). The module should guarantee the following outputs: * `fasm` @@ -14,5 +14,5 @@ The setup of the synth module follows the following specifications: The `fasm` module accepts the following values: -* `pnr_corner` (string, optional): PnR conrenr to use. Relevant only for Quicklogic's +* `pnr_corner` (string, optional): PnR corner to use. Relevant only for Quicklogic's eFPGAs. \ No newline at end of file diff --git a/docs/f4pga/modules/generic_script_wrapper.md b/docs/f4pga/modules/generic_script_wrapper.md index 207a2df6c..77aa0f28c 100644 --- a/docs/f4pga/modules/generic_script_wrapper.md +++ b/docs/f4pga/modules/generic_script_wrapper.md @@ -19,8 +19,8 @@ Parameters are everything when it comes to this module: * `file` (string, required if `mode` is "file"): Name of the file generated by the script. * `target` (string, required): Default name of the file of the generated - dependency. You can use all values available durng map_io stage. Each input - dependency alsogets two extra values associated with it: + dependency. You can use all values available during map_io stage. Each input + dependency also gets two extra values associated with it: `:dependency_name[noext]`, which contains the path to the dependency the extension with anything after last "." removed and `:dependency_name[dir]` which contains directory paths of the dependency. This is useful for deriving an output diff --git a/docs/f4pga/modules/index.md b/docs/f4pga/modules/index.md index 262a76ff6..7083e9af0 100644 --- a/docs/f4pga/modules/index.md +++ b/docs/f4pga/modules/index.md @@ -8,7 +8,7 @@ modules. ### Configuration interface: -Modules are configured through an internal API by _**sfbuf4pgaild**_. +Modules are configured through an internal API by _**f4pga**_. The basic requirement for a module script is to expose a class with `Module` interface. @@ -17,7 +17,7 @@ _**f4pga**_ reads its configuration from two different sources: and **project's flow configuration**, which is a set of configuration options provided by the user through a JSON file or CLI interface. -Thosse sources contain snippets of _module configurations_. +Those sources contain snippets of _module configurations_. A _module configuration_ is a structure with the following fields: @@ -37,12 +37,12 @@ A _module configuration_ is a structure with the following fields: In case of **platform's flow definition**, a `values` dictionary can be defined globally and the values defined there will be passed to every module's config. -Those values can be overriden per-module through `module_options` dictionary. +Those values can be overridden per-module through `module_options` dictionary. Parameters used during module's construction can also be defined in `module_options` as `params` (those are not a part of _module configuration_, instead they are used during the actual construction of a module instance, before it declares any of its -input/outputs etc.. 
This is typically used to acieve some parametrization over module's +input/outputs etc. This is typically used to achieve some parametrization over module's I/O). Defining dictionaries for `takes` and `produces` is currently disallowed within @@ -65,12 +65,12 @@ Unlike **platform's flow definition**, **project's flow configuration** may cont names to actual paths. Most dependencies can have their paths resolved implicitly without the need to provide explicit paths, which is a mechanism that is described in a later section of this document. However some dependencies must be provided -explicitelly, eg. paths to project's verilog source files. It should be noted that +explicitly, eg. paths to project's Verilog source files. It should be noted that depending on the flow definition and the dependency in question, the path does not necessarily have to point to an already existing file. If the dependency is a product of a module within the flow, the path assigned to it will be used by the module to build that dependency. This is also used to in case of _on-demand_ -dependencies, which won't be produced unless the user explicitelly provides a path +dependencies, which won't be produced unless the user explicitly provides a path for them. **project's flow configuration** cannot specify `params` for modules and does not @@ -85,11 +85,11 @@ is treated as a _stage-specific-configuration_. The key is a name of a stage within a flow for the specified platform and the values are dicts which may contain `dependencies` and `values` fields that overload `dependencies` and `values` -repespectively, locally for the stage. Additionally a `default_target` field can be -provided to specify a default target to built when the user does not specify it throgh -a CLI inteface. +respectively, locally for the stage. Additionally a `default_target` field can be +provided to specify a default target to build when the user does not specify it through +a CLI interface. -The afromentioned _*exceptions_ are: +The aforementioned _*exceptions_ are: * `dependencies` - dependencies shared by all platforms. * `values` - values shared by all platforms * `default_platform` - default platform to chose in case it doesn't get specified by the user @@ -145,7 +145,7 @@ With the following values defined ``` Be careful when using this kind of resolution, as it's computational and memory -complexity grows exponentially in ragards to the number of list variables being +complexity grows exponentially with regard to the number of list variables being referenced, which is a rather obvious fact, but it's still worth mentioning. The variables that can be referenced within a definition/configuration fall into 3 categories: @@ -171,7 +171,7 @@ ### `Module` class -Each nmodule is represented as a class derived from `Module` class. +Each module is represented as a class derived from the `Module` class. The class should implement the following methods: @@ -187,7 +187,7 @@ when instantiating a module. ### Module's execution modes -A module ahas essentially two execution modes: +A module has essentially two execution modes: * _mapping_ mode * _exec_ mode @@ -196,7 +196,7 @@ In _mapping_ mode the module is provided with an incomplete configuration which includes: - * `takes` namespace: this maps names of input dependecies to the paths of these + * `takes` namespace: this maps names of input dependencies to the paths of these dependencies * `values` namespace: this maps names of variables to the values of those variables. 
@@ -218,7 +218,7 @@ In _exec_ mode the module does the actual work. The configuration passed into this mode is full and it includes: -* `takes` namespace: this maps names of input dependecies to the paths of these +* `takes` namespace: this maps names of input dependencies to the paths of these dependencies * `values` namespace: this maps names of variables to the values of those variables. @@ -235,7 +235,7 @@ described in `outputs` should be present. In the the `__init__` method of module's class, the following fields should be set: -* `takes` - a list of symbolic dependency names for dependencies used byb the module +* `takes` - a list of symbolic dependency names for dependencies used by the module * `produces` - a list of symbolic dependencies names for dependencies produced by the module. * `values` - a list of names given to the variables used withing the module @@ -251,9 +251,9 @@ ways: * '`?`' _suffix_ * In `takes` - the dependency is not necessary for the module to execute - * In `produces` - the dependency may be produceed, but it is not guaranteed. + * In `produces` - the dependency may be produced, but it is not guaranteed. * In `values` the value is not required for the module to execute. - Refferreing to it through `ModuleContext.values.value_name` won't raise an + Referring to it through `ModuleContext.values.value_name` won't raise an exception if the value is not present, instead `None` will be returned. * '`!`' _suffix_ * In `produces` - the dependency is going to be produced only if the user diff --git a/docs/f4pga/modules/io_rename.md index e84730a1e..ee59125c8 100644 --- a/docs/f4pga/modules/io_rename.md +++ b/docs/f4pga/modules/io_rename.md @@ -1,7 +1,7 @@ # io_rename This module provides a way to rename (ie. change) dependencies and values of an -instance of a different module. It wraps another, module whoose name is specified in `params.module` and changes the names of the dependencies and values it relies on. +instance of a different module. It wraps another module, whose name is specified in `params.module`, and changes the names of the dependencies and values it relies on. ## Parameters @@ -18,7 +18,7 @@ Not specifying a mapping for a given entry will leave it with its original name. ## Values -All values specified for this modules will be accessible by tyhe wrapped module. +All values specified for this module will be accessible by the wrapped module. ## Extra notes diff --git a/docs/f4pga/modules/mkdirs.md index bcdafbae2..01e491e2e 100644 --- a/docs/f4pga/modules/mkdirs.md +++ b/docs/f4pga/modules/mkdirs.md @@ -1,9 +1,9 @@ # mkdirs -This modules creates directiories specified by the author of flow definition +This module creates directories specified by the author of the flow definition as its targets.. ## Parameters -Each key serves as aname of a directory to becreated, while the value is the +Each key serves as a name of a directory to be created, while the value is the path for that directory. 
\ No newline at end of file diff --git a/docs/f4pga/modules/synth.md b/docs/f4pga/modules/synth.md index 3910b007a..5b200ad88 100644 --- a/docs/f4pga/modules/synth.md +++ b/docs/f4pga/modules/synth.md @@ -12,9 +12,9 @@ The module should guarantee the following outputs: For detailed information about these targets, please refer to `docs/common targets and variables.md` -What files and how are they generated is dependendent on TCL scripts executed +What files and how are they generated is dependent on TCL scripts executed withing YOSYS and the script vary depending on the target platform. Due to this -design choice it is required for the author of the flow defnition to parametrize +design choice it is required for the author of the flow definition to parameterize the `synth` module in a way that will **GUARANTEE** the targets mentioned above will be generated upon a successful YOSYS run. @@ -31,11 +31,11 @@ The list should specify additional targets that will be generated The `synth` module requires the following values: * `tcl_scripts` (string, required): A path to a directory containing `synth.tcl` - and `conv.tcl` scripts that wiull be used by YOSYS. -* `read_verilog_args` (list[string | number], optional) - If specified, the verilog + and `conv.tcl` scripts that will be used by YOSYS. +* `read_verilog_args` (list[string | number], optional) - If specified, the Verilog sources will be read using the `read_verilog` procedure with options contained in this value. * `yosys_tcl_env` (dict[string -> string | list[string], required) - A mapping that defines environmental variables that will be used within the TCL scripts. This - should contain the references to module's inputs and outputs in order to gurantee + should contain the references to module's inputs and outputs in order to guarantee the generation of the desired targets.
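To close, a speculative sketch of how a flow definition author might satisfy the guarantee discussed above by parameterizing the `synth` module. The value names (`tcl_scripts`, `read_verilog_args`, `yosys_tcl_env`) and the `${shareDir}`/`${top}` references are documented; the concrete paths, the environment variable names, and the `${:output}` form for pointing an environment variable at one of the module's own outputs are assumptions made for this example:

```json
"synth": {
  "values": {
    "tcl_scripts": "${shareDir}/scripts/xc7",
    "read_verilog_args": [],
    "yosys_tcl_env": {
      "TOP": "${top}",
      "OUT_JSON": "${:json}",
      "OUT_FASM_EXTRA": "${:fasm_extra}"
    }
  }
}
```

The intent of a fragment like this is that `synth.tcl` reads the environment variables and writes its results to the referenced output paths, which is what makes the guaranteed targets actually appear after a successful YOSYS run.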