URL
https://opencores.org/ocsvn/radiohdl/radiohdl/trunk
Subversion Repositories radiohdl
Compare Revisions
- This comparison shows the changes necessary to convert path
/
- from Rev 1 to Rev 2
- ↔ Reverse comparison
Rev 1 → Rev 2
/radiohdl/trunk/base/test/cdf_dir/correct_files/dangling_test.txt
0,0 → 1,6
# Testfile for testing sections the CommonDictFile class |
# |
# First some keys with different spacing (spaces and tabs) |
this is a lost value which causes an exception |
|
global_key_1 = global_1 |
/radiohdl/trunk/base/test/cdf_dir/correct_files/key_value_test.txt
0,0 → 1,43
# Testfile for testing the CommonDictFile class |
# |
# First some keys with different spacing (spaces and tabs) |
|
space_key_0= |
space_key_1=value_1 |
space_key_2 =value_2 |
space_key_3= value_3 |
space_key_4 = value_4 |
space_key_5 = value_5 |
space_key_6 = value_6 |
space_key_7 = value_7 |
|
# Test keys with multiple values |
multi_key_1 = value10 value11 value12 |
multi_key_2 = value20, value21, value22 |
multi_key_3 = value30 |
value31 |
value32 |
value33 |
multi_key_4 = value40 = value41 = |
value42 |
|
# Difference between empty keys and multiline keys |
tricky_key_1 = |
tricky_key_2 = |
tricky_value_2 |
tricky_key_3 = |
|
["my_section"] |
section_key_1 = |
section_value_10 |
|
section_value_11 |
= # equal sign without a key is added as value |
value which is also part of section_key_1 |
|
warning_key_1 = Be aware that multiline values can be tricky: |
|
this also belongs |
to previous |
key 'warning_key_1' |
but_this = a new key-value pair |
/radiohdl/trunk/base/test/cdf_dir/correct_files/section_test.txt
0,0 → 1,18
# Testfile for testing sections the CommonDictFile class |
# |
# First some keys with different spacing (spaces and tabs) |
|
global_key_1 = global_1 |
global_key_2 = [ "aap", "noot", "mies" ] # this is seen as a section header instead of a value!!! |
|
just_some_key_1 = global value1 which is lost without a warning |
just_some_key_2 = global value2 which is lost without a warning |
|
[section_1] |
just_some_key_1 = section1 value1 which is lost without a warning |
just_some_key_2 = section1 value2 which is lost without a warning |
|
[section_2] |
just_some_key_1 = section2 value1 |
just_some_key_2 = section2 value2 |
|
/radiohdl/trunk/base/test/cdf_dir/empty_file/comment_only_dict.txt
0,0 → 1,4
# This file only |
# contains comments |
# and no |
# key = value lines |
/radiohdl/trunk/base/test/cdf_dir/empty_file/empty_dict.txt
--- radiohdl/trunk/base/test/cdf_dir/hdlbuildset_files/hdl_buildset_rsp.cfg (nonexistent)
+++ radiohdl/trunk/base/test/cdf_dir/hdlbuildset_files/hdl_buildset_rsp.cfg (revision 2)
@@ -0,0 +1,22 @@
+# TODO: Does RadioHDL still work for RSP??
+buildset_name = rsp
+technology_names = ip_virtex4
+family_names = virtex4
+block_design_names = sopc
+
+sim_tool_name = modelsim
+sim_tool_version = 6.6c
+synth_tool_name = ise
+synth_tool_version = 11.1
+ip_tool_name = coregen
+
+project_dir_depth_sim = 4
+project_dir_depth_synth = 0
+
+lib_root_dir = $RSP
+build_dir = $HDL_BUILD_DIR
+quartus_dir = /home/software/Altera/
+model_tech_altera_lib = /home/software/modelsim_altera_libs/
+model_tech_dir = /home/software/Mentor//modeltech
+vsim_dir = /linux_x86_64
+modelsim_search_libraries =
/radiohdl/trunk/base/test/cdf_dir/hdlbuildset_files/hdl_buildset_unb1.cfg
0,0 → 1,21
# Uniboard 1 configuration |
buildset_name = unb1 |
technology_names = ip_stratixiv |
family_names = stratixiv |
block_design_names = sopc |
|
sim_tool_name = modelsim |
sim_tool_version = 6.6c |
synth_tool_name = quartus |
synth_tool_version = 11.1sp2 |
|
lib_root_dirs = $RADIOHDL/libraries $RADIOHDL/applications $RADIOHDL/boards |
build_dir = $HDL_BUILD_DIR |
quartus_dir = /home/software/Altera/<synth_tool_version> |
model_tech_altera_lib = /home/software/modelsim_altera_libs/<synth_tool_version> |
model_tech_dir = /home/software/Mentor/<sim_tool_version>/modeltech |
vsim_dir = <model_tech_dir>/linux_x86_64 |
modelsim_search_libraries = |
# stratixiv only |
altera_ver lpm_ver sgate_ver altera_mf_ver altera_lnsim_ver stratixiv_ver stratixiv_hssi_ver stratixiv_pcie_hip_ver |
altera lpm sgate altera_mf altera_lnsim stratixiv stratixiv_hssi stratixiv_pcie_hip |
/radiohdl/trunk/base/test/cdf_dir/hdlbuildset_files/hdl_buildset_wrong.cfg
0,0 → 1,21
# Uniboard 1 configuration |
buildset_name = unb1 |
#technology_names = ip_stratixiv |
family_names = stratixiv |
block_design_names = sopc |
|
sim_tool_name = modelsim |
sim_tool_version = 6.6c |
synth_tool_name = quartus |
synth_tool_version = 11.1sp2 |
|
lib_root_dir = $RADIOHDL |
build_dir = $HDL_BUILD_DIR |
quartus_dir = /home/software/Altera/<synth_tool_version> |
model_tech_altera_lib = /home/software/modelsim_altera_libs/<synth_tool_version> |
model_tech_dir = /home/software/Mentor/<sim_tool_version>/modeltech |
vsim_dir = <model_tech_dir>/linux_x86_64 |
modelsim_search_libraries = |
# stratixiv only |
altera_ver lpm_ver sgate_ver altera_mf_ver altera_lnsim_ver stratixiv_ver stratixiv_hssi_ver stratixiv_pcie_hip_ver |
altera lpm sgate altera_mf altera_lnsim stratixiv stratixiv_hssi stratixiv_pcie_hip |
/radiohdl/trunk/base/test/cdf_dir/hdllib_files/hdllib_wrong.cfg
0,0 → 1,24
hdl_lib_name = technology |
hdl_library_clause_name = technology_lib |
#hdl_lib_uses_synth = |
hdl_lib_uses_sim = |
hdl_lib_technology = |
|
synth_files = |
technology_pkg.vhd |
$HDL_BUILD_DIR/<buildset_name>/modelsim/technology/technology_select_pkg.vhd |
test_bench_files = |
|
regression_test_vhdl = |
# no self checking tb available yet |
|
|
[modelsim_project_file] |
modelsim_copy_files = |
technology_select_pkg_<buildset_name>.vhd $HDL_BUILD_DIR/<buildset_name>/modelsim/technology/technology_select_pkg.vhd |
|
|
|
[quartus_project_file] |
quartus_copy_files = |
technology_select_pkg_<buildset_name>.vhd $HDL_BUILD_DIR/<buildset_name>/quartus/technology/technology_select_pkg.vhd |
/radiohdl/trunk/base/test/cdf_dir/hdllib_files/test_hdllib.cfg
0,0 → 1,24
hdl_lib_name = technology |
hdl_library_clause_name = technology_lib |
hdl_lib_uses_synth = |
hdl_lib_uses_sim = |
hdl_lib_technology = |
|
synth_files = |
technology_pkg.vhd |
$HDL_BUILD_DIR/<buildset_name>/modelsim/technology/technology_select_pkg.vhd |
test_bench_files = |
|
regression_test_vhdl = |
# no self checking tb available yet |
|
|
[modelsim_project_file] |
modelsim_copy_files = |
technology_select_pkg_<buildset_name>.vhd $HDL_BUILD_DIR/<buildset_name>/modelsim/technology/technology_select_pkg.vhd |
|
|
|
[quartus_project_file] |
quartus_copy_files = |
technology_select_pkg_<buildset_name>.vhd $HDL_BUILD_DIR/<buildset_name>/quartus/technology/technology_select_pkg.vhd |
/radiohdl/trunk/base/test/cdf_dir/hdltool_files/hdl_tool_quartus.cfg
0,0 → 1,27
# configuration file for defining the quartus installation on this system |
quartus_rootdir = ${QUARTUS_DIR}/quartus |
quartus_rootdir_override = ${QUARTUS_DIR}/quartus |
niosdir = ${QUARTUS_DIR}/nios2eds |
|
# extension to the PATH variable |
quartus_paths = |
<quartus_rootdir>/bin |
<niosdir>/bin |
<niosdir>/bin/gnu/H-i686-pc-linux-gnu/bin |
<niosdir>/bin/gnu/H-x86_64-pc-linux-gnu/bin |
<niosdir>/sdk2/bin |
|
[sopc] |
sopc_paths = |
<quartus_rootdir>/sopc_builder/bin |
sopc_environment_variables = |
sopc_kit_nios2 <niosdir> |
|
[qsys] |
qsys_paths = |
<quartus_rootdir>/../qsys/bin |
|
[user settings] |
user_environment_variables = |
altera_hw_tcl_keep_temp_files 1 |
|
/radiohdl/trunk/base/test/cdf_dir/referenced_files/reference_test.txt
0,0 → 1,34
# Testfile for testing the CommonDictFile class |
# |
non_ref_key_1 = some_value |
non_ref_key_2 = some_other_value |
|
early_ref_key1 = before <ref_key_2> is defined |
|
ref_key_1 = single_reference |
ref_key_2 = multiple words in the value |
|
simple_ref_1 = a value with <ref_key_1> |
double_ref_1 = a value with twice <ref_key_1><ref_key_1> |
triple_key_1 = its here <ref_key_1> and here <ref_key_1> and here <ref_key_1>!!! |
triple_key_2 = its here <ref_key_2> and here <ref_key_1>!!! |
|
ref_only_key_1 = <ref_key_1> |
|
wrong_ref_1 = what will double brackets <<ref_key_2>> do? # will not be replaced |
|
# this will give all nested_key_x the value 'some_value' |
nested_key_1 = some_value |
nested_key_2 = <nested_key_1> |
nested_key_3 = <nested_key_2> |
|
reverse_nested_key_1 = <reverse_nested_key_2> # becomes <reverse_nested_key_3> after substitution!!! |
reverse_nested_key_2 = <reverse_nested_key_3> |
reverse_nested_key_3 = some_value |
|
mutual_key_1 = <mutual_key_2> # becomes <<mutual_key_1> after substitution |
mutual_key_2 = <mutual_key_1> # remains the same |
|
loop_key_1 = <loop_key_1> # remains the same |
|
undefined_key_1 = reference to <non existing key> |
/radiohdl/trunk/base/test/cdf_dir/tree/cfgfile/a/dict.txt
0,0 → 1,3
# Testfile for testing the CommonDictFile class |
# |
key_1 = top_dir/a |
/radiohdl/trunk/base/test/cdf_dir/tree/cfgfile/b/0/dict.txt
0,0 → 1,3
# Testfile for testing the CommonDictFile class |
# |
key_1 = top_dir/b/0 |
/radiohdl/trunk/base/test/cdf_dir/tree/cfgfile/b/1/dict.txt
0,0 → 1,3
# Testfile for testing the CommonDictFile class |
# |
key_1 = top_dir/b/1 |
/radiohdl/trunk/base/test/cdf_dir/tree/cfgfile/b/2/dict.txt
0,0 → 1,3
# Testfile for testing the CommonDictFile class |
# |
key_1 = top_dir/b/2 |
/radiohdl/trunk/base/test/cdf_dir/tree/cfgfile/b/dict.txt
0,0 → 1,3
# Testfile for testing the CommonDictFile class |
# |
key_1 = top_dir/b |
/radiohdl/trunk/base/test/cdf_dir/tree/cfgfile/c/dict.txt
0,0 → 1,3
# Testfile for testing the CommonDictFile class |
# |
key_1 = top_dir/c |
/radiohdl/trunk/base/test/cdf_dir/tree/cfgfile/dict.txt
0,0 → 1,3
# Testfile for testing the CommonDictFile class |
# |
key_1 = top_dir |
/radiohdl/trunk/base/test/cdf_dir/tree/hdlbuildset/hdl_buildset_rsp.cfg
0,0 → 1,22
# TODO: Does RadioHDL still work for RSP?? |
buildset_name = rsp |
technology_names = ip_virtex4 |
family_names = virtex4 |
block_design_names = sopc |
|
sim_tool_name = modelsim |
sim_tool_version = 6.6c |
synth_tool_name = ise |
synth_tool_version = 11.1 |
ip_tool_name = coregen |
|
project_dir_depth_sim = 4 |
project_dir_depth_synth = 0 |
|
lib_root_dir = $RSP |
build_dir = $HDL_BUILD_DIR |
quartus_dir = /home/software/Altera/<synth_tool_version> |
model_tech_altera_lib = /home/software/modelsim_altera_libs/<synth_tool_version> |
model_tech_dir = /home/software/Mentor/<sim_tool_version>/modeltech |
vsim_dir = <model_tech_dir>/linux_x86_64 |
modelsim_search_libraries = |
/radiohdl/trunk/base/test/cdf_dir/tree/hdlbuildset/hdl_buildset_unb1.cfg
0,0 → 1,21
# Uniboard 1 configuration |
buildset_name = unb1 |
technology_names = ip_stratixiv |
family_names = stratixiv |
block_design_names = sopc |
|
sim_tool_name = modelsim |
sim_tool_version = 6.6c |
synth_tool_name = quartus |
synth_tool_version = 11.1sp2 |
|
lib_root_dir = $RADIOHDL |
build_dir = $HDL_BUILD_DIR |
quartus_dir = /home/software/Altera/<synth_tool_version> |
model_tech_altera_lib = /home/software/modelsim_altera_libs/<synth_tool_version> |
model_tech_dir = /home/software/Mentor/<sim_tool_version>/modeltech |
vsim_dir = <model_tech_dir>/linux_x86_64 |
modelsim_search_libraries = |
# stratixiv only |
altera_ver lpm_ver sgate_ver altera_mf_ver altera_lnsim_ver stratixiv_ver stratixiv_hssi_ver stratixiv_pcie_hip_ver |
altera lpm sgate altera_mf altera_lnsim stratixiv stratixiv_hssi stratixiv_pcie_hip |
/radiohdl/trunk/base/test/cdf_dir/tree/hdllib/technology/test_hdllib.cfg
0,0 → 1,24
hdl_lib_name = technology |
hdl_library_clause_name = technology_lib |
hdl_lib_uses_synth = |
hdl_lib_uses_sim = |
hdl_lib_technology = |
|
synth_files = |
technology_pkg.vhd |
$HDL_BUILD_DIR/<buildset_name>/modelsim/technology/technology_select_pkg.vhd |
test_bench_files = |
|
regression_test_vhdl = |
# no self checking tb available yet |
|
|
[modelsim_project_file] |
modelsim_copy_files = |
technology_select_pkg_<buildset_name>.vhd $HDL_BUILD_DIR/<buildset_name>/modelsim/technology/technology_select_pkg.vhd |
|
|
|
[quartus_project_file] |
quartus_copy_files = |
technology_select_pkg_<buildset_name>.vhd $HDL_BUILD_DIR/<buildset_name>/quartus/technology/technology_select_pkg.vhd |
/radiohdl/trunk/base/test/cdf_dir/tree/hdllib/util/test_hdllib.cfg
0,0 → 1,22
hdl_lib_name = util |
hdl_library_clause_name = util_lib |
hdl_lib_uses_synth = mm common common_mult technology |
hdl_lib_uses_sim = |
hdl_lib_technology = |
|
synth_files = |
src/vhdl/util_logic.vhd |
src/vhdl/util_heater_pkg.vhd |
src/vhdl/util_heater.vhd |
|
test_bench_files = |
tb/vhdl/tb_util_heater.vhd |
|
regression_test_vhdl = |
# no self checking tb available yet |
|
[modelsim_project_file] |
|
|
[quartus_project_file] |
|
/radiohdl/trunk/base/test/cdf_dir/tree/hdltool/hdl_tool_altera.cfg
0,0 → 1,23
# configuration file for defining the altera installation on this system |
altera_rootdir = ${ALTERA_DIR}/altera |
altera_rootdir_override = ${ALTERA_DIR}/altera |
niosdir = ${ALTERA_DIR}/nios2eds |
|
# extension to the PATH variable |
altera_paths = |
<altera_rootdir>/bin |
<niosdir>/bin |
<niosdir>/bin/gnu/H-i686-pc-linux-gnu/bin |
<niosdir>/bin/gnu/H-x86_64-pc-linux-gnu/bin |
<niosdir>/sdk2/bin |
|
[sopc] |
sopc_paths = |
sopc_environment_variables = |
|
[qsys] |
qsys_paths = |
|
[user settings] |
user_environment_variables = |
|
/radiohdl/trunk/base/test/cdf_dir/tree/hdltool/hdl_tool_quartus.cfg
0,0 → 1,27
# configuration file for defining the quartus installation on this system |
quartus_rootdir = ${QUARTUS_DIR}/quartus |
quartus_rootdir_override = ${QUARTUS_DIR}/quartus |
niosdir = ${QUARTUS_DIR}/nios2eds |
|
# extension to the PATH variable |
quartus_paths = |
<quartus_rootdir>/bin |
<niosdir>/bin |
<niosdir>/bin/gnu/H-i686-pc-linux-gnu/bin |
<niosdir>/bin/gnu/H-x86_64-pc-linux-gnu/bin |
<niosdir>/sdk2/bin |
|
[sopc] |
sopc_paths = |
<quartus_rootdir>/sopc_builder/bin |
sopc_environment_variables = |
sopc_kit_nios2 <niosdir> |
|
[qsys] |
qsys_paths = |
<quartus_rootdir>/../qsys/bin |
|
[user settings] |
user_environment_variables = |
altera_hw_tcl_keep_temp_files 1 |
|
/radiohdl/trunk/base/test/cdf_dir/wrong_files/dangling_test.txt
0,0 → 1,6
# Testfile for testing sections the CommonDictFile class |
# |
# First some keys with different spacing (spaces and tabs) |
this is a lost value which causes an exception |
|
global_key_1 = global_1 |
/radiohdl/trunk/base/test/cdf_dir/wrong_files/wrong_key_test.txt
0,0 → 1,6
# Testfile for testing the CommonDictFile class |
# |
# First some keys with different spacing (spaces and tabs) |
|
key with spaces = should result in an exception |
|
/radiohdl/trunk/base/test/t_hdl_configfile.py
0,0 → 1,147
import unittest |
from configfile import * |
from hdl_configfile import * |
|
class Test_construction(unittest.TestCase): |
"Class to the various ways of construction" |
|
def test_wrong_filename(self): |
"Test constructor with non-existing file" |
self.assertRaises(ConfigFileException, ConfigFile, "/Is/Not/A/Valid/Directory") |
|
def test_empty_dictfile(self): |
"Test constructor with empty config file" |
cfg = ConfigFile("./cdf_dir/empty_file/empty_dict.txt") |
self.assertEqual(len(cfg.content), 0) |
|
def test_comment_only_dictfile(self): |
"Test constructor with comment-only config files" |
cfg = ConfigFile("./cdf_dir/empty_file/comment_only_dict.txt") |
self.assertEqual(len(cfg.content), 0) |
|
|
class Test_key_value_spacing(unittest.TestCase): |
"Class to the various kind of spacing between the keys and the values" |
|
def test_key_value_spacing(self): |
cfg = ConfigFile("./cdf_dir/correct_files/key_value_test.txt") |
self.assertEqual(cfg.space_key_1, "value_1") |
self.assertEqual(cfg.space_key_2, "value_2") |
self.assertEqual(cfg.space_key_3, "value_3") |
self.assertEqual(cfg.space_key_4, "value_4") |
self.assertEqual(cfg.space_key_5, "value_5") |
self.assertEqual(cfg.space_key_6, "value_6") |
self.assertEqual(cfg.space_key_7, "value_7") |
self.assertEqual(cfg.multi_key_1, "value10 value11 value12") |
self.assertEqual(cfg.multi_key_2, "value20, value21, value22") |
self.assertEqual(cfg.multi_key_3, "value30 value31 value32 value33") |
self.assertEqual(cfg.multi_key_4, "value40 = value41 = value42") |
self.assertEqual(cfg.tricky_key_1, "") |
self.assertEqual(cfg.tricky_key_2, "tricky_value_2") |
self.assertEqual(cfg.tricky_key_3, "") |
self.assertEqual(cfg.section_headers, ['"my_section"']) |
self.assertEqual(cfg.warning_key_1, |
"Be aware that multiline values can be tricky: this also belongs to previous key 'warning_key_1'") |
# also test attribute access versus item access |
self.assertEqual(cfg.multi_key_2, cfg['multi_key_2']) |
print cfg.content |
|
def test_sections(self): |
cfg = ConfigFile("./cdf_dir/correct_files/section_test.txt") |
self.assertEqual(cfg.global_key_1, "global_1"), |
self.assertEqual(cfg.global_key_2, '[ "aap", "noot", "mies" ]'), |
self.assertEqual(cfg.just_some_key_1, "section2 value1"), |
self.assertEqual(cfg.just_some_key_2, "section2 value2"), |
self.assertEqual(cfg.section_headers, ['section_1', 'section_2']) |
|
def test_dangling_value(self): |
"Test if a value without a key is detected" |
self.assertRaises(ConfigFileException, ConfigFile, "./cdf_dir/wrong_files/dangling_test.txt") |
|
def test_keys_with_spaces(self): |
"Test if a key that contains spaces is detected" |
self.assertRaises(ConfigFileException, ConfigFile, "./cdf_dir/wrong_files/wrong_key_test.txt") |
|
|
class Test_reference_key_substitution(unittest.TestCase): |
"Class to the the substitution of referenced keys." |
|
def test_read_the_file(self): |
cfg = ConfigFile("./cdf_dir/referenced_files/reference_test.txt") |
self.assertEqual(cfg.early_ref_key1, "before <ref_key_2> is defined") |
self.assertEqual(cfg.simple_ref_1, "a value with <ref_key_1>") |
self.assertEqual(cfg.double_ref_1, "a value with twice <ref_key_1><ref_key_1>") |
self.assertEqual(cfg.triple_key_1, "its here <ref_key_1> and here <ref_key_1> and here <ref_key_1>!!!") |
self.assertEqual(cfg.triple_key_2, "its here <ref_key_2> and here <ref_key_1>!!!") |
self.assertEqual(cfg.ref_only_key_1, "<ref_key_1>") |
self.assertEqual(cfg.wrong_ref_1, "what will double brackets <<ref_key_2>> do?") |
self.assertEqual(cfg.undefined_key_1, "reference to <non existing key>") |
|
self.assertEqual(cfg.nested_key_1, "some_value") |
self.assertEqual(cfg.nested_key_2, "<nested_key_1>") |
self.assertEqual(cfg.nested_key_3, "<nested_key_2>") |
self.assertEqual(cfg.reverse_nested_key_1, "<reverse_nested_key_2>") |
self.assertEqual(cfg.reverse_nested_key_2, "<reverse_nested_key_3>") |
self.assertEqual(cfg.reverse_nested_key_3, "some_value") |
self.assertEqual(cfg.mutual_key_1, "<mutual_key_2>") |
self.assertEqual(cfg.mutual_key_2, "<mutual_key_1>") |
self.assertEqual(cfg.loop_key_1, "<loop_key_1>") |
|
cfg.resolve_key_references() |
|
self.assertEqual(cfg.early_ref_key1, "before multiple words in the value is defined") |
self.assertEqual(cfg.simple_ref_1, "a value with single_reference") |
self.assertEqual(cfg.double_ref_1, "a value with twice single_referencesingle_reference") |
self.assertEqual(cfg.triple_key_1, "its here single_reference and here single_reference and here single_reference!!!") |
self.assertEqual(cfg.triple_key_2, "its here multiple words in the value and here single_reference!!!") |
self.assertEqual(cfg.ref_only_key_1, "single_reference") |
self.assertEqual(cfg.wrong_ref_1, "what will double brackets <<ref_key_2>> do?") |
self.assertEqual(cfg.undefined_key_1, "reference to <non existing key>") |
|
self.assertEqual(cfg.nested_key_1, "some_value") |
self.assertEqual(cfg.nested_key_2, "some_value") |
self.assertEqual(cfg.nested_key_3, "some_value") |
self.assertEqual(cfg.reverse_nested_key_1, "<reverse_nested_key_3>") |
self.assertEqual(cfg.reverse_nested_key_2, "some_value") |
self.assertEqual(cfg.reverse_nested_key_3, "some_value") |
self.assertEqual(cfg.mutual_key_1, "<mutual_key_1>") |
self.assertEqual(cfg.mutual_key_2, "<mutual_key_1>") |
self.assertEqual(cfg.loop_key_1, "<loop_key_1>") |
|
|
class Test_hdltool_file(unittest.TestCase): |
"Class to test the hdltool class." |
|
def test_read_good_hdltool_file(self): |
tool = HdlTool("./cdf_dir/hdltool_files/hdl_tool_quartus.cfg") |
self.assertEqual(tool.user_environment_variables, "altera_hw_tcl_keep_temp_files 1") |
|
|
class Test_hdl_buildset_file(unittest.TestCase): |
"Class to test the hdltool class." |
|
def test_read_good_hdlbuildset_file(self): |
buildset = HdlBuildset("./cdf_dir/hdlbuildset_files/hdl_buildset_unb1.cfg") |
self.assertEqual(buildset.buildset_name, "unb1") |
self.assertEqual(buildset.technology_names, "ip_stratixiv") |
self.assertEqual(buildset.lib_root_dirs, "$RADIOHDL/libraries $RADIOHDL/applications $RADIOHDL/boards") |
|
def test_read_wrong_hdlbuildset_file(self): |
self.assertRaises(ConfigFileException, HdlBuildset, "./cdf_dir/hdlbuildset_files/hdl_buildset_wrong.cfg") |
|
|
class Test_hdllib_file(unittest.TestCase): |
"Class to test the hdllib class." |
|
def test_read_good_hdllib_file(self): |
lib = HdlLib("./cdf_dir/hdllib_files/test_hdllib.cfg") |
self.assertEqual(lib.hdl_lib_name, "technology") |
self.assertEqual(lib.hdl_library_clause_name, "technology_lib") |
self.assertEqual(lib.hdl_lib_technology, "") |
|
def test_read_wrong_hdllib_file(self): |
self.assertRaises(ConfigFileException, HdlLib, "./cdf_dir/hdllib_files/hdllib_wrong.cfg") |
|
if __name__ == '__main__': |
unittest.main(verbosity=2) |
|
/radiohdl/trunk/base/test/t_hdl_configtree.py
0,0 → 1,68
import unittest |
from configtree import * |
from hdl_configtree import * |
|
class Test_construction(unittest.TestCase): |
"Class to the various ways of construction" |
|
def test_wrong_filename(self): |
"Test constructor with non-existing rootdir" |
self.assertRaises(ConfigFileException, ConfigTree, "/Is/Not/A/Valid/Directory", "dict.txt") |
|
def test_empty_dictfile(self): |
"Test constructor with empty config file" |
tree = ConfigTree("./cdf_dir/empty_file", "empty_dict.txt") |
self.assertEqual(len(tree.configfiles), 1) |
|
def test_comment_only_dictfile(self): |
"Test constructor with comment-only config files" |
tree = ConfigTree("./cdf_dir/empty_file", "comment_only_dict.txt") |
self.assertEqual(len(tree.configfiles), 1) |
|
|
class Test_tree_behaviour(unittest.TestCase): |
"Class to test the 'tree' functionality of the class" |
|
def test_tree_with_configfiles(self): |
"Test constructor with a tree with configfiles all containing a 'key_1' label" |
"that holds its relative path in the tree" |
tree = ConfigTree("./cdf_dir/tree/cfgfile", "dict.txt") |
for cfg in tree.configfiles.values(): |
#print cfg.ID, cfg.content |
expected_value = cfg.ID.replace("./cdf_dir/tree/cfgfile" , "top_dir").replace("/dict.txt", "") |
self.assertEqual(expected_value, cfg.key_1) |
|
def test_hdllib_tree(self): |
"Test if we can read in a tree with hdllib files." |
tree = HdlLibTree("./cdf_dir/tree/hdllib", "test_hdllib.cfg") |
self.assertEqual(len(tree.configfiles), 2) |
util = tree.configfiles['util'] |
self.assertEqual(util.hdl_library_clause_name, 'util_lib') |
self.assertEqual(util.synth_files, 'src/vhdl/util_logic.vhd src/vhdl/util_heater_pkg.vhd src/vhdl/util_heater.vhd') |
technology = tree.configfiles['technology'] |
self.assertEqual(technology.hdl_library_clause_name, 'technology_lib') |
self.assertEqual(technology.synth_files, 'technology_pkg.vhd $HDL_BUILD_DIR/<buildset_name>/modelsim/technology/technology_select_pkg.vhd') |
|
def test_hdlbuildset_tree(self): |
"Test if we can read in a tree with hdlbuildset files." |
tree = HdlBuildsetTree("./cdf_dir/tree/hdlbuildset", "hdl_buildset_*.cfg") |
self.assertEqual(len(tree.configfiles), 2) |
rsp = tree.configfiles['rsp'] |
self.assertEqual(rsp.technology_names, 'ip_virtex4') |
self.assertEqual(rsp.model_tech_altera_lib, '/home/software/modelsim_altera_libs/<synth_tool_version>') |
unb1 = tree.configfiles['unb1'] |
self.assertEqual(unb1.technology_names, 'ip_stratixiv') |
self.assertEqual(unb1.model_tech_altera_lib, '/home/software/modelsim_altera_libs/<synth_tool_version>') |
|
def test_hdltool_tree(self): |
"Test if we can read in a tree with hdltool files." |
tree = HdlToolTree("./cdf_dir/tree/hdltool", "hdl_tool_*.cfg") |
self.assertEqual(len(tree.configfiles), 2) |
altera = tree.configfiles['./cdf_dir/tree/hdltool/hdl_tool_altera.cfg'] |
self.assertEqual(altera.altera_rootdir, "${ALTERA_DIR}/altera") |
quartus = tree.configfiles['./cdf_dir/tree/hdltool/hdl_tool_quartus.cfg'] |
self.assertEqual(quartus.quartus_rootdir, "${QUARTUS_DIR}/quartus") |
|
if __name__ == '__main__': |
unittest.main(verbosity=2) |
|
/radiohdl/trunk/base/check_config
0,0 → 1,151
#!/usr/bin/env python |
############################################################################### |
# |
# Copyright (C) 2018 |
# ASTRON (Netherlands Institute for Radio Astronomy) <http://www.astron.nl/> |
# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands |
# |
# This program is free software: you can redistribute it and/or modify |
# it under the terms of the GNU General Public License as published by |
# the Free Software Foundation, either version 3 of the License, or |
# (at your option) any later version. |
# |
# This program is distributed in the hope that it will be useful, |
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
# GNU General Public License for more details. |
# |
# You should have received a copy of the GNU General Public License |
# along with this program. If not, see <http://www.gnu.org/licenses/>. |
# |
# $Id: generate_ip_libs.py 18842 2018-08-29 10:47:05Z overeem $ |
# |
############################################################################### |
|
import os, sys |
from os.path import expandvars, isfile, isdir |
from argparse import ArgumentParser |
from hdl_configfile import HdlBuildset, HdlTool |
|
def _do_basic_key_checking(cfgfile, indent=""): |
# Check that all key references are solved |
print "{}Checking references...".format(indent), |
all_refs_solved = cfgfile.resolve_key_references() |
if all_refs_solved: |
print "OK" |
else: |
print "\n{}ERROR: The following reference cannot be solved: {}".format(indent, cfgfile.unresolved_refs) |
|
# Check that all required keys contain values |
print "{}Checking required keys...".format(indent), |
empty_keys = [] |
for key in cfgfile.required_keys: |
if cfgfile.content[key] == "": |
empty_keys.append(key) |
if not empty_keys: |
print "OK" |
else: |
print "\n{}ERROR: The following required keys don't have a value: {}".format(indent, empty_keys) |
|
def _check_quartus_configfile(cfgfile, tool_types): |
# check required dirs |
for required_dir in [ "quartus_rootdir", "quartus_rootdir_override", "niosdir" ]: |
print " Checking {}...".format(required_dir), |
if isdir(expandvars(cfgfile[required_dir])): |
print "OK" |
else: |
print "\n ERROR: path {} does not exist!".format(cfgfile[required_dir]) |
|
# check _paths variables |
required_paths = [ "{}_paths".format(tool) for tool in tool_types ] |
for path_key in [ key for key in cfgfile.content.keys() if key.endswith("_paths") ]: |
paths = [ expandvars(pathname) for pathname in cfgfile[path_key].replace("\t"," ").split(" ") if pathname != "" ] |
print " Checking {}...".format(path_key) |
if not paths: |
print " no paths defined." |
else: |
for path in paths: |
if isdir(path): |
print " {}: OK".format(path) |
else: |
if path_key in required_paths: |
print " {}: DOES NOT EXIST!".format(path) |
else: |
print " {}: does not exist but is not required".format(path) |
|
# check IP generation |
print " Checking ip generation..." |
ip_tools = [ tool for tool in cfgfile.ip_tools.replace("\t"," ").split(" ") if tool != '' ] |
for ip_tool in ip_tools: |
opt_key = "{}_default_options".format(ip_tool) |
if not opt_key in cfgfile.content.keys(): |
print " {}: key is MISSING!".format(opt_key) |
else: |
print " {}: OK".format(opt_key) |
|
# check environment variables |
for envvar_key in [ key for key in cfgfile.content.keys() if key.endswith("_environment_variables") ]: |
items = [ item for item in cfgfile[envvar_key].replace("\t"," ").split(" ") if item != "" ] |
print " Checking {}...".format(envvar_key) |
if not items: |
print " no variables defined." |
else: |
if len(items)%2 == 0: |
print " number of values is correct" |
else: |
print " expected even number of values (not {})".format(len(items)) |
|
|
if __name__ == '__main__': |
# setup parser and parse the arguments. |
argparser = ArgumentParser(description='Check the content of your hdl_buildset file and the corresponding hdl_tool file.') |
argparser.add_argument('buildset', help="Filename like 'hdl_buildset_<buildset>.cfg'") |
args = argparser.parse_args() |
|
# construct full name of buildsetfile and read the file |
full_buildsetfile_name = expandvars("${HDL_CONFIG_DIR}/hdl_buildset_%s.cfg" % (args.buildset)) |
print "Reading {}...".format(full_buildsetfile_name) |
buildset_info = HdlBuildset(full_buildsetfile_name) |
|
_do_basic_key_checking(buildset_info) |
|
# check if lib_root_dirs exist |
print "Checking defined library directories...", |
lib_dirs = [ expandvars(libdir) for libdir in buildset_info.lib_root_dirs.replace("\t"," ").split(" ") |
if libdir != '' ] |
wrong_dirs = [] |
for libdir in lib_dirs: |
if not isdir(libdir): |
wrong_dirs.append(libdir) |
if not wrong_dirs: |
print "OK" |
else: |
print "\nERROR: The following library rootdir do not exist: ", wrong_dirs |
|
# Check tools |
subtoolnames = [ subtool for subtool in buildset_info.block_design_names.replace("\t"," ").split(" ") if subtool != '' ] |
toolnames = [ buildset_info.synth_tool_name, buildset_info.sim_tool_name ] |
for toolname in toolnames: |
print "Checking tool {}...".format(toolname), |
if not toolname in buildset_info.section_headers: |
print "\n Warning: No sectionheader found.", |
tool_dir = "{}_dir".format(toolname) |
if not tool_dir in buildset_info.content.keys(): |
print "\n ERROR: Key {} is missing.".format(tool_dir), |
else: |
os.environ[tool_dir.upper()] = buildset_info[tool_dir] |
tool_configfile = expandvars("${HDL_CONFIG_DIR}/hdl_tool_%s.cfg" % (toolname)) |
if not isfile(tool_configfile): |
print "\n Warning: File {} is missing!".format(tool_configfile), |
else: |
try: |
print "\n Reading {}...".format(tool_configfile) |
tool_info = HdlTool(tool_configfile) |
_do_basic_key_checking(tool_info, indent=" ") |
except ConfigFileException as excp: |
print "\n ERROR: File contains an error: {}".format(excp) |
|
if toolname == "quartus": |
_check_quartus_configfile(tool_info, toolnames+subtoolnames) |
|
|
radiohdl/trunk/base/check_config
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/base/common.py
===================================================================
--- radiohdl/trunk/base/common.py (nonexistent)
+++ radiohdl/trunk/base/common.py (revision 2)
@@ -0,0 +1,1063 @@
+###############################################################################
+#
+# Copyright (C) 2012
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+###############################################################################
+
+"""Common definitions
+
+"""
+
+################################################################################
+# System imports
+
+import time
+import math
+import operator
+import inspect
+import itertools
+import os
+import os.path
+
+################################################################################
+# Constants
+
+c_nibble_w = 4
+c_byte_w = 8
+c_halfword_w = 16
+c_word_w = 32
+c_longword_w = 64
+
+c_byte_sz = 1
+c_halfword_sz = 2
+c_word_sz = 4
+c_longword_sz = 8
+
+c_nibble_mod = 2**c_nibble_w # = 0x10
+c_nibble_mask = 2**c_nibble_w-1 # = 0x0F
+c_byte_mod = 2**c_byte_w # = 0x100
+c_byte_mask = 2**c_byte_w-1 # = 0x0FF
+c_halfword_mod = 2**c_halfword_w # = 0x10000
+c_halfword_mask = 2**c_halfword_w-1 # = 0x0FFFF
+c_word_mod = 2**c_word_w # = 0x100000000
+c_word_mask = 2**c_word_w-1 # = 0x0FFFFFFFF
+c_word_sign = 2**(c_word_w-1) # = 0x010000000
+c_longword_mod = 2**c_longword_w # = 0x10000000000000000
+c_longword_mask = 2**c_longword_w-1 # = 0x0FFFFFFFFFFFFFFFF
+c_longword_sign = 2**(c_longword_w-1) # = 0x01000000000000000
+
+c_nof_complex = 2
+
+
+################################################################################
+# Functions
+
+def greatest_common_div(A, B):
+ """
+ Find the greatest common divisor of A and B.
+ """
+ while B != 0:
+ rem = A % B
+ A = B
+ B = rem
+ return A
+
+def ceil_div(num, den):
+ """ Return integer ceil value of num / den """
+ return int(math.ceil( num / float(den) ) )
+
+def ceil_log2(num):
+ """ Return integer ceil value of log2(num) """
+ return int(math.ceil(math.log(int(num), 2) ) )
+
+def ceil_pow2(num):
+ """ Return power of 2 value that is equal or greater than num """
+ return 2**ceil_log2(num)
+
+def sel_a_b(sel, a, b):
+ if sel==True:
+ return a
+ else:
+ return b
+
+def smallest(a, b):
+    # NOTE(review): restored 'a<b' -- the '<' was eaten when the page scraper
+    # stripped it as the start of an HTML tag.
+    if a<b:
+        return a
+    else:
+        return b
+
+def signed32(v):
+ if v < c_word_sign:
+ return v
+ else:
+ return v-c_word_mod
+
+def signed64(v):
+ if v < c_longword_sign:
+ return v
+ else:
+ return v-c_longword_mod
+
+def int_clip(inp, w):
+ # Purpose : Clip an integer value to w bits
+ # Input:
+ # - inp = Integer value
+ # - w = Output width in number of bits
+ # Description: Output range -2**(w-1) to +2**(w-1)-1
+ # Return:
+ # - outp = Clipped value
+ outp=0
+ if w>0:
+ clip_p= 2**(w-1)-1
+ clip_n=-2**(w-1)
+ if inp > clip_p:
+ outp=clip_p
+ elif inp < clip_n:
+ outp=clip_n
+ else:
+ outp=inp
+ return outp
+
+def int_wrap(inp, w):
+ # Purpose: Wrap an integer value to w bits
+ # Input:
+ # - inp = Integer value
+ # - w = Output width in number of bits
+ # Description: Remove MSbits, output range -2**(w-1) to +2**(w-1)-1
+ # Return:
+ # - outp = Wrapped value
+ outp=0
+ if w>0:
+ wrap_mask=2**(w-1)-1
+ wrap_sign=2**(w-1)
+ if (inp & wrap_sign) == 0:
+ outp=inp & wrap_mask
+ else:
+ outp=(inp & wrap_mask) - wrap_sign
+ return outp
+
+def int_round(inp, w, direction="HALF_AWAY"):
+ # Purpose : Round the w LSbits of an integer value
+ # Input:
+ # - inp = Integer value
+ # - w = Number of LSbits to round
+ # - direction = "HALF_AWAY", "HALF_UP"
+ # Description:
+ # direction = "HALF_AWAY" --> Round half away from zero so +0.5 --> 1, -0.5 --> -1.
+ # direction = "HALF_UP" --> Round half to +infinity so +0.5 --> 1, -0.5 --> 0.
+ # Return:
+ # - outp = Rounded value
+ outp=inp
+ if w>0:
+ round_factor=2**w
+ round_p=2**(w-1)
+ round_n=2**(w-1)-1
+ if direction == "HALF_UP":
+ outp=(inp+round_p)/round_factor
+ if direction == "HALF_AWAY":
+ if inp >= 0:
+ outp=(inp+round_p)/round_factor
+ else:
+ outp=(inp+round_n)/round_factor
+ return outp
+
+def int_truncate(inp, w):
+ # Purpose : Truncate the w LSbits of an integer value
+ # Input:
+ # - inp = Integer value
+ # - w = Number of LSbits to truncate
+ # Description: Remove LSBits.
+ # Return:
+ # - outp = Truncated value
+ outp=inp
+ if w>0:
+ if inp >= 0:
+ outp=inp>>w
+ else:
+ outp=-((-inp)>>w)
+ return outp
+
+
+def int_requantize(inp, inp_w, outp_w, lsb_w=0, lsb_round=False, msb_clip=False, gain_w=0):
+ # Purpose : Requantize integer value similar as common_requantize.vhd
+ # Input:
+ # - inp = Integer value
+ # - inp_w = Input data width
+ # - outp_w = Output data width
+ # - lsb_w = Number of LSbits to truncate
+ # - lsb_round = when true round else truncate the input LSbits
+ # - msb_clip = when true clip else wrap the input MSbits
+ # - gain_w = Output gain in number of bits
+ # Description: First round or truncate the LSbits, then clip or wrap the MSbits and then apply optional output gain
+ # Return:
+ # - outp = Requantized value
+
+ # Input width
+ r = int_wrap(inp, inp_w)
+ # Remove LSBits using ROUND or TRUNCATE
+ if lsb_round:
+ r = int_round(r, lsb_w)
+ else:
+ r = int_truncate(r, lsb_w)
+ # Remove MSBits using CLIP or WRAP
+ if msb_clip:
+ r = int_clip(r, outp_w)
+ else:
+ r = int_wrap(r, outp_w)
+ # Output gain
+    r = r<<gain_w
+    return r
+
+def do_until(method, val, op, ms_retry=10, s_timeout=4, **kwargs):
+    # NOTE(review): this span was reconstructed -- the original text between
+    # 'r<' and '=0:' was stripped as an HTML tag by the page scraper. The
+    # signature is inferred from the do_until_* wrappers defined below;
+    # confirm against the upstream repository.
+    start = time.time()
+    while True:
+ if time.time() - start >= s_timeout:
+ print('do_until: Timeout occured!')
+ return 'Timeout'
+ data = []
+ if len(kwargs) > 0:
+ data.append(method(**kwargs))
+ else:
+ data.append(method())
+ flat_data = flatten(data)
+ list_ok = 1
+ for i in range(0, len(flat_data)):
+ if not op(flat_data[i], val):
+ list_ok = 0
+ if list_ok == 0:
+ time.sleep(ms_retry/1000)
+ if list_ok == 1:
+ return flat_data[0]
+
+def do_until_lt(method, val, ms_retry=10, s_timeout=4, **kwargs): return do_until(method, val, operator.lt, ms_retry, s_timeout, **kwargs)
+def do_until_le(method, val, ms_retry=10, s_timeout=4, **kwargs): return do_until(method, val, operator.le, ms_retry, s_timeout, **kwargs)
+def do_until_eq(method, val, ms_retry=10, s_timeout=4, **kwargs): return do_until(method, val, operator.eq, ms_retry, s_timeout, **kwargs)
+def do_until_ne(method, val, ms_retry=10, s_timeout=4, **kwargs): return do_until(method, val, operator.ne, ms_retry, s_timeout, **kwargs)
+def do_until_ge(method, val, ms_retry=10, s_timeout=4, **kwargs): return do_until(method, val, operator.ge, ms_retry, s_timeout, **kwargs)
+def do_until_gt(method, val, ms_retry=10, s_timeout=4, **kwargs): return do_until(method, val, operator.gt, ms_retry, s_timeout, **kwargs)
+
+def reverse_byte(byte):
+ """
+ Fast way to reverse a byte on 64-bit platforms.
+ """
+ #return (byte * 0x0202020202L & 0x010884422010L) % 1023
+ return(byte * 0x0202020202 & 0x010884422010) % 1023 # PD: check
+
+def reverse_word(word):
+ """
+ Fast way to reverse a word on 64-bit platforms.
+ """
+ B0 = (word & 0xFF)
+ B1 = (word & 0xFF00) >> 8
+ B2 = (word & 0xFF0000) >> 16
+ B3 = (word & 0xFF000000) >> 24
+ reversed_word = (reverse_byte(B0) << 24) | (reverse_byte(B1) << 16) | (reverse_byte(B2) << 8) | (reverse_byte(B3))
+ return reversed_word
+
+def add_list(aList, bArg):
+ """
+ Element by element add list b to list a or add value b to each element in list a
+ """
+ aLen = len(aList)
+ bList = listify(bArg)
+ if len(bList)==1:
+ bList = bList[0]*aLen
+ s = []
+ for i in range(aLen):
+ s.append(aList[i] + bList[i])
+ return s
+
+def add_list_elements(in_list):
+ """
+    Add list elements together, e.g. [1,2,3,4,5,6] -> 1+2+3+4+5+6=21
+ """
+ return reduce(lambda x, y: x+y, in_list)
+
+def subtract_list(aList, bArg):
+ """
+    Element by element subtract list b from list a or subtract value b from each element in list a
+ """
+ aLen = len(aList)
+ bList = listify(bArg)
+ if len(bList)==1:
+ bList = bList[0]*aLen
+ s = []
+ for i in range(aLen):
+ s.append(aList[i] - bList[i])
+ return s
+
+def multiply_list(aList, bArg):
+ """
+ Element by element multiply list b with list a or multiply value b with each element in list a
+ """
+ aLen = len(aList)
+ bList = listify(bArg)
+ if len(bList)==1:
+ bList = bList[0]*aLen
+ s = []
+ for i in range(aLen):
+ s.append(aList[i] * bList[i])
+ return s
+
+def multiply_list_elements(in_list):
+ """
+ Multiply list elements together, e.g. [1,2,3,4,5,6] -> 1*2*3*4*5*6=720.
+ """
+ return reduce(lambda x, y: x*y, in_list)
+
+def divide_list(aList, bArg):
+ """
+ Element by element divide list a by list b or divide each element in list a by value b
+ """
+ aLen = len(aList)
+ bList = listify(bArg)
+ if len(bList)==1:
+ bList = bList[0]*aLen
+    s = []  # NOTE(review): 's' was never initialised here; restored to match add/subtract/multiply_list
+    for i in range(aLen):
+ s.append(aList[i] / bList[i])
+ return s
+
+def split_list(source_list, split_size=None, sublist_items=None, nof_output_lists=None):
+ """
+ Splits a list based on split_size. Optionally, the indices passed in sublist_items
+ are extracted from each sublist.
+ """
+ if split_size==None:
+ split_size=len(source_list)/nof_output_lists
+ sublists = [source_list[i:i+split_size] for i in xrange(0, len(source_list), split_size)]
+ if sublist_items==None:
+ return sublists
+ else:
+ if len(listify(sublist_items))==1:
+ return [listify(operator.itemgetter(*listify(sublist_items))(sl)) for sl in sublists]
+ else:
+ return [list(operator.itemgetter(*listify(sublist_items))(sl)) for sl in sublists]
+
+def create_multidimensional_list(shape, element=None):
+ """Creating a multidimensional list (obtained from create_empty_array_of_shape() at stackoverflow)
+
+ For example shape = [2, 3] returns [[None, None, None], [None, None, None]]
+ """
+ if shape:
+ return [create_multidimensional_list(shape[1:], element) for i in xrange(shape[0])]
+ else:
+ return element
+
+def index_a_in_b(a, b, duplicates=False):
+ """
+ Find the elements of list a in list b and return their indices (relative to b).
+ Does not return duplicates by default.
+ """
+ if duplicates==False:
+ return [i for i,item in enumerate(b) if item in a]
+ else:
+ hits = []
+ for item_in_a in a:
+ hits.append( [i for i,item in enumerate(b) if item == item_in_a] )
+ return flatten(hits)
+
+def index_a_in_multi_b(a, b):
+ """
+ Find a in multi-dimensional list b. Returns first hit only.
+ """
+ if a == b: return []
+ try:
+ for i,e in enumerate(b):
+ r = index_a_in_multi_b(a,e)
+ if r is not None:
+ r.insert(0,i)
+ return r
+ except TypeError:
+ pass
+ return None
+
+def unique(in_list):
+ """
+ Extract unique list elements (without changing the order like set() does)
+ """
+ seen = {}
+ result = []
+ for item in in_list:
+ if item in seen: continue
+ seen[item] = 1
+ result.append(item)
+ return result
+
+def reverse_list(in_list):
+ """Return list in reversed index order"""
+ return list(reversed(in_list))
+
+def sort_list_indices(in_list, lowToHigh = True):
+ """Return list of original indices of in_list for the sorted(in_list)"""
+ result = sorted(range(len(in_list)), key = lambda ix : in_list[ix])
+ if lowToHigh==True:
+ return result
+ else:
+ return reverse_list(result)
+
+def list_duplicates(in_list):
+ """
+ find duplicate list elements
+ """
+ # http://stackoverflow.com/questions/9835762/find-and-list-duplicates-in-python-list
+ seen = set()
+ seen_add = seen.add
+ # adds all elements it doesn't know yet to seen and all other to seen_twice
+ seen_twice = set( x for x in in_list if x in seen or seen_add(x) )
+ # turn the set into a list (as requested)
+ return list( seen_twice )
+
+def all_the_same(lst):
+ """
+ Returns True if all the list elements are identical.
+ """
+ return lst[1:] == lst[:-1]
+
+def all_equal_to(lst, value):
+ """
+ Returns True if all the list elements equal 'value'.
+ """
+ if all_the_same(lst)==True and lst[0]==value:
+ return True
+ else:
+ return False
+
+def rotate_list(in_list, n):
+ """
+ Rotates the list. Positive numbers rotate left. Negative numbers rotate right.
+ """
+ return in_list[n:] + in_list[:n]
+
+def to_uword(arg):
+ """
+ Represent 32 bit value as unsigned word. Note that:
+
+ common.to_signed(common.to_uword(-1), 32) = -1
+
+ """
+ vRet = []
+ vList = listify(arg)
+ for value in vList:
+ v = int(value) & c_word_mask # mask the 32 bits, also accept float value by converting to int
+ vRet.append(v)
+ return unlistify(vRet)
+
+def to_unsigned(arg, width):
+ """
+ Interpret value[width-1:0] as unsigned
+ """
+ c_mask = 2**width-1
+ vRet = []
+ vList = listify(arg)
+ for value in vList:
+ v = int(value) & c_mask # mask the lower [width-1:0] bits, also accept float value by converting to int
+ vRet.append(v)
+ return unlistify(vRet)
+
+def to_signed(arg, width):
+ """
+ Interpret arg value[width-1:0] or list of arg values as signed (two's complement)
+ """
+ c_wrap = 2**width
+ c_mask = 2**width-1
+ c_sign = 2**(width-1)
+ vRet = []
+ vList = listify(arg)
+ for value in vList:
+ v = int(value) & c_mask # mask the lower [width-1:0] bits, also accept float value by converting to int
+ if v & c_sign:
+ v -= c_wrap # keep negative values and wrap too large positive values
+ vRet.append(v)
+ return unlistify(vRet)
+
+def max_abs(data):
+ return max(max(data), -min(data))
+
+def insert(orig, new, pos):
+ """
+ Inserts new (string, element) inside original string or list at pos.
+ """
+ return orig[:pos] + new + orig[pos:]
+
+def deinterleave(input_stream, nof_out, block_size=1):
+ """
+ Deinterleave a stream (=flat list) into nof_out output streams based on block_size.
+ >> deinterleave( [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16], 4, 2 )
+ >> [ [1,2,9,10], [3,4,11,12], [5,6,13,14], [7,8,15,16] ]
+ Note: len(input_stream)/nof_out/block_size should yield an integer.
+ Note: This method behaves exactly like common_deinterleave.vhd.
+ """
+ # Check passed arguments:
+ if ( float(len(input_stream))/nof_out/block_size%1==0):
+ # Split the list into block_sized sublists:
+ if block_size>1:
+ block_list = split_list(input_stream, block_size)
+ else:
+ block_list = input_stream
+
+ # Split block_list into 2 sublists so we can transpose them
+ sublist_size = nof_out
+ block_2arr = split_list(block_list, sublist_size)
+ transposed = transpose(block_2arr)
+
+ # Flatten the list so we can re-split it:
+ flat_out_list = flatten(transposed)
+
+ # Divide this new stream across nof_out:
+ sublist_size = len(input_stream)/nof_out
+
+ result = split_list(flat_out_list, sublist_size)
+ return result
+ else:
+ print('deinterleave: Error: len(input_stream)/nof_out/block_size should yield an integer!')
+
+def interleave(input_streams, block_size=1):
+ """
+ Interleave a list of multiple lists into one based on block size.
+ Note: This method behaves exactly like common_interleave.vhd.
+ """
+ # flatten the list
+ flat_list = flatten(input_streams)
+
+ # Use deinterleave function to pull the stream apart block-wise and restore the correct order
+ nof_lists = len(input_streams[0]) / block_size
+ deint_block_list = deinterleave(flat_list, nof_lists, block_size)
+
+ # Flatten the list
+ result = flatten(deint_block_list)
+ return result
+
+def reinterleave(input_streams, nof_out, block_size_in=1, block_size_out=1):
+ """
+ Re-interleave X input streams across nof_out output streams. The input streams are first
+ deinterleaved with block_size_in, then nof_out interleaved streams are made with block_size_out.
+ Note: This method behaves exactly like common_reinterleave.vhd.
+ """
+ nof_in = len(input_streams)
+
+ # Array of deinterleavers:
+ # ------------------------
+ # deint_arr: [nof_in][deinterleaved streams]:
+ deint_arr = []
+ for in_no in range(nof_in):
+ deint_arr.append(deinterleave(input_streams[in_no], nof_out, block_size_in))
+
+ # Deinterleavers -> interleavers interconnect:
+ inter_in_arr = []
+ for i in range(nof_out):
+ inter_in = []
+ for j in range(nof_in):
+ inter_in.append(deint_arr[j][i])
+ inter_in_arr.append(inter_in)
+
+ # Array of interleavers:
+ # ----------------------
+ # inter_arr: [nof_out][interleaved streams]:
+ inter_out_arr = []
+ for out_no in range(nof_out):
+ inter_out_arr.append(interleave(inter_in_arr[out_no], block_size_out))
+ return inter_out_arr
+
+def transpose(matrix):
+ """
+ Transpose by using zip()
+ """
+ result = []
+ transposed = zip(*matrix)
+ # Python's zip() returns a list of tuples. We do not want that as tuples
+ # should only be used to bind items together; methods like flatten()
+ # preserve tuples (as tuples should not be broken - that's why they're
+    # tuples in the first place) which would mean the output of a transpose
+ # could not be flattened. So, convert the list of tuples to a list of lists:
+ for i in transposed:
+ result.append(list(i))
+ return result
+
+def straighten(matrix, padding=' '):
+ """
+ Straighten a crooked matrix by padding the shorter lists with the padding
+ up to the same length as the longest list.
+ """
+ padded_matrix = []
+    # NOTE(review): was len(max(matrix)), which measures the lexicographically
+    # largest row rather than the longest one, contradicting the docstring.
+    max_len = max(len(row) for row in matrix)
+ for row in matrix:
+ padded_matrix.append(pad(row, max_len, padding))
+ return padded_matrix
+
+def pad(lst, length, padding=' '):
+ """
+ Pad a list up to length with padding
+ """
+ return lst+[padding]*(length-len(lst))
+
+def depth(x):
+ """
+ Returns the depth of x. Returns 0 if x is not iterable (not a list or tuple).
+ """
+ if isinstance(x, list) or isinstance(x, tuple):
+ for level in itertools.count():
+ if not x:
+ return level
+ x = list(itertools.chain.from_iterable(s for s in x if isinstance(s, list) or isinstance(s, tuple)))
+ else:
+ return 0
+
+def listify(x):
+ """
+ Can be used to force method input to a list.
+ """
+ # The isinstance() built-in function is recommended over the type() built-in function for testing the type of an object
+ if isinstance(x, list):
+ return x
+ else:
+ return [x]
+
+def unlistify(x):
+ """
+ Converts 1-element list to x.
+ """
+ # The isinstance() built-in function is recommended over the type() built-in function for testing the type of an object
+ if isinstance(x, list):
+ if len(x)==1:
+ return x[0]
+ else:
+ return x
+ else:
+ return x
+
+def tuplefy(x):
+ """
+ Similar to listify().
+ This method enables user to iterate through tuples of inconsistent depth by
+ always returning a non-flat tuple.
+ Pushes a flat tuple (depth=1) e.g. (0,1,2) one level deeper: ( (0,1,2), ).
+ Non-flat tuples are returned untouched.
+ A non-tuple (depth=0) is also pushed into a tuple 2 levels deep.
+ """
+ if depth(x)==1:
+ return (x,)
+ elif depth(x)==0:
+ return ( (x,), )
+ else:
+ return x
+
+def method_name(caller_depth=0):
+ """
+ Returns the name of the caller method.
+ """
+ # Note: inspect.stack()[0][3] would return the name of this method.
+ return inspect.stack()[caller_depth+1][3]
+
+def method_arg_names(method):
+ """
+ Returns the names of the arguments of passed method.
+ """
+ return inspect.getargspec(method)[0]
+
+def concat_complex(list_complex, width_in_bits):
+ """
+ Concatenates the real and imaginary part into one integer.
+ The specifed width counts for both the real and imaginary part.
+ Real part is mapped on the LSB. Imaginary part is shifted to the MSB.
+ """
+ result = []
+ for i in range(len(list_complex)):
+ real = int(list_complex[i].real) & (2**width_in_bits-1)
+ imag = int(list_complex[i].imag) & (2**width_in_bits-1)
+ result.append((imag << width_in_bits) + real)
+ return result
+
+def unconcat_complex(list_concat, width_in_bits):
+ """
+ Unconcatenates the real and imaginary part from an integer list into
+ a complex list with separate real and imaginary part. The integers
+ in list_concat are 2*width_in_bits wide.
+ Real part is extracted from the LSB. Imaginary part is extracted the MSB.
+ Example:
+ >>> unconcat_complex(concat_complex([complex(1,2), complex(3,4)], 16), 16)
+ [(1+2j), (3+4j)]
+ """
+ result = []
+ for i in range(len(list_concat)):
+ real = list_concat[i] & (2**width_in_bits-1)
+ imag = (list_concat[i] >> width_in_bits) & (2**width_in_bits-1)
+ result.append(complex(real, imag))
+ return result
+
+def split_complex(list_complex):
+ """
+ Returns the real and imaginary part in two separate lists.
+ [list_re, list_im] = split_complex(list_complex)
+ """
+ list_real = []
+ list_imag = []
+ for i in range(len(list_complex)):
+ list_real.append(list_complex[i].real)
+ list_imag.append(list_complex[i].imag)
+ return (list_real, list_imag)
+
+
+def mac_str(n):
+ """
+ Converts MAC address integer to the hexadecimal string representation,
+ separated by ':'.
+ """
+ hexstr = "%012x" % n
+ return ':'.join([hexstr[i:i+2] for i in range(0, len(hexstr), 2)])
+
+def ip_str(n):
+ """
+ Converts IP address integer to the decimal string representation,
+ separated by '.'.
+ """
+ ip_bytes = CommonBytes(n, 4)
+ return str(ip_bytes[3])+'.'+str(ip_bytes[2])+'.'+str(ip_bytes[1])+'.'+str(ip_bytes[0])
+
+def mkdir(path):
+ """Recursively create leave directory and intermediate directories if they do not already exist."""
+ expandPath = os.path.expandvars(path) # support using environment variables in the file path
+ expandPath = os.path.expanduser(expandPath) # support using ~ in the file path
+ if not os.path.exists(expandPath):
+ os.makedirs(expandPath)
+
+def expand_file_path_name(fpn, dir_path=''):
+ """ Expand environment variables in fpn to get filePathName.
+ - if it is an absolute path return filePathName else
+ - if it still has a local file path prepend dir_path to the filePathName and return dir_path + filePathName.
+ """
+ filePathName = os.path.expandvars(fpn) # support using environment variables in the file path
+ filePathName = os.path.expanduser(filePathName) # support using ~ in the file path
+ if os.path.isabs(filePathName):
+ return filePathName # use absolute path to file
+ else:
+ return os.path.join(os.path.expandvars(dir_path), filePathName) # derive path to file from the directory path and a directory path to the file
+
+def find_string_in_file(fpn, find_str):
+ """Return index >= 0 if find_str is found in file fpn, returns -1 if find_str is not found in file fpn.
+
+ Can also find '\n'.
+ """
+ return open(fpn, 'r').read().find(find_str)
+
+def remove_from_list_string(list_str, item_str, sep=' '):
+ """Treat the string list_str as a list of items that are separated by sep and then
+ remove the specified item_str string from the list and return the list as a
+ string of items separated by sep. Also remove any duplicate items.
+ """
+ ls = list_str.split(sep)
+ ls = unique(ls)
+ ls.remove(item_str)
+ ls = sep.join(ls)
+ return ls
+
+def find_all_file_paths(rootDir, fileName):
+ """
+ Recursively search the rootDir tree to find the paths to all fileName files.
+ """
+ paths = []
+ for root, _, files in os.walk(rootDir):
+ if fileName in files:
+ paths.append(root)
+ return paths
+
+
+################################################################################
+# Classes
+
+class CommonBits:
+ """
+ The purpose of this class is to allow the user to:
+ 1) create a CommonBits object with some data, e.g:
+ >> my_bits = CommonBits(0xDEADBEEF)
+ 2) Use the bracket notation [] to extract bit ranges from that data:
+ >> print(hex(my_bits[31:0]))
+ 0xdeadbeef
+ >> print(hex(my_bits[31:4]))
+ 0xdeadbee
+ >> print(hex(my_bits[31:16]))
+ 0xdead
+ >> print(hex(my_bits[31]))
+ 0x1
+ >> print(hex(my_bits[0]))
+ 0x1
+ 3) If a (optional) data width is passed, leading zeroes are added.
+ >> my_bits = CommonBits(0xDEADBEEF, 16)
+ >> print(hex(my_bits))
+ 0xbeef
+ >> my_bits = CommonBits(0xDEADBEEF, 64)
+ >> print(hex(my_bits[63:32]))
+ 0x0
+ 4) Besides getting bit slices, setting bitslices is also possible:
+ >> my_bits = CommonBits(0xdeadbeef)
+ >> print(my_bits)
+ 0xdeadbeef
+ >> my_bits[15:0] = 0xbabe
+ >> print(my_bits)
+ 0xdeadbabe
+ 5) Use -1 to set a range of bits to all ones.
+ 6) Use VHDL-style & operator to concatenate CommonBits types.
+ >> MyBitsHi = 0xDEAD
+ >> MyBitsLo = 0xBEEF
+ >> print(MyBitsHi & MyBitsLo & CommonBits(0xCAFEBABE))
+ 0xdeadbeefcafebabe
+ """
+ def __init__(self, data, bits=0):
+
+ if data>=0:
+ self.data = data
+ else:
+ print("CommonBits: Error: Input data = %d. Only unsigned integers are supported, use to_unsigned(data, bits)." %data)
+
+ if bits>0:
+ # Set data width to passed 'bits'
+ self.data_bin_len = bits
+
+ # Check if data fits in passed nof bits
+ if self.get_bin_len(data) > self.data_bin_len:
+ print("CommonBits: Error: input data %d does not fit in passed number of bits (%d)" %(data, bits))
+
+ else:
+ # Use the minimum required data width
+ self.data_bin_len = self.get_bin_len(self.data)
+
+ def __getitem__(self, bitslice):
+ if self.check_slice(bitslice)==0:
+ if type(bitslice)==type(slice(1,2,3)):
+ # Get a bitmask for the bit range passed via the bitslice
+ bitmask = self.bitmask(bitslice.start - bitslice.stop +1)
+ return int((self.data >> bitslice.stop) & bitmask)
+ if type(bitslice)==type(0):
+ # We only want one bit
+ bitmask = self.bitmask(1)
+ return int((self.data >> bitslice) & bitmask)
+ print(bitmask)
+ else:
+ print('CommonBits: Error: invalid slice range')
+
+ def __setitem__(self, bitslice, value):
+ if self.check_slice(bitslice)==0:
+ if type(bitslice)==type(slice(1,2,3)):
+ # Get a bitmask for the bit range passed via the bitslice
+ bitmask = self.bitmask(bitslice.start - bitslice.stop +1)
+
+ if value==-1:
+ # Allow -1 to set range to all ones. Simply use the bitmask as data.
+ data=bitmask
+ elif value>=0:
+ data = value
+ else:
+ print("CommonBits: Error: Input data = %d. Only unsigned integers are supported, use to_unsigned(data, bits)." %value)
+
+ # Make sure bit length of passed data does not exceed bitmask length
+ if self.get_bin_len(data) <= self.get_bin_len(bitmask):
+ self.data = (self.data & ~(bitmask << bitslice.stop)) | (data << bitslice.stop)
+ else:
+ print('CommonBits: Error: passed value (%d) does not fit in bits [%d..%d].' %(value, bitslice.start, bitslice.stop))
+
+ if type(bitslice)==type(0):
+ # We only want to set one bit
+ bitmask = self.bitmask(1)
+ data=value
+ # Make sure bit length of passed data does not exceed bitmask length
+ if self.get_bin_len(data) <= self.get_bin_len(bitmask):
+ self.data = (self.data & ~(bitmask << bitslice)) | (data << bitslice)
+ else:
+ print('CommonBits: Error: passed value (%d) does not fit in bit [%d].' %(value, bitslice))
+
+ else:
+ print('CommonBits: Error: invalid slice range')
+
+ def __repr__(self):
+ if self.data_bin_len>1:
+ bitslice = slice(self.data_bin_len-1, 0, None)
+ else:
+ bitslice = 0
+ return str(self.__getitem__(bitslice))
+
+ def __len__(self):
+ return self.data_bin_len
+
+ def __str__(self):
+ return hex(int(self.__repr__()))
+
+ def __hex__(self):
+ return hex(int(self.__repr__()))
+
+ def __trunc__(self):
+ return int(self.data).__trunc__()
+
+ def __and__(self, other):
+ # To concatenate two CommonBits types, first create a new one with the combined length
+ ret = CommonBits(0, self.data_bin_len + other.data_bin_len )
+
+ # Now fill in the values. Self is interpreted as the MS part, other as the LS part.
+ ms_hi = ret.data_bin_len-1
+ ms_lo = other.data_bin_len
+ ls_hi = other.data_bin_len-1
+ ls_lo = 0
+ ret[ms_hi:ms_lo] = self.data
+ ret[ls_hi:ls_lo] = other.data
+ return ret
+
+ def hi(self):
+ data_bits = CommonBits(self.data)
+ result = []
+ for bit in range(0, self.data_bin_len):
+ if data_bits[bit]==1:
+ result.append(bit)
+ return result
+
+ def lo(self):
+ data_bits = CommonBits(self.data)
+ result = []
+ for bit in range(0, self.data_bin_len):
+ if data_bits[bit]==0:
+ result.append(bit)
+ return result
+
+ def bitmask(self, nof_bits):
+ # return a bitmask of nof_bits, e.g. 7 is a bitmask for 3 bits
+ return pow(2, nof_bits)-1
+
+ def check_slice(self, bitslice):
+ # Check that the user passed a valid slice e.g. [31:24], or 1 integer e.g. [31]
+ result = 0
+ if type(bitslice)==type(slice(1,2,3)):
+ # Slice type passed. Don't allow user to pass step
+ if bitslice.step!=None:
+ result+=1
+ print('CommonBits: Error: no step size allowed')
+ # We want user to pass range in [MS:LS] notation
+ if bitslice.stop > bitslice.start:
+ result+=1
+ print('CommonBits: Error: slice range should be [ms:ls]')
+ # Do not exceed MSb index
+ if bitslice.start>=self.data_bin_len:
+ result+=1
+ print('CommonBits: Error: Passed MSbit does not exist in data')
+ if type(bitslice)==type(0):
+ # One integer passed. Only check if the passed bit exists.
+ if bitslice>=self.data_bin_len:
+ result+=1
+ print('CommonBits: Error: Passed MSbit does not exist in data')
+ return result
+
+ def get_bin_len(self, value):
+ value_bin_str = bin(value)
+ # Cut the '0b' from the binary string:
+ value_bin = value_bin_str[2:len(value_bin_str)]
+ return len(value_bin)
+
+ def reversed(self):
+ format_str = '{:0%db}'%self.data_bin_len
+ res = int(format_str.format(self.data)[::-1], 2)
+ return res
+
+class CommonSymbols(CommonBits):
+ """
+ CommonBits operating on symbol boundaries.
+ """
+ def __init__(self, data, symbol_w, datasymbols=1):
+ if datasymbols>0:
+ CommonBits.__init__(self, data, datasymbols*symbol_w)
+ else:
+ CommonBits.__init__(self, data)
+
+ self.symbol_w = symbol_w
+ self.data_symbol_len = ceil_div(self.data_bin_len, symbol_w)
+
+ def __getitem__(self, symbolslice):
+ # Convert symbol range to bit range, let CommonBits do the rest
+ return CommonBits.__getitem__(self, self.symbolslice_to_bitslice(symbolslice))
+
+ def __setitem__(self, symbolslice, value):
+ # Convert symbol range to bit range, let CommonBits do the rest
+ return CommonBits.__setitem__(self, self.symbolslice_to_bitslice(symbolslice), value)
+
+ def __repr__(self):
+ if self.data_symbol_len>1:
+ symbolslice = slice(self.data_symbol_len-1, 0, None)
+ else:
+ symbolslice = 0
+ return self.__getitem__(symbolslice)
+
+ def __len__(self):
+ return self.data_symbol_len
+
+ def symbolslice_to_bitslice(self, symbolslice):
+ if type(symbolslice)==type(slice(1,2,3)):
+ MSS = symbolslice.start
+ LSS = symbolslice.stop
+ bitslice = slice((1+MSS)*self.symbol_w-1, LSS*self.symbol_w, None)
+ if type(symbolslice)==type(0):
+ MSb = (1+symbolslice)*self.symbol_w
+ LSb = (1+symbolslice)*self.symbol_w-self.symbol_w
+ bitslice = slice(MSb-1, LSb, None)
+ return bitslice
+
+
+class CommonBytes(CommonSymbols):
+ def __init__(self, data, datasymbols=1):
+ CommonSymbols.__init__(self, data, 8, datasymbols)
+
+class CommonShorts(CommonSymbols):
+ def __init__(self, data, datasymbols=1):
+ CommonSymbols.__init__(self, data, 16, datasymbols)
+
+class CommonWords(CommonSymbols):
+ def __init__(self, data, datasymbols=1):
+ CommonSymbols.__init__(self, data, 32, datasymbols)
+
+class CommonWords64(CommonSymbols):
+ def __init__(self, data, datasymbols=1):
+ CommonSymbols.__init__(self, data, 64, datasymbols)
+
+
Index: radiohdl/trunk/base/config_variable.py
===================================================================
--- radiohdl/trunk/base/config_variable.py (nonexistent)
+++ radiohdl/trunk/base/config_variable.py (revision 2)
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+###############################################################################
+#
+# Copyright (C) 2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: config_variable.py 18844 2018-08-29 12:41:53Z overeem $
+#
+###############################################################################
+import sys
+import os
+from os.path import expandvars, dirname, basename
+from argparse import ArgumentParser
+from configfile import ConfigFile
+
+if __name__ == '__main__':
+ # setup parser and parse the arguments.
+ argparser = ArgumentParser(description='Options and arguments for showing hdl_config keys values')
+ argparser.add_argument('configfile', help="Filename like 'hdl_buildset_.cfg'")
+ argparser.add_argument('keyname', help="Name of the key to show the value of.")
+ args = argparser.parse_args()
+
+ # resolve full name of configfile and force it to be explicit absolute or relative.
+ full_configfile_name = expandvars(args.configfile)
+ if full_configfile_name[0] != '/':
+ full_configfile_name = "./" + full_configfile_name
+ # read the file
+ cfg_info = ConfigFile(full_configfile_name)
+ cfg_info.resolve_key_references()
+
+ print "{}\n".format(os.path.expandvars(cfg_info.get_value(args.keyname, must_exist=True)))
+
radiohdl/trunk/base/config_variable.py
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/base/configfile.py
===================================================================
--- radiohdl/trunk/base/configfile.py (nonexistent)
+++ radiohdl/trunk/base/configfile.py (revision 2)
@@ -0,0 +1,292 @@
+###############################################################################
+#
+# Copyright (C) 2014-2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: configfile.py 18870 2018-09-06 13:39:53Z overeem $
+#
+###############################################################################
+
+"""Class for accessing the values of a configuration file of RadioHDL.
+
+ The contents of the configuration file consist of a series of key - value
+ pairs. These key - value pairs are read from the file and kept in a
+ single dictionary of keys and values that can be accessed via .content.
+
+ The format of the configuration file is similar to that of an ini file. For ini
+ files Python has the ConfigParser package, but that is not used here because
+ we need other parsing rules than the ones implemented in ConfigParser.
+ The parsing is done during the allocation of the class.
+
+ Like an ini file the configuration file can contain one or more sections. The
+ first section is common, has no header and always included. The specific
+ sections have a header that is marked by [section header]. The square brackets
+ '[' and ']' are used to identify the section header. If the 'section header' is
+ included in the argument 'sections' of the constructor then the keys of that
+ section will be included in the dictionary.
+
+ 'sections' = None --> ignore fileSections to include all sections in
+ the dict.
+ 'sections' = [] --> empty list to include only the common first
+ section in the dict.
+ 'sections' = ['section header' ...] -->
+ dedicated list of one or more section header
+ strings to include these specific sections,
+ and also the common first section, in the
+ dict.
+
+ The key and value string in the configuration file are separated by '='. Hence
+ the '=' character can not be used in keys. The '=' can be used in values,
+ because subsequent '=' on the same line are part of the value.
+ Each key must start on a new line and may not contain whitespace.
+ The value string can extend over one or multiple lines.
+
+ Comment in line is supported by preceding it with a '#'. The '#' and the
+ text after it on the line are stripped. The remainder of the line before
+ the '#' is still interpreted.
+
+ Example:
+ # This is a comment section
+ # a key starts on a new line and extends until the '='
+ # a key and its values are separated by '='
+
+ key=string # this is a comment and the key is still interpreted
+ key=string
+ key =string
+ key = string
+
+ # a key with multiple values in its string
+ key = value value value
+
+ # how the values are separated depends on the dictionary user
+ key = value, value, value
+ key = value value
+
+ # a key with many values can have its string extend on multiple lines,
+ # the newline is replaced by a ' ' and any indent is removed
+ key =
+ value
+ value
+ value
+
+ # empty lines and spaces are allowed
+ key = value
+"""
+
+import os.path
+import re
+from collections import OrderedDict
+
# Public names exported by this module.
__all__ = ['CFG_COMMENT_CHAR', 'CFG_ASSIGNMENT_CHAR', 'ConfigFileException', 'ConfigFile' ]

# Character that starts a comment; the remainder of the line is stripped before parsing.
CFG_COMMENT_CHAR = '#'
# Character that separates a key from its value.
CFG_ASSIGNMENT_CHAR = '='
+
+
class ConfigFileException(Exception):
    """Raised by ConfigFile for every error condition it detects:
    missing file, parse warnings, or absent required keys."""
    pass
+
+
+class ConfigFile(object):
+ """
+ :filename The full filename of the configfile. May contain environment variables.
+ :sections Optional. May contain a list of sections names. The key-value pairs in the other sections
+ are ignored.
+ :required_keys Optional. May contain a list of all keys that must exist in the configfile. If one or more
+ of those keys is missing in the configfile an exception is raised.
+ """
+ _CONFIGFILE_ATTRIBUTES = [ '_own_attr_', 'filename', 'location', 'sections', 'required_keys',
+ '__content__', 'section_headers', 'unresolved_refs', 'warnings' ]
+
+ def __init__(self, filename, sections=None, required_keys=[]):
+ """
+ Store the dictionaries from all fileName files in rootDir.
+ :raise ConfigFileException
+ """
+ full_filename = os.path.expanduser(os.path.expandvars(filename))
+ if not os.path.isfile(full_filename):
+ raise ConfigFileException ("configfile '%s' not found" % full_filename)
+
+ # Crucial: define dict for storing our (simulated) attributes
+ self.__dict__['_own_attr_'] = {}
+ self.__content__ = OrderedDict()
+
+ self.filename = os.path.basename(full_filename)
+ self.location = os.path.dirname(full_filename)
+ self.sections = sections
+ self.required_keys = required_keys
+ self.section_headers = []
+ self.unresolved_refs = []
+ self.warnings = ''
+
+ # Try to read the configuration file.
+ self._read_file()
+
+ # Check if all required keys are available
+ for required_key in self.required_keys:
+ if not required_key in self.__content__:
+ raise ConfigFileException("configfile '%s' missing key '%s'" % (filename, required_key))
+
+
+ def __getattr__(self, name):
+ """
+ Catch read-access to attributes to fetch values from the content dictionary.
+ :raise AtttributeError
+ """
+ if name in self._CONFIGFILE_ATTRIBUTES:
+ return self.__dict__['_own_attr_'][name]
+
+ if name in self.__dict__['_own_attr_']['__content__']:
+ return self.__dict__['_own_attr_']['__content__'][name]
+
+ if not hasattr(self, name):
+ raise AttributeError("%r object has no attribute %r" % (self.__class__.__name__, name))
+ return getattr(self, name)
+
+
+ def __setattr__(self, name, value):
+ """
+ Catch write-access to attributes to store values in the content dictionary.
+ :raise AtttributeError
+ """
+ if name in self._CONFIGFILE_ATTRIBUTES:
+ self.__dict__['_own_attr_'][name] = value
+ elif name in self.__dict__['_own_attr_']['__content__']:
+ self.__dict__['_own_attr_']['__content__'][name] = value
+ else:
+ if not hasattr(self, name):
+ raise AttributeError("%r object has no attribute %r" % (self.__class__.__name__, name))
+ setattr(self, name, value)
+
+ def __getitem__(self, key):
+ "Also allow access to the information as item."
+ return self.__getattr__(key)
+
+ def __setitem__(self, key, value):
+ "Also allow access to the information as item."
+ self.__setattr__(key, value)
+
+
+ def _add_kv_pair(self, key, value, include_in_section):
+ """
+ Internal function for adding a key-value pair in a neat way.
+ """
+ if not include_in_section:
+ return
+ if key.find(' ') > 0:
+ self.warnings += "Error: Key may not contain spaces: file '{}/{}', key '{}'"\
+ .format(self.location, self.filename, key)
+ elif key != '':
+ self.__content__[key] = value.strip() # Update dict with key and value
+
+ def _read_file(self):
+ """
+ Read the dictionary information the filePathName file.
+ The dictionary will contain all key-value pairs as well as a
+ 'section_headers' key that contains all the sections found in the file
+ (regardless if the keys of that section where included in the dict or not).
+ :raise ConfigFileException
+ """
+ include_section = True # default include all sections
+ key = ''
+ value = ''
+ linenr = 0
+ with open("{}/{}".format(self.location, self.filename), 'r') as configfile:
+ for line in configfile:
+ linenr += 1
+ ln = line.split(CFG_COMMENT_CHAR, 1)[0] # Strip comment from line
+ if len(ln) == 0:
+ continue
+ section_begin= ln.find('[') # Search for [section] header in this line
+ section_end = ln.find(']')
+ # section MUST start at first character of the line
+ if section_begin==0 and section_end>0:
+ self._add_kv_pair(key, value, include_section) # complete action on current key
+ key = ''
+ value = ''
+ # administrate new section
+ section_header = ln[1:section_end].strip() # new section header
+ self.section_headers.append(section_header)
+ include_section = True # default include this new section
+ if self.sections!=None:
+ if section_header not in self.sections:
+ include_section = False # skip this section
+ else:
+ key_end = ln.find(CFG_ASSIGNMENT_CHAR) # Search for key in this line
+ if key_end>0:
+ self._add_kv_pair(key, value, include_section) # complete action on current key
+ key = ln[0:key_end].strip() # start with new key
+ value = ln[key_end+1:].strip() # and new value
+ else:
+ # no assignment char found, append this line to the current value
+ value += ' ' # replace newline by space to separate values
+ value += ln.strip() # append value
+ if value.strip() != '' and key == '':
+ self.warnings += "Warning: value without a key: file '{}/{}', line {} ({})"\
+ .format(self.location, self.filename, linenr, value)
+ value = ''
+ self._add_kv_pair(key, value, include_section) # complete action on current key
+
+ if self.warnings != '':
+ raise ConfigFileException(self.warnings)
+
+
+ @property
+ def content(self):
+ "The content of the configfile as ordered dictionary."
+ return self.__content__
+
+ @property
+ def ID(self):
+ "Returns uniq ID (string) to identify this particular file. Fullfilename is used."
+ return "{}/{}".format(self.location, self.filename)
+
+
+ def resolve_key_references(self):
+ """
+ Replaces in all values the references to keys () with the value of that key.
+ Note that this operation is irreversable.
+ """
+ ref_pattern = r"<(.+?)>"
+ prog = re.compile(ref_pattern)
+ # loop over all items of the dict
+ for key, value in self.__content__.items():
+ # any reference in the value?
+ matchlist = list(set(prog.findall(value)))
+ for reference in matchlist:
+ if reference in self.__content__.keys():
+ self.__content__[key] = re.sub("<{}>".format(reference), self.__content__[reference], value)
+ value = self.__content__[key]
+ else:
+ self.unresolved_refs.append("<{}>".format(reference))
+ return len(self.unresolved_refs) == 0
+
+
+ def get_value(self, key, must_exist=False):
+ """
+ Get the value of a key. If the key does not exist and that is allowed 'None' is returned, in case the
+ key should have been there an exception is raised.
+ """
+ if key in self.__content__:
+ return self.__content__[key]
+
+ if must_exist:
+ raise ConfigFileException("Key '%s' does not exist in configfile %s/%s." % (key, self.location, self.filename))
+
+ return None
+
Index: radiohdl/trunk/base/configtree.py
===================================================================
--- radiohdl/trunk/base/configtree.py (nonexistent)
+++ radiohdl/trunk/base/configtree.py (revision 2)
@@ -0,0 +1,125 @@
+###############################################################################
+#
+# Copyright (C) 2014-2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: configtree.py 18891 2018-09-11 13:14:37Z overeem $
+#
+###############################################################################
+import sys
+import os
+import os.path
+import re
+from common import listify
+from configfile import ConfigFile, ConfigFileException
+
class ConfigTree(object):
    """
    Collection of ConfigFile objects found by recursively searching one or
    more root directories for files matching 'filename'.
    The collection maps ConfigFile.ID -> ConfigFile object.
    """

    def __init__(self, rootdirs, filename, sections=None):
        """
        Collect the information of all configuration files that are present under the rootdirs.
        :raise ConfigFileException when no matching file is found at all.
        """
        # Save construction arguments
        self.rootdirs = listify(rootdirs)
        self.filename = filename
        self.sections = listify(sections)

        # Define result variables: maps file ID -> ConfigFile object
        self._configfiles = {}

        # search and read the config files.
        self._read_all_configfiles()
        if len(self._configfiles) == 0:
            raise ConfigFileException("No '%s' files found in directory tree(s) '%s'." % (filename, rootdirs))

    def _read_all_configfiles(self):
        """
        Recursively search the rootdirs to find the configfiles and add the content to our admin.
        :raise ConfigFileException when two files resolve to the same ID.
        """
        # Turn a shell-style '*' in the filename into a regex group.
        ref_pattern = self.filename.replace("*", "(.+?)")
        name_mask = re.compile(ref_pattern + "$")
        for rootdir in self.rootdirs:
            for root, _, files in os.walk(rootdir):
                for some_filename in files:
                    if name_mask.search(some_filename):
                        cfgfile = self._factory_constructor("{}/{}".format(root, some_filename))
                        # check for duplicates
                        if cfgfile.ID in self._configfiles.keys():
                            raise ConfigFileException("File with id '%s' found twice (at '%s' and '%s')" %
                                    (cfgfile.ID, self._configfiles[cfgfile.ID].location, cfgfile.location))
                        self._configfiles[cfgfile.ID] = cfgfile

    def _factory_constructor(self, full_filename):
        """
        Function for returning the readin configfile. Derived classes *must* redefine this function.
        """
        return ConfigFile(full_filename, sections=self.sections)

    @property
    def configfiles(self):
        "Dictionary mapping file ID to ConfigFile object."
        return self._configfiles

    def remove_files_from_tree(self, files_to_remove):
        """
        Remove the given list of configfile IDs from our configfile administration.
        :raise KeyError when one of the files does not exist in our admin.
        """
        for cfgfileID in files_to_remove:
            self._configfiles.pop(cfgfileID)

    def limit_tree_to(self, files_to_keep):
        """
        Limit the configfile collection in our admin to the IDs given in the files_to_keep argument.
        """
        # Iterate over a snapshot of the IDs: the dict is modified inside the
        # loop. The original iterated the dict itself, which yields the ID
        # strings, and then dereferenced a non-existent '.ID' attribute on them.
        for cfgfile_id in list(self._configfiles.keys()):
            if cfgfile_id not in files_to_keep:
                self._configfiles.pop(cfgfile_id)

    def get_key_values(self, key, configfiles=None, must_exist=False):
        """
        Get the value of a key in all configfiles. If the key does not exist in a configfile and
        the flag must_exist is False then None is added to the result list. When the flag must_exist
        is true and the key is not defined in a configfile then an exception is raised.
        The configfiles to search in may be limited to 'configfiles' otherwise the whole tree is used.
        :return List of values
        :raises ConfigFileException
        """
        if configfiles is None:
            # Use the ConfigFile objects; iterating the dict directly (as the
            # original did) yields the ID strings instead of the objects.
            configfiles = self._configfiles.values()

        result = []
        for cfgfile in configfiles:
            result.append(cfgfile.get_value(key, must_exist))
        return result

    def get_configfiles(self, key, values=None, user_configfiles=None):
        """
        Get a list with all configfiles that contain the key with a value specified in values.
        If values==None then a list of all configfiles is returned that contain the key.
        The configfiles to search in may be restricted to the user_configfiles.
        """
        file_list = self._configfiles.values() if not user_configfiles else user_configfiles

        result = []
        for cfgfile in file_list:
            if cfgfile not in result and key in cfgfile.content:
                if values is None or cfgfile.content[key] in values:
                    result.append(cfgfile)
        return result
Index: radiohdl/trunk/base/export_config_variables.py
===================================================================
--- radiohdl/trunk/base/export_config_variables.py (nonexistent)
+++ radiohdl/trunk/base/export_config_variables.py (revision 2)
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+###############################################################################
+#
+# Copyright (C) 2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: export_config_variables.py 18837 2018-08-28 13:56:09Z overeem $
+#
+###############################################################################
+import sys
+import os
+from os.path import expandvars, dirname, basename
+from argparse import ArgumentParser
+from configfile import ConfigFile
+
+if __name__ == '__main__':
+ # setup parser and parse the arguments.
+ argparser = ArgumentParser(description="Options and arguments for constructing an 'export' command for shell based on the content of a hdltool config file.")
+ argparser.add_argument('configfile', help="Filename like 'hdl_buildset_.cfg'")
+ argparser.add_argument('keynames', help="Name(s) of the key(s) to show the value of. Use comma to seperate multiple keys.")
+ argparser.add_argument('--varnames', help="Name(s) of the environment variable(s) the keys are mapped to. Default the keynames in capitals are used as environment variable names.")
+ argparser.add_argument('--is-group', help="The keynames refer to groups of environment variables that must be set.",
+ action="store_true")
+ argparser.add_argument('--optional', help="The keynames are optional. When them do not exist not error is generated",
+ action="store_true")
+ args = argparser.parse_args()
+
+ # resolve full name of configfile and force it to be explicit absolute or relative.
+ full_configfile_name = expandvars(args.configfile)
+ if full_configfile_name[0] != '/':
+ full_configfile_name = "./" + full_configfile_name
+ # read the file
+ cfg_info = ConfigFile(full_configfile_name)
+ cfg_info.resolve_key_references()
+
+ # parse the keys if they are no group references
+ if not args.is_group:
+ # setup key- and variable- names
+ keys = args.keynames.split(',')
+ if args.varnames:
+ env_vars = args.varnames.split(',')
+ if len(keys) != len(env_vars):
+ argparser.error("Number of variable names must match the number of keys.")
+ else:
+ env_vars = []
+ for key in keys:
+ env_vars.append(key.upper())
+
+ # finally construct an export command for the key value pairs.
+ for idx, key in enumerate(keys):
+ print "export {}='{}'\n".format(env_vars[idx],
+ os.path.expandvars(cfg_info.get_value(key, must_exist=not(args.optional))))
+ sys.exit(0)
+
+ # Each key contains key-value pairs that must be exported in stead of a value
+ if args.varnames:
+ argparser.error("The option --varnames can not be used in combination with the option --is-group.")
+ keys = args.keynames.split(',')
+ for key in keys:
+ kv_pairs = cfg_info.get_value(key, must_exist=not(args.optional))
+ # currently 'by definition' the value we got has the format: [ [...]]
+ # check we have an even number of items
+ items = kv_pairs.split()
+ if len(items) % 2:
+ argparser.error("Key '{}' should contain an even number of items ({}).".format(key, items))
+ for idx in xrange(0, len(items)/2, 2):
+ print "export {}='{}'\n".format(items[idx].upper(), os.path.expandvars(items[idx+1]))
+
radiohdl/trunk/base/export_config_variables.py
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/base/export_hdllib_variables.py
===================================================================
--- radiohdl/trunk/base/export_hdllib_variables.py (nonexistent)
+++ radiohdl/trunk/base/export_hdllib_variables.py (revision 2)
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+###############################################################################
+#
+# Copyright (C) 2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: export_hdllib_variables.py 18844 2018-08-29 12:41:53Z overeem $
+#
+###############################################################################
+import sys
+import os
+from os.path import expandvars, dirname, basename
+from argparse import ArgumentParser
+from hdl_configfile import HdlBuildset
+from hdl_configtree import HdlLibTree
+
+if __name__ == '__main__':
+ # setup parser and parse the arguments.
+ argparser = ArgumentParser(description='Options and arguments for exporting hdllib keys values')
+ argparser.add_argument('buildset', help="Filename like 'hdl_buildset_.cfg'")
+ argparser.add_argument('libname', help="Name of the library to search in.")
+ argparser.add_argument('keys', help="Name of variable(s) to export.")
+ args = argparser.parse_args()
+
+ # read the buildset file
+ full_buildsetfile_name = expandvars("${HDL_CONFIG_DIR}/hdl_buildset_%s.cfg" % (args.buildset))
+ buildset_info = HdlBuildset(full_buildsetfile_name)
+ buildset_info.resolve_key_references()
+
+ # find out where the hdllibs files are and read them in
+ root_dirs = [ expandvars(rootdir) for rootdir in buildset_info.lib_root_dirs.replace("\t"," ").split(" ")
+ if rootdir != '' ]
+ lib_tree = HdlLibTree(rootdirs=root_dirs, filename="hdllib.cfg")
+ for key in args.keys.split(','):
+ print "export {}='{}'\n".format(key.lower(),
+ os.path.expandvars(lib_tree.configfiles[args.libname].get_value(key=key, must_exist=True)))
+
+
radiohdl/trunk/base/export_hdllib_variables.py
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/base/generate_ip_libs
===================================================================
--- radiohdl/trunk/base/generate_ip_libs (nonexistent)
+++ radiohdl/trunk/base/generate_ip_libs (revision 2)
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+###############################################################################
+#
+# Copyright (C) 2014-2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: generate_ip_libs 18918 2018-09-26 14:30:49Z overeem $
+#
+###############################################################################
+
+import sys
+from os.path import expandvars, dirname, basename
+from subprocess import call, STDOUT
+from common import listify, mkdir
+from argparse import ArgumentParser
+from hdl_configfile import HdlBuildset, HdlTool
+from hdl_configtree import HdlLibTree
+
+def run_qmegawiz(buildset, outputdir, hdllib, vhdl_files, options):
+ """
+ Run qmegawiz for the configuration in the given hdllib.
+ The script takes care that the exit code of qmegawiz is returned to the caller.
+ """
+ extra_options = hdllib.get_value("qmegawiz_extra_options", False)
+ if not extra_options:
+ extra_options = ""
+
+ error_code = 0
+ for vhdl_file in vhdl_files:
+ script = '. ${RADIOHDL}/tools/quartus/set_quartus %s\n' % buildset
+ script += 'cd %s\n' % outputdir
+ script += 'cp %s/%s .\n' % (hdllib.location, vhdl_file)
+ script += 'set -o pipefail\n'
+ # echo line without 'Info:' to make sure grep it's exit code is 0
+ script += '(echo " " ; qmegawiz %s %s %s 2>&1) | grep -iv Info:\n' \
+ % (options, extra_options, vhdl_file)
+ script += 'exit_code=$?\n'
+ script += 'rm %s\n' % vhdl_file
+ script += 'exit $exit_code\n'
+ #execute script
+ print "compiling {} ... ".format(vhdl_file)
+ return_code = call(script, stdout=None, stderr=STDOUT, shell=True)
+ # qmegawiz is very sloppy with it's exitcodes. We assume 0 is OK although this not always the case. :-(
+ if return_code == 0:
+ print "*** Generation (probably) OK\n"
+ else:
+ print "*** Error during generation, exitcode={}\n".format(return_code)
+ error_code |= return_code
+ return error_code
+
+
+def run_qsys(buildset, outputdir, hdllib, vhdl_files, options):
+ """
+ Run qsys for the configuration in the given hdllib.
+ The script takes care that the exit code of qsys is returned to the caller.
+ """
+ extra_options = hdllib.get_value("qsys-generate_extra_options", False)
+ if not extra_options:
+ extra_options = ""
+
+ error_code = 0
+ for vhdl_file in vhdl_files:
+ script = '. ${RADIOHDL}/tools/quartus/set_quartus %s\n' % buildset
+ script += 'cd %s\n' % hdllib.location
+ script += 'set -o pipefail\n'
+ # echo line without 'Info:' to make sure grep it's exit code is 0
+ script += '(echo " " ; qsys-generate %s %s --output-directory=%s %s 2>&1) | grep -iv Info:\n' \
+ % (options, extra_options, outputdir, vhdl_file)
+ script += 'exit_code=$?\n'
+ script += 'exit $exit_code\n'
+ # execute script
+ print "compiling {} ... ".format(vhdl_file)
+ return_code = call(script, stdout=None, stderr=STDOUT, shell=True)
+ if return_code == 0:
+ print "*** Generation OK\n"
+ else:
+ print "*** Error during generation, exitcode={}\n".format(return_code)
+ error_code |= return_code
+ return error_code
+
+
+def run_quartus_sh(buildset, outputdir, hdllib, tcl_files, options):
+ """
+ Run quartus_sh for the configuration in the given hdllib.
+ The script takes care that the exit code of quartus_sh is returned to the caller.
+ """
+ extra_options = hdllib.get_value("quartus_sh_extra_options", False)
+ if not extra_options:
+ extra_options = ""
+
+ error_code = 0
+ for tcl_file in tcl_files:
+ script = '. ${RADIOHDL}/tools/quartus/set_quartus %s\n' % buildset
+ script += 'cd %s/%s\n' % (outputdir, hdllib.quartus_sh_ip_srcdir)
+ script += 'set -o pipefail\n'
+ # echo line without 'Info:' to make sure grep it's exit code is 0
+ script += '(echo " " ; quartus_sh %s %s -t %s 2>&1) | grep -iv Info:\n' \
+ % (options, extra_options, tcl_file)
+ script += 'exit_code=$?\n'
+ script += 'exit $exit_code\n'
+ # execute script
+ print "compiling {} ... ".format(tcl_file)
+ return_code = call(script, stdout=None, stderr=STDOUT, shell=True)
+ if return_code == 0:
+ print "*** Generation OK\n"
+ else:
+ print "*** Error during generation, exitcode={}\n".format(return_code)
+ error_code |= return_code
+ return error_code
+
+
if __name__ == '__main__':
    # setup parser and parse the arguments.
    argparser = ArgumentParser(description='Generate the IP libraries for all technologies of the given buildset')
    argparser.add_argument('buildset', help="Filename like 'hdl_buildset_.cfg'")
    args = argparser.parse_args()

    # resolve full name of buildsetfile and force it to be explicit absolute or relative.
    full_buildsetfile_name = expandvars("${HDL_CONFIG_DIR}/hdl_buildset_%s.cfg" % (args.buildset))
    if full_buildsetfile_name[0] != '/':
        full_buildsetfile_name = "./" + full_buildsetfile_name
    # read the file
    buildset_info = HdlBuildset(full_buildsetfile_name)
    buildset_info.resolve_key_references()

    # read in all hdllib configfiles
    # lib_root_dirs is a whitespace separated list of directories; tabs are
    # normalised to spaces before splitting.
    root_dirs = [ expandvars(rootdir) for rootdir in buildset_info.lib_root_dirs.replace("\t"," ").split(" ")
                  if rootdir != '' ]
    lib_tree = HdlLibTree(rootdirs=root_dirs, filename="hdllib.cfg", sections="generate_ip_libs")

    # read in the tool environment settings
    tool_config_file = expandvars("${HDL_CONFIG_DIR}/hdl_tool_{}.cfg".format(buildset_info.synth_tool_name))
    tool_info = HdlTool(tool_config_file)
    tool_info.resolve_key_references()
    ip_tools = [ tool for tool in tool_info.ip_tools.replace("\t"," ").split(" ") if tool != '' ]

    files_with_errors = []
    for technology in listify(buildset_info.technology_names):
        print
        print "Generating IP libraries for technology:", technology
        # for all tools supported by quartus
        for ip_tool in ip_tools:
            tool_options = tool_info['{}_default_options'.format(ip_tool)]
            ip_tool_key = "{}_ip_files".format(ip_tool)
            # for all hdllib.cfg files found
            for ip_lib_name in sorted(lib_tree.configfiles.keys())[::-1]: #TODO reverse order issue!
                ip_lib_info = lib_tree.configfiles[ip_lib_name]
                # if technology matches and there are files defined for the current tool
                if ip_lib_info.hdl_lib_technology == technology and ip_tool_key in ip_lib_info.content:
                    # we have a match do the compilation
                    print "==> Processing {} with {}".format(ip_lib_info.ID, ip_tool)
                    # NOTE(review): outputdir still contains the literal text
                    # '${HDL_BUILD_DIR}' here; presumably mkdir() and the
                    # generated shell scripts expand it -- TODO confirm.
                    outputdir = "${HDL_BUILD_DIR}/%s/%s" % (args.buildset, ip_tool)
                    mkdir(outputdir)
                    vhdl_files = [ name for name in ip_lib_info[ip_tool_key].replace("\t"," ").split(" ") \
                                   if name != '' ]
                    # dispatch to the tool specific runner; each returns an OR-ed exit code
                    if ip_tool == 'qmegawiz':
                        err_code = run_qmegawiz (args.buildset, outputdir, ip_lib_info, vhdl_files, tool_options)
                    elif ip_tool == 'qsys-generate':
                        err_code = run_qsys     (args.buildset, outputdir, ip_lib_info, vhdl_files, tool_options)
                    elif ip_tool == 'quartus_sh':
                        err_code = run_quartus_sh(args.buildset, outputdir, ip_lib_info, vhdl_files, tool_options)
                    else:
                        raise NameError("Hdllib file in %s contains a unknown tool (%s) for generating IP." %
                                        (ip_lib_info.ID, ip_tool))
                    if err_code:
                        files_with_errors.append(ip_lib_info.ID)

    # final report: list the libraries whose generation returned a non-zero exit code
    if files_with_errors:
        print "##### The following files had compile errors:"
        print "  ", files_with_errors
    else:
        print "+++++ No errors during compilation! +++++\n"
+
radiohdl/trunk/base/generate_ip_libs
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/base/generate_qsys
===================================================================
--- radiohdl/trunk/base/generate_qsys (nonexistent)
+++ radiohdl/trunk/base/generate_qsys (revision 2)
@@ -0,0 +1,217 @@
+###############################################################################
+#
+# Copyright (C) 2015
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+###############################################################################
+
+"""
+Purpose:
+. Generate an Altera QSYS file from a base QSYS file and user supplied list of regs.
+Usage:
+. python generate_qsys.py
+"""
+
+from common import *
+
def _to_element_str(reg_name, reg_base_addr):
    """
    Returns a template XML 'element' filled in with reg_name and reg_base_addr.

    :param reg_name: register name; substituted for every RADIOHDL_REG_NAME marker.
    :param reg_base_addr: base address; substituted (via str()) for RADIOHDL_REG_BASE_ADDR.
    :return: the filled-in 'element' fragment as a string.
    """
    # NOTE(review): the indentation inside this template literal was collapsed by
    # the web export of this revision; the marker names and values are intact.
    ELEMENT_TEMPLATE = """
element RADIOHDL_REG_NAME.mem
{
datum baseAddress
{
value = RADIOHDL_REG_BASE_ADDR;
type = "long";
}
datum _sortIndex
{
value = "8";
type = "int";
}
datum sopceditor_expanded
{
value = "0";
type = "boolean";
}
}
"""
    return ELEMENT_TEMPLATE.replace('RADIOHDL_REG_NAME', reg_name).replace('RADIOHDL_REG_BASE_ADDR', str(reg_base_addr))
+
def _to_interface_str(reg_name):
    """
    Returns a template XML 'interface' filled in with reg_name.

    :param reg_name: register name; substituted for every RADIOHDL_REG_NAME marker.
    :return: the filled-in 'interface' fragment as a string.
    """
    # NOTE(review): the XML markup of this template was stripped by the web export
    # (only blank lines remain), so the replace() below currently finds no
    # RADIOHDL_REG_NAME marker and the function returns whitespace. Restore the
    # template body from the repository before use.
    INTERFACE_TEMPLATE = """







"""
    return INTERFACE_TEMPLATE.replace('RADIOHDL_REG_NAME', reg_name)
+
def _append_to_modules_mid_str(modules_mid_str, reg_name, reg_base_addr, reg_end_addr):
    """
    The modules_mid section contains one line (dataSlaveMapParam) for all start- and end addresses.
    Append the start and end address of our register to this line.

    :param modules_mid_str: the modules_mid section accumulated so far.
    :param reg_name: register name; substituted for RADIOHDL_REG_NAME.
    :param reg_base_addr: start address; substituted (via str()) for RADIOHDL_REG_BASE_ADDR.
    :param reg_end_addr: end address; substituted (via str()) for RADIOHDL_REG_END_ADDR.
    :return: modules_mid_str with the filled-in dataSlaveMapParam entry appended.
    """
    # NOTE(review): the dataSlaveMapParam XML was stripped by the web export; only
    # a single-space placeholder string survives here, so the replaces are no-ops.
    # Restore the template from the repository before use.
    DATASLAVEMAPPARAM_TEMPLATE = " "
    return modules_mid_str + DATASLAVEMAPPARAM_TEMPLATE.replace('RADIOHDL_REG_NAME', reg_name).replace('RADIOHDL_REG_BASE_ADDR', str(reg_base_addr)).replace('RADIOHDL_REG_END_ADDR', str(reg_end_addr))
+
def _to_module_str(reg_name, reg_addr_w):
    """
    Returns a template XML 'module' filled in with reg_name and reg_addr_w.

    :param reg_name: register name; substituted for every RADIOHDL_REG_NAME marker.
    :param reg_addr_w: address width; substituted (via str()) for RADIOHDL_REG_ADDR_W.
    :return: the filled-in 'module' fragment as a string.
    """
    # NOTE(review): the XML markup of this template was stripped by the web export
    # (only blank lines remain); restore the template body from the repository.
    MODULE_TEMPLATE = """






"""
    return MODULE_TEMPLATE.replace('RADIOHDL_REG_NAME', reg_name).replace('RADIOHDL_REG_ADDR_W', str(reg_addr_w))
+
def _to_connection_str(reg_name, reg_base_addr):
    """
    Returns a template XML 'connection' filled in with reg_name and reg_base_addr.

    :param reg_name: register name; substituted for every RADIOHDL_REG_NAME marker.
    :param reg_base_addr: base address; substituted (via str()) for RADIOHDL_REG_BASE_ADDR.
    :return: the filled-in 'connection' fragment as a string.
    """
    # NOTE(review): the XML markup of this template was stripped by the web export
    # (only blank lines remain); restore the template body from the repository.
    CONNECTION_TEMPLATE = """







"""
    return CONNECTION_TEMPLATE.replace('RADIOHDL_REG_NAME', reg_name).replace('RADIOHDL_REG_BASE_ADDR', str(reg_base_addr))
+
+
def generate_qsys(input_qsys, regs, output_filename):
    """
    Creates an XML QSYS file (output_filename) from a base QSYS and a user-supplied list of registers to add (regs).
    . regs = [ (reg_name, reg_base_addr, reg_span), .. ]

    :param input_qsys: path of the base .qsys file to read.
    :param regs: list of (reg_name, reg_base_addr, reg_span) tuples to insert.
    :param output_filename: path of the .qsys file to write.
    """
    # Read the base QSYS contents into a string
    with open (input_qsys, "r") as base_qsys_file:
        data=base_qsys_file.read()

    # We'll split the base QSYS string up in 5 sections.
    # . Note that string.split() throws away the delimiter so we'll restore those later.
    # NOTE(review): the XML tag delimiters originally passed to split() were
    # stripped by the web export, leaving empty-string separators below.
    # str.split('') raises ValueError, so this code cannot run as exported;
    # restore the real delimiters (section tags) from the repository.
    elements = data.split(']]>', 1)[0]
    parameters = data.split(']]>', 1)[1].split('', 1)[0]
    interfaces = data.split(']]>', 1)[1].split('', 1)[1].split('', 1)[1].split('', 1)[1].split('')[0]
    modules_mid = data.split(']]>', 1)[1].split('', 1)[1].split('')[1].split(']]>',1)[0]
    modules_tail = data.split(']]>', 1)[1].split('', 1)[1].split('')[1].split(']]>',1)[1]
    # NOTE(review): the statement below was mangled by the export — the
    # 'connections' split, the per-register template insertion and the start of
    # the qsys_str reassembly have been fused into one unterminated expression
    # (the opening split( is never closed, and qsys_str is never assigned).
    # Recover the original statements from the repository; do not "fix" this by
    # guessing at the lost delimiters.
    connections = data.split(']]>', 1)[1].split('\n' + \
                  parameters + \
                  '\n' + \
                  interfaces + \
                  '' + \
                  modules_mid + \
                  ']]>\n' + \
                  modules_tail + \
                  '\n'

    # Write the QSYS string to the output_file.
    # NOTE(review): once the mangled block above is restored, prefer a
    # 'with open(...)' context manager here (as used for the input file).
    output_file = open(output_filename, "w")
    output_file.write(qsys_str)
    output_file.close()
+
+
+################################################################################
+# Example main on execution of this file
+################################################################################
if __name__ == '__main__':
    # Example main: add two registers to the base QSYS and write the result.
    base_qsys_path = 'qsys_input.qsys'
    # regs entries are (reg_name, reg_base_addr, reg_span) tuples
    regs = [('reg_my_peripheral', 16384, 3), ('reg_another_peripheral', 17152, 6)]
    generate_qsys(base_qsys_path, regs, 'qsys_generated.qsys')
Index: radiohdl/trunk/base/hdl_configfile.py
===================================================================
--- radiohdl/trunk/base/hdl_configfile.py (nonexistent)
+++ radiohdl/trunk/base/hdl_configfile.py (revision 2)
@@ -0,0 +1,85 @@
+###############################################################################
+#
+# Copyright (C) 2014-2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: hdl_configfile.py 19237 2018-12-10 16:01:11Z overeem $
+#
+###############################################################################
+
+from configfile import ConfigFile
+
+__all__ = [ 'HdlTool', 'HdlBuildset', 'HdlLib']
+
+
class HdlTool(ConfigFile):
    """
    Represents the contents of one hdltool_<name>.cfg configuration file.
    """
    # No keys are mandatory for a tool configuration file.
    _HDLTOOL_ATTRIBUTES = []

    def __init__(self, filename, sections=None):
        """
        Parse the tool configuration file and verify the required keys are present.
        :raise ConfigFileException
        """
        super(HdlTool, self).__init__(filename, sections,
                                      required_keys=self._HDLTOOL_ATTRIBUTES)
+
+
class HdlBuildset(ConfigFile):
    """
    Represents the contents of one hdl_buildset_<name>.cfg configuration file.
    """
    # Keys that every buildset configuration file must define.
    _HDLBUILDSET_ATTRIBUTES = [
        'buildset_name', 'technology_names', 'family_names', 'block_design_names',
        'lib_root_dirs', 'sim_tool_name', 'sim_tool_version',
        'synth_tool_name', 'synth_tool_version' ]

    def __init__(self, filename, sections=None):
        """
        Parse the buildset configuration file and verify the required keys are present.
        :raise ConfigFileException
        """
        super(HdlBuildset, self).__init__(filename, sections,
                                          required_keys=self._HDLBUILDSET_ATTRIBUTES)

    @property
    def ID(self):
        """Unique identifier (string) of this buildset file."""
        return self.buildset_name
+
+
class HdlLib(ConfigFile):
    """
    Represents the contents of one hdllib.cfg configuration file.
    """
    # Keys that every HDL library configuration file must define.
    _HDLLIB_ATTRIBUTES = [
        'hdl_lib_name', 'hdl_library_clause_name', 'hdl_lib_uses_synth',
        'hdl_lib_uses_sim', 'hdl_lib_technology', 'synth_files',
        'test_bench_files' ]

    def __init__(self, filename, sections=None):
        """
        Parse the hdllib configuration file and verify the required keys are present.
        :raise ConfigFileException
        """
        super(HdlLib, self).__init__(filename, sections,
                                     required_keys=self._HDLLIB_ATTRIBUTES)

    @property
    def ID(self):
        """Unique identifier (string) of this library file."""
        return self.hdl_lib_name
+
+
+
Index: radiohdl/trunk/base/hdl_configtree.py
===================================================================
--- radiohdl/trunk/base/hdl_configtree.py (nonexistent)
+++ radiohdl/trunk/base/hdl_configtree.py (revision 2)
@@ -0,0 +1,78 @@
+###############################################################################
+#
+# Copyright (C) 2014-2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: hdl_configtree.py 18619 2018-07-24 08:41:55Z overeem $
+#
+###############################################################################
+
+from hdl_configfile import HdlTool, HdlBuildset, HdlLib
+from configtree import ConfigTree
+
+__all__ = [ 'HdlToolTree', 'HdlBuildsetTree', 'HdlLibTree']
+
+
class HdlToolTree(ConfigTree):
    """
    Represents the contents of a set of hdltool_<name>.cfg configuration files.
    """
    def __init__(self, rootdirs, filename, sections=None):
        """
        Scan rootdirs for tool configuration files and store them in this tree.
        :raise ConfigFileException
        """
        super(HdlToolTree, self).__init__(rootdirs, filename, sections)

    def _factory_constructor(self, full_filename):
        """Build the HdlTool object for one configuration file."""
        return HdlTool(full_filename)
+
+
class HdlBuildsetTree(ConfigTree):
    """
    Represents the contents of a set of hdl_buildset_<name>.cfg configuration files.
    """
    def __init__(self, rootdirs, filename, sections=None):
        """
        Scan rootdirs for buildset configuration files and store them in this tree.
        :raise ConfigFileException
        """
        super(HdlBuildsetTree, self).__init__(rootdirs, filename, sections)

    def _factory_constructor(self, full_filename):
        """Build the HdlBuildset object for one configuration file."""
        return HdlBuildset(full_filename)
+
+
class HdlLibTree(ConfigTree):
    """
    Represents the contents of a set of hdllib.cfg configuration files.
    """
    def __init__(self, rootdirs, filename, sections=None):
        """
        Scan rootdirs for hdllib configuration files and store them in this tree.
        :raise ConfigFileException
        """
        super(HdlLibTree, self).__init__(rootdirs, filename, sections)

    def _factory_constructor(self, full_filename):
        """Build the HdlLib object for one configuration file."""
        return HdlLib(full_filename)
+
+
+
Index: radiohdl/trunk/base/hdl_libraries_wizard.py
===================================================================
--- radiohdl/trunk/base/hdl_libraries_wizard.py (nonexistent)
+++ radiohdl/trunk/base/hdl_libraries_wizard.py (revision 2)
@@ -0,0 +1,629 @@
+###############################################################################
+#
+# Copyright (C) 2014
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+###############################################################################
+
+"""HDL configuration for building simulation and synthesis targets.
+
+ There should be one hdl_buildset_.cfg file per buildset somewhere in the
+ toolRootDir and at least one hdllib.cfg file somewhere in the libRootDir.
+ Every HDL library that is in the libRootDir can be found if it has a hdllib.cfg file.
+ Together the hdl_buildset_.cfg and hdllib.cfg files contain all the keys and
+ values that are sufficient to be able to build the targets for the HDL
+ library. The possible targets are:
+
+ - compile to create the library binaries for simulation
+ - synthesize to create an image that can be loaded onto the FPGA
+ - verify VHDL test benches in simulation
+ - verify Python test cases via the MM control interface in simulation
+ - validate Python test cases on hardware via the MM control interface
+
+ The contents of the cfg files consist of a series of key - value pairs
+ that are read into a dictionary as defined in hdl_configfile.py. Whether
+ the key is a valid key depends on the application that interprets the
+ dictionary.
+
+ The methods can have the library dictionary or the library name as
+ argument. The default arguments are the self.libs.dicts and the
+ corresponding self.lib_names. The argument can be a list or a single value.
+ Similar the return can be a list or a single value, because a list of one
+ element is unlistified.
+
+"""
+
+import sys
+from os import listdir
+from os.path import expandvars, isabs, join, isfile
+import shutil
+from distutils.dir_util import copy_tree
+from argparse import ArgumentParser
+import collections
+import common as cm
+from configfile import ConfigFile
+from hdl_configfile import HdlBuildset
+from hdl_configtree import HdlLibTree
+
+__all__ = [ 'HdlLibrariesWizard' ]
+
+class HdlLibrariesWizard:
+
    def __init__(self, toolRootDir, toolFileName, libFileName='hdllib.cfg', libFileSections=None):
        """Get tool dictionary info from toolRootDir and all HDL library dictionary info for it

        - self.tool.dicts = single dictionary that contains the tool info (only one tool dict in dicts list)
        - self.libs.dicts = list of dictionaries that contains the info of the HDL libraries.

        The libRootDir parameter is defined in the hdl_buildset_.cfg file and is the root directory from where the hdllib.cfg
        files are searched for.

        - self.lib_names = the library names of self.libs.dicts

        In parallel to the self.libs.dicts list of dictionaries a list of self.lib_names is created to be able to identify
        a HDL library dict also by its library name. Therefore it is important that the indexing of parallel lists remains
        intact at all times.

        - self.technologyNames = the technologyNames parameter is defined in the hdl_buildset_.cfg file. All generic HDL
          libraries and these technology specific libraries are kept. If self.technologyNames is:
          [] : Keep all HDL libraries that were found.
          ['ip_stratixiv', 'ip_arria10'] : The HDL libraries with a hdl_lib_technology that is not '' or does not match one of the
          technologies in technologyNames are removed from the list of HDL library dictionaries.

        - self.removed_libs = contains the HDL library dicts that have been removed from self.libs.dicts, because they are for
          a technology that is not within technologyNames.

        Keep lists of all unavailable library names that were found at the hdl_lib_uses_synth, hdl_lib_uses_ip, hdl_lib_uses_sim and
        hdl_lib_include_ip keys in the self.libs.dicts:

        - self.unavailable_use_libs = self.unavailable_use_synth_libs + self.unavailable_use_ip_libs + self.unavailable_use_sim_libs
        - self.unavailable_include_ip_libs

        Unavailable used libraries can be missing for a valid reason when they are not required (e.g. IP for another technology). Being able to
        ignore missing libraries does require that the entities from these libraries are instantiated as components in the VHDL. The difference
        between a removed library and an unavailable library is that for a removed library the HDL config information is still known, whereas
        for an unavailable library it is not. Therefore the library clause names for referred but unavailable HDL libraries are disclosed at the
        'hdl_lib_disclose_library_clause_names' keys of the libraries that use them and kept in a dictionary:

        - self.disclosed_library_clause_names
        """
        print "HdlLibrariesWizard(toolRootDir=%s, toolFileName=%s, libFileName=%s, libFileSections=%s)" % \
              (toolRootDir, toolFileName, libFileName, libFileSections)
        self.toolRootDir = toolRootDir # TODO almost obsolete

        # read the buildset file. This file contains major information about paths, technologies, and so on
        full_buildsetfile_name = "%s/%s" % (toolRootDir, toolFileName)
        buildset_info = HdlBuildset(full_buildsetfile_name)
        buildset_info.resolve_key_references()
        self.buildset = buildset_info.content

        # HDL library config files
        # lib_root_dirs is a tab/space separated list; drop empty entries
        self.libRootDirs = [ expandvars(rootdir) for rootdir in self.buildset['lib_root_dirs'].replace("\t"," ").split(" ")
                             if rootdir != '' ]
        self.libs = HdlLibTree(rootdirs = self.libRootDirs,
                               filename = libFileName,
                               sections = libFileSections) # library dict files
        if len(self.libs.configfiles) == 0:
            sys.exit('Error : No HDL library config files found')
        print "Found %d hdllib files:" % len(self.libs.configfiles)

        # Substitute key words occurring in hdllib.cfg files with their value.
        self.substitute_key_words()

        # Keep the generic HDL libraries and remove those that do not match the specified IP technologies
        self.technologyNames = self.buildset['technology_names'].split()
        print "### self.technologyNames = ", self.technologyNames
        self.removed_libs = {}
        for cfglib_name in self.libs.configfiles.keys():
            techname = self.libs.configfiles[cfglib_name]['hdl_lib_technology']
            if (techname != '' and techname not in self.technologyNames):
                # keep the removed libs we need the content of those libs later.
                self.removed_libs[cfglib_name]=self.libs.configfiles[cfglib_name]
        self.libs.remove_files_from_tree(self.removed_libs.keys())
        print len(self.removed_libs), "REMOVED LIBS:", sorted(self.removed_libs)

        # Keep list of used HDL library names
        self.lib_names = self.libs.configfiles.keys()
        print len(self.lib_names), "KEPT LIBS:", sorted(self.lib_names)

        # No need to check for duplicates since HdlLibTree did that already.

        # create dictionary of library names with library clause names that are disclosed at the 'hdl_lib_disclose_library_clause_names' keys.
        self.disclosed_library_clause_names = {}
        for lib in self.libs.configfiles.values():
            if lib.get_value('hdl_lib_disclose_library_clause_names'):
                # the key holds 'lib_name lib_clause_name' pairs; split into two parallel lists
                key_values = lib.get_value('hdl_lib_disclose_library_clause_names').split()
                lib_name = key_values[0::2]
                lib_clause_name = key_values[1::2]
                lib_pairs = zip(lib_name, lib_clause_name)
                # No need to check for duplicate lib_names, because a dictionary cannot have duplicate keys
                for lp in lib_pairs:
                    self.disclosed_library_clause_names[lp[0]] = lp[1]
        # Check whether the used libraries from the self.libs.dicts keys indeed exist, otherwise remove them from the dictionary key
        # string and add the used library name to the list of unavailable used library names and check that the library use clause
        # name was disclosed at the 'hdl_lib_disclose_library_clause_names' key. In this way other methods do not have to check a
        # used library does indeed exist.
        self.unavailable_use_synth_libs = []
        self.unavailable_use_ip_libs = []
        self.unavailable_use_sim_libs = []
        self.unavailable_include_ip_libs = []
        # NOTE(review): the sys.exit() messages below interpolate 'lib_name', which at
        # this point is the leftover list from the disclose-names loop above (or is
        # undefined when no library disclosed any clause names). The current library's
        # own name was probably intended — confirm and fix.
        for lib in self.libs.configfiles.values():
            use_synth_libs = []
            use_ip_libs = []
            use_sim_libs = []
            include_ip_libs = []
            if lib.get_value('hdl_lib_uses_synth'):
                use_synth_libs = lib.get_value('hdl_lib_uses_synth').split()
            if lib.get_value('hdl_lib_uses_ip'):
                use_ip_libs += lib.get_value('hdl_lib_uses_ip').split()
            if lib.get_value('hdl_lib_uses_sim'):
                use_sim_libs += lib.get_value('hdl_lib_uses_sim').split()
            if lib.get_value('hdl_lib_include_ip'):
                include_ip_libs = lib.get_value('hdl_lib_include_ip').split()
            for use_name in use_synth_libs:
                if (use_name not in self.lib_names) and (use_name not in self.removed_libs):
                    lib['hdl_lib_uses_synth']=cm.remove_from_list_string(lib['hdl_lib_uses_synth'], use_name)
                    self.unavailable_use_synth_libs.append(use_name)
                    if use_name not in self.disclosed_library_clause_names.keys():
                        sys.exit("Error : Unavailable library %s at 'hdl_lib_uses_synth' key is not disclosed at 'hdl_lib_disclose_library_clause_names' key in library %s" % (use_name, lib_name))
            for use_name in use_ip_libs:
                if (use_name not in self.lib_names) and (use_name not in self.removed_libs):
                    lib['hdl_lib_uses_ip']=cm.remove_from_list_string(lib['hdl_lib_uses_ip'], use_name)
                    self.unavailable_use_ip_libs.append(use_name)
                    if use_name not in self.disclosed_library_clause_names.keys():
                        sys.exit("Error : Unavailable library %s at 'hdl_lib_uses_ip' key is not disclosed at 'hdl_lib_disclose_library_clause_names' key in library %s" % (use_name, lib_name))
            for use_name in use_sim_libs:
                if (use_name not in self.lib_names) and (use_name not in self.removed_libs):
                    lib['hdl_lib_uses_sim']=cm.remove_from_list_string(lib['hdl_lib_uses_sim'], use_name)
                    self.unavailable_use_sim_libs.append(use_name)
                    if use_name not in self.disclosed_library_clause_names.keys():
                        sys.exit("Error : Unavailable library %s at 'hdl_lib_uses_sim' key is not disclosed at 'hdl_lib_disclose_library_clause_names' key in library %s" % (use_name, lib_name))
            for use_name in include_ip_libs:
                if (use_name not in self.lib_names) and (use_name not in self.removed_libs):
                    lib['hdl_lib_include_ip']=cm.remove_from_list_string(lib['hdl_lib_include_ip'], use_name)
                    self.unavailable_include_ip_libs.append(use_name)
                    if use_name not in self.disclosed_library_clause_names.keys():
                        sys.exit("Error : Unavailable library %s at 'hdl_lib_include_ip' key in library %s is not disclosed at any 'hdl_lib_disclose_library_clause_names' key" % (use_name, lib_name))
        # remove all duplicates from the list
        self.unavailable_use_synth_libs = cm.unique(self.unavailable_use_synth_libs)
        self.unavailable_use_ip_libs = cm.unique(self.unavailable_use_ip_libs)
        self.unavailable_use_sim_libs = cm.unique(self.unavailable_use_sim_libs)
        self.unavailable_include_ip_libs = cm.unique(self.unavailable_include_ip_libs) # list of include_ip_use_libs
        self.unavailable_use_libs = self.unavailable_use_synth_libs + self.unavailable_use_ip_libs + self.unavailable_use_sim_libs
        self.unavailable_use_libs = cm.unique(self.unavailable_use_libs) # aggregate list of use_*_libs
+
+ # The Key value pairs defined in hdltool.cfg can be used in hdllib.cfg files. See hdllib.cfg of technology library
+ def substitute_key_words(self):
+ for lib in self.libs.configfiles.values():
+ for lib_key, lib_value in lib.content.items():
+ for tool_key, tool_value in self.buildset.items():
+ tool_key_string = '<%s>' % tool_key
+ if tool_key_string in lib_value:
+ lib[lib_key] = lib_value.replace(tool_key_string,tool_value)
+
+ def check_library_names(self, check_lib_names, lib_names=None):
+ """Check that HDL library names exists within the list of library names, if not then exit with Error message.
+ The list of library names can be specified via the argument lib_names, or it defaults to the list of
+ self.lib_names of HDL libraries that were found in the toolRootDir for the libFileName of this object.
+ """
+ if lib_names==None: lib_names=self.lib_names
+ for check_lib_name in cm.listify(check_lib_names):
+ if check_lib_name not in cm.listify(lib_names):
+ sys.exit('Error : Unknown HDL library name %s found with %s' % (check_lib_name, cm.method_name()))
+
+
    def get_used_libs(self, build_type, lib_dict, arg_include_ip_libs=[]):
        """Get the list of used HDL libraries from the lib_dict that this library directly depends on, so only at this HDL library hierarchy level.

        Which libraries are actually used depends on the build_type. The build_type can be:
        ''      uses all libraries from 'hdl_lib_uses_synth', 'hdl_lib_uses_ip' and 'hdl_lib_uses_sim' in the lib_dict
        'sim'   uses all libraries from 'hdl_lib_uses_synth', 'hdl_lib_uses_ip' and 'hdl_lib_uses_sim' in the lib_dict
        'synth' uses all libraries from 'hdl_lib_uses_synth' in the lib_dict and from 'hdl_lib_uses_ip' it only uses the IP
                libraries that are mentioned in the local 'hdl_lib_include_ip' key or in the global arg_include_ip_libs

        The 'hdl_lib_uses_*' keys all appear locally in the same hdllib.cfg file. The 'hdl_lib_include_ip' key appears at this level or at
        a higher level (design) library hdllib.cfg file to select which of all available 'hdl_lib_uses_ip' IP libraries will actually be
        used in the design. The 'hdl_lib_include_ip' cannot appear in a lower level hdllib.cfg, because a lower level HDL library cannot
        depend on a higher level HDL library. Therefore the IP libraries that need to be included from 'hdl_lib_uses_ip' will be known in
        include_ip_libs.

        :return: tuple (use_libs, include_ip_libs), both with duplicates and removed-technology libraries filtered out.
        """
        # Get local library dependencies
        use_synth_libs = []
        use_ip_libs = []
        use_sim_libs = []
        include_ip_libs = []
        if 'hdl_lib_uses_synth' in lib_dict.content:
            use_synth_libs = lib_dict['hdl_lib_uses_synth'].split()
        if 'hdl_lib_uses_ip' in lib_dict.content:
            use_ip_libs += lib_dict['hdl_lib_uses_ip'].split()
        if 'hdl_lib_uses_sim' in lib_dict.content:
            use_sim_libs += lib_dict['hdl_lib_uses_sim'].split()
        if 'hdl_lib_include_ip' in lib_dict.content:
            include_ip_libs = lib_dict['hdl_lib_include_ip'].split()

        # Append include_ip_libs from this level to the global list of arg_include_ip_libs
        # (arg_include_ip_libs is only read, never mutated, so the [] default is safe)
        include_ip_libs = list(arg_include_ip_libs) + include_ip_libs

        # Get the actually use_libs for lib_dict
        use_libs = use_synth_libs + use_ip_libs + use_sim_libs # default include all IP, so ignore include_ip_libs
        if build_type=='sim':
            use_libs = use_synth_libs + use_ip_libs + use_sim_libs # for simulation included all IP, so ignore include_ip_libs
        if build_type=='synth':
            use_libs = use_synth_libs
            # For synthesis only keep the local use_ip_libs if it is mentioned in the global include_ip_libs. Vice versa also only
            # include the global include_ip_libs if they appear in a local use_ip_libs, to avoid that an IP library that is mentioned
            # in the global include_ip_libs gets included while it is not instantiated anywhere in the design.
            # NOTE(review): indentation was lost in this export; this loop is
            # reconstructed as part of the 'synth' branch, which matches the
            # comment above — confirm against the repository.
            for ip_lib in use_ip_libs:
                if ip_lib in include_ip_libs:
                    use_libs += [ip_lib]

        # Remove any duplicate library names from the lists
        use_libs = cm.unique(use_libs)
        include_ip_libs = cm.unique(include_ip_libs)

        # Remove libraries that are in the removed technologies (use list() to take copy)
        for use_name in list(use_libs):
            if use_name in self.removed_libs:
                use_libs.remove(use_name)
        for use_name in list(include_ip_libs):
            if use_name in self.removed_libs:
                include_ip_libs.remove(use_name)

        return use_libs, include_ip_libs
+
+
    def derive_all_use_libs(self, build_type, lib_name, arg_include_ip_libs=[]):
        """Recursively derive a complete list of all HDL libraries that the specified HDL lib_name library depends on, so from this
        HDL library down the entire hierarchy.

        The hdl_lib_uses_* key only needs to contain all libraries that are declared at the VHDL LIBRARY clauses of the
        source files in this VHDL library. This derive_all_use_libs() will recursively find all deeper level VHDL libraries as well.

        The arg_include_ip_libs selects the IP library to keep from 'hdl_lib_uses_ip'. The include_ip_libs is passed on
        through the recursion hierarchy via arg_include_ip_libs to ensure that the from the top level library down all
        multiple choice IP libraries in 'hdl_lib_uses_ip' that need to be included are indeed included. The multiple choice IP
        libraries in 'hdl_lib_uses_ip' that are not in include_ip_libs are excluded.

        Note:
        . Only the generic HDL libraries and the technology specific libraries that match self.technologyNames are used,
          because the other technology libraries have been removed from self.libs.dicts already at __init__() and from the
          library dependency lists in get_used_libs()
        . If Python breaks because recursion limit is reached, then two hdllib.cfg probably mutually use eachother which is
          not allowed.
        """
        # use list() to take local copy, to avoid next that default empty list argument arg_include_ip_libs=[] gets disturbed
        include_ip_libs = list(arg_include_ip_libs)
        if lib_name in self.lib_names:
            all_use_libs = [lib_name]
            lib_dict = self.libs.configfiles[lib_name]
            use_libs, include_ip_libs = self.get_used_libs(build_type, lib_dict, include_ip_libs)

            for use_lib in use_libs:
                if use_lib not in all_use_libs:
                    all_use_libs.append(use_lib)
                    # use recursion to include all used libs
                    all_use_libs += self.derive_all_use_libs(build_type, use_lib, include_ip_libs)
            # remove all duplicates from the list
            return cm.unique(all_use_libs)
        else:
            sys.exit('Error : Unknown HDL library name %s in %s()' % (lib_name, cm.method_name()))
+
+
    def derive_lib_order(self, build_type, lib_name, lib_names=None):
        """Derive the dependency order for all HDL libraries in lib_names that HDL library lib_name depends on.

        Each used library is moved to just before the library that uses it; the
        method recurses until a full pass leaves the order unchanged (stable).
        :return: list of library names in dependency order (dependencies first).
        """
        if lib_names==None:
            # At first entry derive the list of all HDL libraries that lib_name depends on
            lib_names = self.derive_all_use_libs(build_type, lib_name)

        # Derive the order of all HDL libraries that lib_name depends on, start with the order of lib_names
        lib_dicts = self.libs.get_configfiles('hdl_lib_name', values=lib_names)
        # use list() to take local copy to avoid modifying list order of self.lib_names which matches self.libs.dicts list order
        lib_order = list(lib_names)
        for lib_dict in lib_dicts:
            lib_name = lib_dict['hdl_lib_name']
            use_libs, _ = self.get_used_libs('', lib_dict, [])
            for use_lib in use_libs:
                if use_lib in lib_names:
                    if lib_order.index(use_lib) > lib_order.index(lib_name):
                        lib_order.remove(use_lib)
                        lib_order.insert(lib_order.index(lib_name), use_lib) # move used lib to just before this lib
        # use recursion to keep on reordering the lib_order until it is stable
        if lib_names != lib_order:
            lib_order = self.derive_lib_order(build_type, lib_name, lib_order)
        return lib_order
+
+
+ def get_lib_dicts_from_lib_names(self, lib_names=None):
+ """Get list the HDL libraries lib_dicts from list of HDL libraries lib_names and preseve the library order.
+ """
+ if lib_names==None:
+ lib_names=self.lib_names
+
+ # Cannot use:
+ #lib_dicts = self.libs.get_configfiles('hdl_lib_name', values=lib_names)
+ # because then the order of self.libs.dicts is used
+ lib_dicts = []
+ for lib_name in cm.listify(lib_names):
+ lib_dicts.append(self.libs.configfiles[lib_name])
+ return lib_dicts
+
+
+ def get_lib_names_from_lib_dicts(self, lib_dicts=None):
+ """Get list the HDL libraries lib_names from list of HDL libraries lib_dicts and preseve the library order.
+ """
+ lib_names = self.libs.get_key_values('hdl_lib_name', lib_dicts)
+ return lib_names
+
+
    def get_tool_build_dir(self, build_type):
        """Get the central tool build directory.

        The build_type can be:
        'sim'   uses the 'sim_tool_name' key in the self.buildset
        'synth' uses the 'synth_tool_name' key in the self.buildset
        When another name is used that name is used directly as toolname in the construction of the path.

        The function returns a tuple with the following four components:
        - the absolute path to the central main build directory
        - the buildset_name key value as subdirectory
        - the toolname as subdirectory (derived from *_tool_name or the given value of 'build_type')
        - project_deeper_subdir. See explanation below.

        The project file will be located in the build dir or at some levels deeper in the build dir.
        These optional extra subdirectory levels allow for relative file reference from project file
        location. This is useful to be able to keep memory initialisation files in the library build
        directory that are referred to using some fixed ../../ path in the HDL code.
        - project_deeper_subdir = ''     when project_dir_depth_<build_type> = 0 or not in buildset
        - project_deeper_subdir = 'p/'   when project_dir_depth_<build_type> = 1
        - project_deeper_subdir = 'p/p/' when project_dir_depth_<build_type> = 2, etc
        """
        # Determine build_maindir from the HDL_BUILD_DIR environment variable
        build_maindir = expandvars('${HDL_BUILD_DIR}')
        if not isabs(build_maindir):
            sys.exit('Error : The build_dir value must be an absolute path')

        # Determine build_buildset_dir
        build_buildset_dir = self.buildset['buildset_name']

        # Determine build_tooldir; fall back to build_type itself when no
        # <build_type>_tool_name key exists in the buildset
        tool_name_key = build_type + '_tool_name'
        if tool_name_key in self.buildset:
            build_tooldir = self.buildset[tool_name_key]
        else:
            build_tooldir = build_type

        # Determine project_deeper_subdir: 'p/' repeated project_dir_depth_<build_type> times
        project_dir_depth_key = 'project_dir_depth_' + build_type
        if project_dir_depth_key not in self.buildset:
            project_deeper_subdir = ''
        else:
            project_deeper_subdir = 'p/' * int(self.buildset[project_dir_depth_key])

        return build_maindir, build_buildset_dir, build_tooldir, project_deeper_subdir
+
+
+ def get_lib_build_dirs(self, build_type, lib_dicts=None):
+ """Get the subdirectories within the central tool build directory for all HDL libraries in the specified list of lib_dicts.
+
+ The build_type can be:
+ 'sim' uses the 'sim_tool_name' key in the self.buildset
+ 'synth' uses the 'synth_tool_name' key in the self.buildset
+
+ The build dir key value must be an absolute directory path. The lib build dir consists of
+ - the absolute path to the central main build directory
+ - the buildset_name key value as subdirectory
+ - the tool_name_key value as subdirectory
+ - the library name as library subdirectory
+ - zero or more extra subdirectory levels to allow for relative file reference from project file location
+ """
+ if lib_dicts==None:
+ lib_dicts = self.libs.configfiles.values()
+ build_maindir, build_buildset_dir, build_tooldir, project_deeper_subdir = self.get_tool_build_dir(build_type)
+ build_dirs = []
+ for lib_dict in cm.listify(lib_dicts):
+ lib_name = lib_dict['hdl_lib_name']
+ build_dirs.append(join(build_maindir, build_buildset_dir, build_tooldir, lib_name, project_deeper_subdir)) # central build main directory with subdirectory per library
+ return cm.unlistify(build_dirs)
+
+
+ def create_lib_order_files(self, build_type, lib_names=None):
+ """Create the compile order file '_lib_order.txt' for all HDL libraries in the specified list of lib_names.
+
+ The file is stored in the sim build directory of the HDL library.
+ The file is read by commands.do in Modelsim to avoid having to derive the library compile order in TCL.
+ """
+ if lib_names==None:
+ lib_names=self.lib_names
+
+ lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=lib_names)
+ for lib_dict in lib_dicts:
+ lib_name = lib_dict['hdl_lib_name']
+ lib_order = self.derive_lib_order(build_type, lib_name)
+ file_name = lib_name + '_lib_order.txt'
+ file_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
+ cm.mkdir(file_path)
+ filePathName = join(file_path, file_name)
+ with open(filePathName, 'w') as fp:
+ for lib in lib_order:
+ fp.write('%s ' % lib)
+ print "Created {} lib-order files".format(len(lib_dicts))
+
+    # Methods to create sub directories at various levels in the build directory as defined in HDL tool config file
+    def create_sub_directory_in_build_main_dir(self, build_type, subdir_name):
+        """Create <subdir_name> in the central <build_maindir> directory.
+
+        build_type only selects the tool build dir tuple; only the main dir component is used here.
+        """
+        build_maindir, build_buildset_dir, build_tooldir, project_deeper_subdir = self.get_tool_build_dir(build_type)
+        subdir_path = join(build_maindir, subdir_name)
+        cm.mkdir(subdir_path)
+
+    def create_sub_directory_in_build_buildset_dir(self, build_type, subdir_name):
+        """Create <subdir_name> in the central <build_maindir>/<buildset_name>/ directory.
+        """
+        build_maindir, build_buildset_dir, build_tooldir, project_deeper_subdir = self.get_tool_build_dir(build_type)
+        subdir_path = join(build_maindir, build_buildset_dir, subdir_name)
+        cm.mkdir(subdir_path)
+
+    def create_sub_directory_in_build_tool_dir(self, build_type, subdir_name):
+        """Create <subdir_name> in the central <build_maindir>/<buildset_name>/<tool_name>/ directory.
+        """
+        build_maindir, build_buildset_dir, build_tooldir, project_deeper_subdir = self.get_tool_build_dir(build_type)
+        subdir_path = join(build_maindir, build_buildset_dir, build_tooldir, subdir_name)
+        cm.mkdir(subdir_path)
+
+ def create_sub_directory_in_build_lib_dir(self, build_type, subdir_name, lib_names=None):
+ """Create / in project local build directory / for all HDL libraries in the specified list of lib_names.
+ """
+ if lib_names==None:
+ lib_names=self.lib_names
+ lib_dicts = self.libs.get_configfiles('hdl_lib_name', values=lib_names)
+ for lib_dict in lib_dicts:
+ lib_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
+ subdir_path = join(lib_path, subdir_name)
+ cm.mkdir(subdir_path)
+ print "Created {} subdirectories".format(len(lib_dicts))
+
+ def copy_files(self, build_type, lib_names=None):
+ """
+ Copy all source directories and source files listed at the _copy_files key.
+ The build_type selects the _copy_files key using the _tool_name key value
+ from the hdl_buildset_.cfg.
+ The _copy_files key expects a source and a destination pair per listed directory or file:
+ - The sources need to be specified with absolute path or relative to the HDL library source directory
+ where the hdllib.cfg is stored
+ - The destinations need to be specified with absolute path or relative to HDL library build directory
+ where the project file (e.g. mpf, qpf) gets stored
+
+ Arguments:
+ - lib_names : zero or more HDL libraries
+ """
+ if lib_names==None:
+ lib_names=self.lib_names
+
+ lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=lib_names)
+ tool_name_key = build_type + '_tool_name'
+ tool_name_value = self.buildset[tool_name_key]
+ tool_name_copy_key = tool_name_value + '_copy_files'
+ lib_count = 0
+ dir_count = 0
+ file_count = 0
+ for lib_dict in lib_dicts:
+ if tool_name_copy_key in lib_dict.content:
+ lib_count += 1
+ lib_path = lib_dict.location
+ build_dir_path = self.get_lib_build_dirs(build_type, lib_dicts=lib_dict)
+ cm.mkdir(build_dir_path)
+ key_values = lib_dict[tool_name_copy_key].split()
+ sources = key_values[0::2]
+ destinations = key_values[1::2]
+ file_io = zip(sources, destinations)
+ for fpn_io in file_io:
+ sourcePathName = cm.expand_file_path_name(fpn_io[0], lib_path)
+ destinationPath = cm.expand_file_path_name(fpn_io[1], build_dir_path)
+ if isfile(sourcePathName):
+ file_count += 1
+ shutil.copy(sourcePathName, destinationPath) # copy file
+ else:
+ dir_count += 1
+ copy_tree(sourcePathName, destinationPath) # copy directory tree (will create new destinationPath directory)
+ print "Copied {} files and {} directories for {} libraries".format(file_count, dir_count, lib_count)
+
+
+if __name__ == '__main__':
+ # Parse command line arguments
+ buildsetSelect = sorted([cfgfile[13:-4] for cfgfile in listdir(expandvars('$HDL_CONFIG_DIR'))
+ if cfgfile.startswith("hdl_buildset_") and cfgfile.endswith(".cfg")])
+ argparser = ArgumentParser(description='Hdl_config shows several selections of all of your hdllib.cfg files.')
+ argparser.add_argument('buildset', help='choose buildset %s' % (buildsetSelect))
+ argparser.add_argument('--toplib', default=None, required=False, help='top library to show more information about.')
+ args = argparser.parse_args()
+
+ # check arguments
+ if args.buildset not in buildsetSelect:
+ print 'buildset %s is not supported' % args.buildset
+ print "Supported buildset are:", buildsetSelect
+ sys.exit(1)
+ args.buildsetFile = 'hdl_buildset_' + args.buildset + '.cfg'
+
+ # Read the dictionary info from all HDL tool and library configuration files in the current directory and the sub directories
+ hdl = HdlLibrariesWizard(toolRootDir = expandvars('${HDL_CONFIG_DIR}'),
+ toolFileName = args.buildsetFile,
+ libFileName = 'hdllib.cfg')
+
+ print '#'
+ print '# HdlLibrariesWizard:'
+ print '#'
+ for libname in hdl.libs.configfiles.keys():
+ print "\n", libname
+ libinfo = hdl.libs.configfiles[libname]
+ for k,v in libinfo.content.iteritems():
+ print k, '=', v
+ print ''
+
+ print ''
+ print 'Library paths :'
+ for libname in hdl.libs.configfiles.keys():
+ print ' ', hdl.libs.configfiles[libname].location
+
+ print ''
+ print 'Library file names :"'
+ for libname in hdl.libs.configfiles.keys():
+ print ' ', libname
+
+ print ''
+ print 'Library section headers :'
+ for libname,libinfo in hdl.libs.configfiles.iteritems():
+ print ' %-52s : %s' % (libname, libinfo['section_headers'])
+
+ print ''
+ print 'Build directories for simulation:'
+ for build_dir in hdl.get_lib_build_dirs('sim'):
+ print ' ', build_dir
+
+ print ''
+ print 'Build directories for synthesis:'
+ for build_dir in hdl.get_lib_build_dirs('synth'):
+ print ' ', build_dir
+
+ print ''
+ print 'Removed library names = \n', hdl.removed_libs.keys()
+
+ print ''
+ print "Unavailable library names in any 'hdl_lib_uses_synth' key = \n", hdl.unavailable_use_synth_libs
+ print "Unavailable library names in any 'hdl_lib_uses_ip' key = \n", hdl.unavailable_use_ip_libs
+ print "Unavailable library names in any 'hdl_lib_uses_sim' key = \n", hdl.unavailable_use_sim_libs
+ print "Unavailable library names in any 'hdl_lib_uses_*' key = \n", hdl.unavailable_use_libs
+ print ''
+ print "Unavailable library names in any 'hdl_lib_include_ip' key = \n", hdl.unavailable_include_ip_libs
+
+ print ''
+ print "Used library clause names that are explicitly disclosed at the 'hdl_lib_disclose_library_clause_names' keys:"
+ for key in hdl.disclosed_library_clause_names.keys():
+ print ' %-52s : %s' % (key, hdl.disclosed_library_clause_names[key])
+
+ if args.toplib:
+ for build_type in ['sim', 'synth']:
+ print ''
+ print 'derive_all_use_libs for %s of %s = \n' % (build_type, args.toplib), \
+ hdl.derive_all_use_libs(build_type, args.toplib)
+ print ''
+ print 'derive_lib_order for %s of %s = \n' % (build_type, args.toplib), \
+ hdl.derive_lib_order(build_type, args.toplib)
+
+
Index: radiohdl/trunk/base/hdl_raw_access.py
===================================================================
--- radiohdl/trunk/base/hdl_raw_access.py (nonexistent)
+++ radiohdl/trunk/base/hdl_raw_access.py (revision 2)
@@ -0,0 +1,209 @@
+###############################################################################
+#
+# Copyright (C) 2014-2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+# $Id: hdl_raw_access.py 18619 2018-07-24 08:41:55Z overeem $
+#
+###############################################################################
+
+import re
+import os.path
+from configfile import *
+from configtree import ConfigTree
+
+__all__ = [ 'RawConfigFile', 'RawConfigTree' ]
+
+"""
+File that implements the configfile <-> configtree concept but this time with the
+intention to modify the keys and/or values in those configfiles. So the content of
+the configfiles is kept as raw as possible.
+"""
+
+class RawConfigFile(object):
+ """
+ Class that holds the raw content of a configfile. To simplify the manipulation of the
+ content the file is internally stored as lines.
+ """
+ def __init__(self, filename):
+ """
+ Read the hdl configuration file and check presence of the required keys.
+ :raise ConfigFileException
+ """
+ full_filename = os.path.expanduser(os.path.expandvars(filename))
+ if not os.path.isfile(full_filename):
+ raise ConfigFileException ("configfile '%s' not found" % full_filename)
+
+ self.filename = full_filename
+ self.content = open(self.filename, 'r').readlines()
+
+ @property
+ def ID(self):
+ "Returns uniq ID (string) to identify this particular file. Fullfilename is used."
+ return self.filename
+
+ def _save_content(self, verbose=False):
+ """
+ Write the content to the file.
+ """
+ with open(self.filename, 'w') as fp:
+ fp.writelines(self.content)
+
+ if verbose:
+ print self.filename
+
+
+ def change_value(self, key, new_value, verbose):
+ """
+ Change the value of the given key. The old value may be empty, be on one or multiple lines.
+ The new value may use \n characters to create a new multiline value.
+ """
+ # Search the key in the lines
+ prog = re.compile("^{}[ \t]*{}".format(key, CFG_ASSIGNMENT_CHAR))
+ for (linenr, line) in enumerate(self.content):
+ # search if line starts with this key
+ match = prog.search(line)
+ if not match:
+ continue
+
+ # replace the value that is on this line with the whole new value
+ value_start = match.end()
+ line = line[0:value_start] + ' ' + new_value.replace("\\n", "\n") + "\n"
+ self.content[linenr] = line
+
+ # to support removing old multiline values we have to skip lines until we find a
+ # sectionheader|other key|empty line or are at the end of the file
+ linenr += 1
+ while linenr < len(self.content):
+ if not re.match(r"(\[[a-zA-Z0-9_]+\]|[a-zA-Z0-9_]+[ \t]*{}|^[ \t]*$)".format(CFG_ASSIGNMENT_CHAR),
+ self.content[linenr]):
+ self.content.pop(linenr)
+ else:
+ break
+
+ self._save_content(verbose)
+ break
+
+
+ def append_key_value(self, key, value, verbose):
+ """
+ Append the given key and value to the end of the configfile.
+ \n characters can be used both in the key and the value to start at a new line.
+ """
+ self.content.append("{} {} {}\n".format(key.replace("\\n","\n"),
+ CFG_ASSIGNMENT_CHAR,
+ value.replace("\\n","\n")))
+ self._save_content(verbose)
+
+
+ def rename_key(self, old_key, new_key, verbose):
+ """
+ Change the name of a key.
+ """
+ # Search the key in the lines
+ prog = re.compile("^{}[ \t]*{}".format(old_key, CFG_ASSIGNMENT_CHAR))
+ for (linenr, line) in enumerate(self.content):
+ # search if line starts with this old_key
+ match = prog.search(line)
+ if not match:
+ continue
+
+ # change the name of the key
+ value_start = match.end()
+ line = "{} {}{}\n".format(new_key.replace("\\n", "\n"), CFG_ASSIGNMENT_CHAR, line[value_start:])
+ self.content[linenr] = line
+ self._save_content(verbose)
+ break
+
+ def remove_key(self, key,verbose):
+ """
+ Remove a key and value pair from the dictfile.
+ """
+ # difficulty here is that we can delete the lines immediately because the enumeration over the
+ # file content gets messed up. So we setup the variables first_line and last_line that mark
+ # the part of the list that has to be removed. After the enumeration we cut the lines.
+
+ # First search the key in the lines
+ searching_key = True
+ last_line = len(self.content)
+ key_mask = re.compile(r"^{}[ \t]*{}".format(key, CFG_ASSIGNMENT_CHAR))
+ end_of_value_mask = re.compile(r"(\[[a-zA-Z0-9_]+\]|[a-zA-Z0-9_]+[ \t]*{})".format(CFG_ASSIGNMENT_CHAR))
+ for (linenr, line) in enumerate(self.content):
+ if searching_key:
+ # search if line starts with this key
+ match = key_mask.search(line)
+ if match:
+ first_line = linenr
+ searching_key = False
+ else:
+ # not searching for the key anymore, we now have to find the and of the value.
+ # to support removing old multiline values we have to skip lines until we find a
+ # sectionheader|other key or are at the end of the file
+ if not end_of_value_mask.match(self.content[linenr]):
+ continue
+ last_line = linenr
+ break
+
+ del self.content[first_line:last_line]
+ self._save_content(verbose)
+
+
+ def insert_key_at_linenr(self, new_key, new_value, linenumber, verbose):
+ """
+ Insert a new key = value pair in the configfile at linenumber. The first line has number 1.
+ """
+ if linenumber > len(self.content):
+ return self.append_key_value(new_key, new_value, verbose)
+
+ # Read dict file into string and insert new key = value pair at insertLineNr
+ new_line = "{} {} {}\n".format(new_key.replace("\\n", "\n"), CFG_ASSIGNMENT_CHAR, new_value)
+ self.content.insert(linenumber-1, new_line)
+ self._save_content(verbose)
+
+
+ def insert_key_value_before_key(self, new_key, new_value, before_key, verbose):
+ """
+ Insert a new key = value pair in the configfile just before another key..
+ """
+ # Search the key in the lines
+ prog = re.compile("^{}[ \t]*{}".format(before_key, CFG_ASSIGNMENT_CHAR))
+ for (linenr, line) in enumerate(self.content):
+ # search if line starts with this key
+ match = prog.search(line)
+ if not match:
+ continue
+
+ return self.insert_key_at_linenr(new_key, new_value, linenr+1, verbose)
+
+
+class RawConfigTree(ConfigTree):
+    """
+    Class that represents the collection of 'raw content' configfiles.
+    """
+    def __init__(self, rootdirs, filename, sections=None):
+        """
+        Read the raw configuration files and store them in this tree.
+        :raise ConfigFileException
+        """
+        super(RawConfigTree, self).__init__(rootdirs, filename, sections)
+
+    def _factory_constructor(self, full_filename):
+        "Factory method returning the read-in configfile as a RawConfigFile (overrides ConfigTree)."
+        return RawConfigFile(full_filename)
+
+
+
Index: radiohdl/trunk/base/modelsim_config
===================================================================
--- radiohdl/trunk/base/modelsim_config (nonexistent)
+++ radiohdl/trunk/base/modelsim_config (revision 2)
@@ -0,0 +1,464 @@
+#! /usr/bin/env python
+###############################################################################
+#
+# Copyright (C) 2014
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+#
+###############################################################################
+
+"""HDL configuration for building Modelsim simulation targets.
+
+ Usage:
+ > python $RADIOHDL/tools/oneclick/base/modelsim_config.py -t unb1
+"""
+
+import sys
+import os.path
+from os import listdir
+from argparse import ArgumentParser
+import common as cm
+import hdl_libraries_wizard
+
+class ModelsimConfig(hdl_config.HdlLibrariesWizard):
+
+ def __init__(self, toolRootDir, buildsetFile, libFileName):
+ """Get Modelsim tool info from toolRootDir and all HDL library info from libRootDir.
+
+ This class uses the default keys and the keys from the libFileSections in the libFileName config file.
+
+ Arguments:
+ - toolRootDir : Root directory from where the hdl_buildset_.cfg file is searched for.
+ - buildsetFile : Default HDL tools configuration file name
+ - libFileName : Default HDL library configuration file name
+
+ The libRootDir is defined in the hdl_buildset_.cfg file and is the root directory from where the hdllib.cfg
+ files are searched for.
+
+ Files:
+ - hdl_buildset_.cfg : HDL tool configuration dictionary file. One central file per buildset.
+
+ - hdllib.cfg : HDL library configuration dictionary file. One file for each HDL library.
+
+ - modelsim_project_files.txt
+ The modelsim_project_files.txt file is a dictionary file with the list the Modelsim project files for all HDL
+ libraries that were found in the libRootDir. The keys are the library names and the values are the paths to the
+ corresponding modelsim project files. The modelsim_project_files.txt file is created by
+ create_modelsim_project_files_file() and is read by the TCL commands.do file in Modelsim. Creating the file in
+ Python and then reading this in TCL makes the commands.do much simpler.
+
+ - .mpf : Modelsim project file for a certain HDL library based on the hdllib.cfg. The file is created by
+ create_modelsim_project_file().
+
+ - _lib_order.txt
+ The _lib_order.txt file contains the library compile order for a certain HDL library. The files are
+ created by create_lib_order_files() in the same build directory as where the Modelsim project file is stored.
+ The _lib_order.txt files are read by the TCL commands.do file in Modelsim. Creating the files in Python
+ and then reading them in TCL makes the commands.do much simpler.
+ """
+ libFileSections=['modelsim_project_file']
+ hdl_config.HdlLibrariesWizard.__init__(self, toolRootDir, buildsetFile, libFileName, libFileSections)
+
+    def read_compile_order_from_mpf(self, mpfPathName):
+        """Utility to read the compile order of the project files from an existing .mpf.
+
+        Arguments:
+        - mpfPathName : path to an existing Modelsim project file
+
+        Returns the list of project file names ordered by their 'compile_order' number.
+        """
+        # read .mpf to find all project files:
+        # 'Project_File_<i> = <path>' lines name the files; 'Project_File_P_<i> = ...' lines hold their properties
+        project_file_indices = []
+        project_file_names = []
+        with open(mpfPathName, 'r') as fp:
+            for line in fp:
+                words = line.split()
+                if len(words)>0:
+                    key = words[0]
+                    if key.find('Project_File_')>=0 and key.find('Project_File_P_')==-1:
+                        project_file_indices.append(key[len('Project_File_'):])
+                        project_file_names.append(words[2])
+        # read .mpf again to find compile order for the project files
+        # (Python 2: range() returns a list whose slots are overwritten per compile position)
+        compile_order = range(len(project_file_names))
+        with open(mpfPathName, 'r') as fp:
+            for line in fp:
+                words = line.split()
+                if len(words)>0:
+                    key = words[0]
+                    if key.find('Project_File_P_')>=0:
+                        project_file_index = project_file_indices.index(key[len('Project_File_P_'):])
+                        project_file_name = project_file_names[project_file_index]
+                        # the word after the 'compile_order' property is this file's position
+                        k = words.index('compile_order')
+                        k = int(words[k+1])
+                        compile_order[k]=project_file_name
+        return compile_order
+
+ def read_hdl_libraries_technology_file(self, technologyName, filePath=None):
+ """Read the list of technology HDL libraries from a file.
+
+ Arguments:
+ - technologyName : refers to the hdl_libraries_.txt file
+ - filePath : path to hdl_libraries_.txt, when None then the file is
+ read in the default toolRootDir
+ """
+ fileName = 'hdl_libraries_' + technologyName + '.txt' # use fixed file name format
+ if filePath==None:
+ toolDir = os.path.expandvars('$HDL_BUILD_DIR')
+ toolSubDir = self.buildset['buildset_name']
+ fileNamePath=join(toolDir, toolSubDir, fileName) # default file path
+ else:
+ fileNamePath=join(filePath, fileName) # specified file path
+ tech_dict = ConfigFile(fileNamePath).content
+ return tech_dict
+
+
+ def create_modelsim_lib_compile_ip_files(self, lib_names=None):
+ """
+ Create the '_lib_compile_ip.txt' file for all HDL libraries in the specified list of lib_names.
+ The file is stored in the sim build directory of the HDL library.
+ The file is read by commands.do in Modelsim to know which IP needs to be compiled before the library is compiled.
+ """
+ if lib_names==None:
+ lib_names=self.lib_names
+
+ count = 0
+ lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=lib_names)
+ for lib_dict in lib_dicts:
+ if 'modelsim_compile_ip_files' in lib_dict.content:
+ count += 1
+ compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
+ lib_name = lib_dict['hdl_lib_name']
+ file_name = lib_name + '_lib_compile_ip.txt'
+ file_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
+ cm.mkdir(file_path)
+ filePathName = os.path.join(file_path, file_name)
+ with open(filePathName, 'w') as fp:
+ for fpn in compile_ip_files:
+ # Write the expanded file path name for _lib_compile_ip.txt so that it can be executed directly from its location in SVN using the Modelsim "do"-command in the commands.do.
+ # An alternative would be to write the basename, so only _lib_compile_ip.txt, but that would require copying the basename file to the mpf build directory
+ efpn = os.path.expandvars(fpn)
+ fp.write('%s ' % efpn)
+ print "Created {} compile-ip files".format(count)
+
+
+ def simulation_configuration(self, list_mode=False):
+ """Prepare settings for simulation configuration.
+ The output format is string or list, dependent on list_mode.
+ Return tuple of project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization.
+ """
+ # project_sim_p_defaults
+ project_sim_p_defaults = 'Generics {} timing default -std_output {} -nopsl 0 +notimingchecks 0 selected_du {} -hazards 0 -sdf {} ok 1 -0in 0 -nosva 0 +pulse_r {} -absentisempty 0 -multisource_delay {} +pulse_e {} vopt_env 1 -coverage 0 -sdfnoerror 0 +plusarg {} -vital2.2b 0 -t default -memprof 0 is_vopt_flow 0 -noglitch 0 -nofileshare 0 -wlf {} -assertdebug 0 +no_pulse_msg 0 -0in_options {} -assertfile {} -sdfnowarn 0 -Lf {} -std_input {}'
+
+ # project_sim_p_search_libraries
+ if list_mode:
+ project_sim_p_search_libraries = self.buildset['modelsim_search_libraries'].split()
+ else:
+ project_sim_p_search_libraries = '-L {}'
+ if 'modelsim_search_libraries' in self.buildset:
+ project_sim_p_search_libraries = '-L {'
+ for sl in self.buildset['modelsim_search_libraries'].split():
+ project_sim_p_search_libraries += sl
+ project_sim_p_search_libraries += ' '
+ project_sim_p_search_libraries += '}'
+
+ # project_sim_p_otherargs
+ # Note:
+ # E.g. the vsim-8684 load warning does not occur when the simulation is loaded via double click, but it
+ # does occur when the simulation is relaoded via the command line, because in the command line history
+ # the +nowarn8684 is then for some reason not preserved by Modelsim.
+ otherargs = ''
+ otherargs = '+nowarn8684 +nowarn8683 -quiet'
+ otherargs = '+nowarn8684 +nowarn8683'
+ otherargs = '+nowarn8684 +nowarn8683 +nowarnTFMPC +nowarnPCDPC' # nowarn on verilog IP connection mismatch warnings
+ if list_mode:
+ project_sim_p_otherargs = otherargs.split()
+ else:
+ project_sim_p_otherargs = 'OtherArgs {' + otherargs + '}'
+
+ # project_sim_p_optimization
+ project_sim_p_optimization = 'is_vopt_opt_used 2' # = when 'Enable optimization' is not selected in GUI
+ project_sim_p_optimization = 'is_vopt_opt_used 1 voptargs {OtherVoptArgs {} timing default VoptOutFile {} -vopt_keep_delta 0 -0in 0 -fvopt {} VoptOptimize:method 1 -vopt_00 2 +vopt_notimingcheck 0 -Lfvopt {} VoptOptimize:list .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.listbox -Lvopt {} +vopt_acc {} VoptOptimize .vopt_opt.nb.canvas.notebook.cs.page1.cs -vopt_hazards 0 VoptOptimize:Buttons .vopt_opt.nb.canvas.notebook.cs.page1.cs.g.spec.bf 0InOptionsWgt .vopt_opt.nb.canvas.notebook.cs.page3.cs.zf.ze -0in_options {}}' # = when 'Enable optimization' is selected in GUI for full visibility
+
+ return project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization
+
+
+ def create_modelsim_project_file(self, lib_names=None):
+ """
+ Create the Modelsim project file for all technology libraries and RTL HDL libraries.
+
+ Arguments:
+ - lib_names : one or more HDL libraries
+
+ Library mapping:
+ - Technology libraries that are available, but not used are mapped to work.
+ - Unavailable libraries are also mapped to work. The default library clause name is
+ with postfix '_lib'. This is a best effort guess, because it is impossible to know the library clause name
+ for an unavailable library. If the best effort guess is not suitable, then the workaround is to create a
+ place holder directory with hdllib.cfg that defines the actual library clause name as it appears in the
+ VHDL for the unavailable HDL library. unavailable library names occur when e.g. a technology IP library
+ is not available in the toolRootDir because it is not needed, or it may indicate a spelling error.
+ """
+ if lib_names==None:
+ lib_names=self.lib_names
+
+ lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=lib_names)
+ print "SELF.BUILDSET=", self.buildset
+ for lib_dict in lib_dicts:
+ # Open mpf
+ lib_name = lib_dict['hdl_lib_name']
+ mpf_name = lib_name + '.mpf'
+ mpf_path = self.get_lib_build_dirs('sim', lib_dicts=lib_dict)
+ cm.mkdir(mpf_path)
+ mpfPathName = os.path.join(mpf_path, mpf_name)
+ with open(mpfPathName, 'w') as fp:
+ # Write [Library] section for all used libraries
+ fp.write('[Library]\n')
+
+ # . map used vendor technology libs to their target directory
+ for technologyName in self.technologyNames:
+ tech_dict = self.read_hdl_libraries_technology_file(technologyName)
+ for lib_clause, lib_work in tech_dict.iteritems():
+ fp.write('%s = %s\n' % (lib_clause, lib_work))
+
+ # . not used vendor technology libs are not compiled but are mapped to work to avoid compile error when mentioned in the LIBRARY clause
+ for removed_lib in sorted(self.removed_libs):
+ fp.write('%s = work\n' % self.removed_libs[removed_lib]['hdl_library_clause_name'])
+
+ # . unavailable used libs are not compiled but are mapped to work to avoid compile error when mentioned in the LIBRARY clause
+ for unavailable_use_name in sorted(self.unavailable_use_libs):
+ # if the unavailable library is not in the dictionary of disclosed unavailable library clause names, then assume that the library clause
+ # name has the default postfix '_lib'.
+ if unavailable_use_name in self.disclosed_library_clause_names:
+ fp.write('%s = work\n' % self.disclosed_library_clause_names[unavailable_use_name])
+ else:
+ fp.write('%s_lib = work\n' % unavailable_use_name)
+
+ # . all used libs for this lib_name
+ use_lib_names = self.derive_all_use_libs('sim', lib_name)
+ use_lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=use_lib_names)
+ use_lib_build_sim_dirs = self.get_lib_build_dirs('sim', lib_dicts=use_lib_dicts)
+ use_lib_clause_names = self.libs.get_key_values('hdl_library_clause_name', use_lib_dicts)
+ for lib_clause, lib_dir in zip(use_lib_clause_names, cm.listify(use_lib_build_sim_dirs)):
+ lib_work = os.path.join(lib_dir, 'work')
+ fp.write('%s = %s\n' % (lib_clause, lib_work))
+
+ # . work
+ fp.write('work = work\n')
+
+ # . others modelsim default libs
+ model_tech_dir = os.path.expandvars(self.buildset['modelsim_dir'])
+ fp.write('others = %s\n' % os.path.join(model_tech_dir, 'modelsim.ini'))
+
+ # Write [Project] section for all used libraries
+ fp.write('[Project]\n')
+ fp.write('Project_Version = 6\n') # must be >= 6 to fit all
+ fp.write('Project_DefaultLib = work\n')
+ fp.write('Project_SortMethod = unused\n')
+
+ # - project files
+ synth_files = lib_dict['synth_files'].split()
+ test_bench_files = lib_dict['test_bench_files'].split()
+ project_files = synth_files + test_bench_files
+ if 'modelsim_compile_ip_files' in lib_dict.content:
+ compile_ip_files = lib_dict['modelsim_compile_ip_files'].split()
+ project_files += compile_ip_files
+ fp.write('Project_Files_Count = %d\n' % len(project_files))
+ for i, fn in enumerate(project_files):
+ filePathName = cm.expand_file_path_name(fn, lib_dict.location)
+ fp.write('Project_File_%d = %s\n' % (i, filePathName))
+
+ project_file_p_defaults_hdl = 'vhdl_novitalcheck 0 group_id 0 cover_nofec 0 vhdl_nodebug 0 vhdl_1164 1 vhdl_noload 0 vhdl_synth 0 vhdl_enable0In 0 vlog_1995compat 0 last_compile 0 vhdl_disableopt 0 cover_excludedefault 0 vhdl_vital 0 vhdl_warn1 1 vhdl_warn2 1 vhdl_explicit 1 vhdl_showsource 0 cover_covercells 0 vhdl_0InOptions {} vhdl_warn3 1 vlog_vopt {} cover_optlevel 3 voptflow 1 vhdl_options {} vhdl_warn4 1 toggle - ood 0 vhdl_warn5 1 cover_noshort 0 compile_to work cover_nosub 0 dont_compile 0 vhdl_use93 2002 cover_stmt 1'
+ project_file_p_defaults_vhdl = 'file_type vhdl'
+ project_file_p_defaults_verilog = 'file_type verilog'
+ project_file_p_defaults_tcl = 'last_compile 0 compile_order -1 file_type tcl group_id 0 dont_compile 1 ood 1'
+
+ project_folders = []
+ offset = 0
+
+ nof_synth_files = len(synth_files)
+ if nof_synth_files>0:
+ project_folders.append('synth_files')
+ for i in range(nof_synth_files):
+
+ # Add file type specific settings
+ file_ext = synth_files[i].split('.')[-1]
+ if file_ext=='vhd' or file_ext=='vhdl':
+ project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
+ elif file_ext=='v':
+ project_file_p_defaults_file_specific = project_file_p_defaults_verilog
+ else:
+ print '\nERROR - Undefined file extension in synth_files:', lib_name, synth_files[i]
+ sys.exit()
+
+ fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_hdl+' '+project_file_p_defaults_file_specific))
+ offset = nof_synth_files
+
+ nof_test_bench_files = len(test_bench_files)
+ if nof_test_bench_files>0:
+ project_folders.append('test_bench_files')
+ for i in range(nof_test_bench_files):
+
+ # Add file type specific settings
+ file_ext = test_bench_files[i].split('.')[-1]
+ if file_ext=='vhd' or file_ext=='vho' or file_ext=='vhdl':
+ project_file_p_defaults_file_specific = project_file_p_defaults_vhdl
+ elif file_ext=='v':
+ project_file_p_defaults_file_specific = project_file_p_defaults_verilog
+ else:
+ print '\nERROR - Undefined file extension in test_bench_files:', lib_name, test_bench_files[i]
+ sys.exit()
+
+ fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_hdl+' '+project_file_p_defaults_file_specific))
+ offset += nof_test_bench_files
+
+ if 'modelsim_compile_ip_files' in lib_dict.content:
+ nof_compile_ip_files = len(compile_ip_files)
+ if nof_compile_ip_files>0:
+ project_folders.append('compile_ip_files')
+ for i in range(nof_compile_ip_files):
+ fp.write('Project_File_P_%d = folder %s compile_order %d %s\n' % (offset+i, project_folders[-1], offset+i, project_file_p_defaults_tcl))
+ offset += nof_compile_ip_files
+
+ # - project folders
+ fp.write('Project_Folder_Count = %d\n' % len(project_folders))
+ for i, fd in enumerate(project_folders):
+ fp.write('Project_Folder_%d = %s\n' % (i, fd))
+ fp.write('Project_Folder_P_%d = folder {Top Level}\n' % i)
+
+ # - simulation configurations
+ fp.write('Project_Sim_Count = %d\n' % len(test_bench_files))
+ project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization = self.simulation_configuration()
+ for i, fn in enumerate(test_bench_files):
+ fName = os.path.basename(fn)
+ tbName = os.path.splitext(fName)[0]
+ fp.write('Project_Sim_%d = %s\n' % (i, tbName))
+ for i, fn in enumerate(test_bench_files):
+ fName = os.path.basename(fn)
+ tbName = os.path.splitext(fName)[0]
+ fp.write('Project_Sim_P_%d = folder {Top Level} additional_dus work.%s %s %s %s %s\n' % (i, tbName, project_sim_p_defaults, project_sim_p_search_libraries, project_sim_p_otherargs, project_sim_p_optimization))
+
+ # Write [vsim] section
+ fp.write('[vsim]\n')
+ fp.write('RunLength = 0 ps\n')
+ fp.write('resolution = 1fs\n')
+ fp.write('IterationLimit = 5000\n') # According to 'verror 3601' the default is 5000, typically 100 is enough, but e.g. the ip_stratixiv_phy_xaui_0 requires more.
+ fp.write('DefaultRadix = decimal\n')
+ print "Created {} project files".format(len(lib_dicts))
+
+ def create_modelsim_project_files_file(self, lib_names=None):
+ """Create file with list of the Modelsim project files for all HDL libraries.
+
+ Arguments:
+ - lib_names : one or more HDL libraries
+ """
+ fileName = 'modelsim_project_files.txt' # use fixed file name
+ build_maindir, build_buildsetdir, build_tooldir, project_deeper_subdir = self.get_tool_build_dir('sim')
+ fileNamePath=os.path.join(build_maindir, build_buildsetdir, build_tooldir, fileName) # and use too build dir for file path
+ if lib_names==None:
+ lib_names=self.lib_names
+ with open(fileNamePath, 'w') as fp:
+ lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=lib_names)
+ mpf_paths = self.get_lib_build_dirs('sim', lib_dicts=lib_dicts)
+ for lib_name, mpf_path in zip(cm.listify(lib_names),cm.listify(mpf_paths)):
+ fp.write('%s = %s\n' % (lib_name, mpf_path))
+ print "Created project file {}".format(fileNamePath)
+
+
+if __name__ == '__main__':
+    # Mode
+    # 0 = Create Modelsim mpf files for all hdllib.cfg
+    # 1 = Read compile order from mpf for a single library and write it into the hdllib.cfg.
+    #     This is useful to avoid having to manually edit the compile order for an existing $UNB .mpf into the hdllib.cfg.
+    #     The compile order is important for the synth_files that need to be in hierarchical order. The test_bench_files are typically
+    #     independent so these may be put in alphabetical order. The compile order is read from the .mpf and saved in the
+    #     hdllib.cfg. The hdllib.cfg still does need some manual editing to set the proper key and paths.
+    mode = 0
+
+    # Derive the supported buildset names from the hdl_buildset_<name>.cfg files found in $HDL_CONFIG_DIR;
+    # the slice [13:-4] strips the 'hdl_buildset_' prefix and the '.cfg' suffix.
+    buildsetSelect = sorted([cfgfile[13:-4] for cfgfile in listdir(os.path.expandvars('$HDL_CONFIG_DIR'))
+                             if cfgfile.startswith("hdl_buildset_") and cfgfile.endswith(".cfg")])
+    # Parse command line arguments
+    argparser = ArgumentParser(description='Modelsim creates/updates all your modelsim environment(s).')
+    argparser.add_argument('buildset', help='choose buildset %s' % (buildsetSelect))
+    argparser.add_argument('-v','--verbosity', required=False, type=int, default=0, help='verbosity >= 0 for more info')
+    args = argparser.parse_args()
+
+    # check arguments
+    if args.buildset not in buildsetSelect:
+        print 'buildset %s is not supported' % args.buildset
+        print "Supported buildset are:", buildsetSelect
+        sys.exit(1)
+    args.buildsetFile = 'hdl_buildset_' + args.buildset + '.cfg'
+
+    # Read the dictionary info from all HDL tool and library configuration files in the current directory and the sub directories
+    msim = ModelsimConfig(toolRootDir = os.path.expandvars('$HDL_CONFIG_DIR'),
+                          buildsetFile = args.buildsetFile,
+                          libFileName = 'hdllib.cfg')
+
+    if mode==0:
+        # Read the dictionary info from all HDL tool and library configuration files in the current directory and the sub directories
+        if args.verbosity>=2:
+            print '#'
+            print '# ModelsimConfig:'
+            print '#'
+            print ''
+            print 'HDL library paths that are found in %s:' % msim.libRootDirs
+            for lib in sorted(msim.libs.configfiles.values()):
+                print '  ', lib.location
+
+        if args.verbosity>=2:
+            print ''
+            print 'Build directories for simulation:'
+            for sim_dir in msim.get_lib_build_dirs('sim'):
+                print '  ', sim_dir
+
+        print ''
+        print 'Create library compile order files for simulation...'
+        msim.create_lib_order_files('sim')
+        # NOTE(review): this sys.exit(0) makes all remaining mode==0 steps below unreachable
+        # (compile ip files, project files list, sub directories, file copies and the .mpf creation).
+        # It looks like a debug/development leftover -- confirm whether it should be removed.
+        sys.exit(0)
+
+        print ''
+        print 'Create library compile ip files...'
+        msim.create_modelsim_lib_compile_ip_files()
+
+        print ''
+        print 'Create modelsim projects list file...'
+        msim.create_modelsim_project_files_file()
+
+        print ''
+        print 'Create sub directory in project dir for all HDL libraries that are found in %s...' % msim.libRootDirs
+        msim.create_sub_directory_in_build_lib_dir('sim', 'mmfiles')  # should match c_mmf_local_dir_path in mm_file_pkg.vhd
+
+        print ''
+        print 'Copy directories and files from HDL library source tree to project dir for all HDL libraries that are found in %s...' % msim.libRootDirs
+        msim.copy_files('sim')
+
+        print ''
+        print 'Create Modelsim Project Files for technology %s and all HDL libraries in %s...' % (msim.technologyNames, msim.libRootDirs)
+        msim.create_modelsim_project_file()
+
+    if mode==1:
+        #for lib_name in ['ado','ap','bf','bist','blp','bp','cdo','cim','cir','cp','cr','dc','eth','fmf','i2c','lvds','pfs','pft2','rcuh','ri','rsp','rsr','rsu','sens','serdes','si','st','tbbi','tdsh']:
+        for lib_name in ['tst']:
+            # Read compile order from existing .mpf
+            # First manually create rudimentary hdllib.cfg file for the library with lib name and clause filled in. Then run this script to get
+            # the ordered list of src and tb files. Then manually edit the hdllib.cfg to put the files at the synth or sim key.
+            #mpfPathName = os.path.expandvars('$UNB/Firmware/designs/%s/build/synth/quartus/sopc_%s_sim/%s.mpf' % (lib_name, lib_name, lib_name))
+            #mpfPathName = os.path.expandvars('$UNB/Firmware/modules/Lofar/%s/build/sim/modelsim/%s.mpf' % (lib_name, lib_name))
+            #mpfPathName = os.path.expandvars('$UNB/Firmware/modules/%s/build/sim/modelsim/%s.mpf' % (lib_name, lib_name))
+            mpfPathName = os.path.expandvars('$RSP/%s/build/sim/modelsim/%s.mpf' % (lib_name, lib_name))
+            compile_order = msim.read_compile_order_from_mpf(mpfPathName)
+            # Append the compile_order list to the lib_name dictionary hdllib.cfg file
+            lib_dict = msim.libs.get_configfiles(key='hdl_lib_name', values=lib_name)
+            lib_path = msim.libs.get_filePath(lib_dict)
+            filePathName = os.path.join(lib_path, 'hdllib.cfg')
+            print ''
+            print 'Save modelsim compile order for', lib_name, 'in HDL library config file', filePathName
+            msim.libs.append_key_to_dict_file(filePathName, 'files', compile_order)
+
radiohdl/trunk/base/modelsim_config
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/base/modify_configfiles
===================================================================
--- radiohdl/trunk/base/modify_configfiles (nonexistent)
+++ radiohdl/trunk/base/modify_configfiles (revision 2)
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+###############################################################################
+#
+# Copyright (C) 2014-2018
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+# $Id: configfile_modifier.py 18609 2018-07-18 08:06:30Z overeem $
+#
+###############################################################################
+
+"""
+Interactive program to modify all configfiles found in the hierarchical directory
+structure under the given top directory.
+"""
+
+from argparse import ArgumentParser
+from hdl_raw_access import RawConfigTree
+
+# Define a simple menu system that is based on a dict that contains the menu actions:
+# the keys are the valid strings the user may enter
+# the values are lists: the first element is the menu text, the second the function to call and
+# the remaining elements (if any) are the arguments to ask the user
+# All arguments are stored in a kwargs structure and passed to the function that is called.
+
+def get_menu_choice(menu, title):
+ """
+ Iterate over the menu dict, show the menu choices and ask for input till valid input is received.
+ """
+ print "\n", title
+ print "-" * len(title)
+ input_ok = False
+ while not input_ok:
+ for key in sorted(menu.iterkeys()):
+ print "{} {}".format(key, menu[key][0])
+ choice = raw_input("\n >>: ")
+ if choice not in menu.keys():
+ print "ERROR: This input is not a valid menu choice. Try again."
+ else:
+ input_ok = True
+ return menu[choice]
+
+def execute_menu_line(line_spec, verbose):
+    """
+    Given a menu line specification it asks the user for the specified arguments, stores the values in a
+    kwargs structure and finally calls the specified function with this kwargs.
+
+    line_spec layout: [description, function, arg_name, ...] -- see the menu dict in __main__.
+    """
+    print "\n--- {} ---".format(line_spec[0])
+    nr_args = len(line_spec) - 2
+    if nr_args < 0:
+        raise SyntaxError("Invalid formatted menuline definition: {}.\nNeed at least two items in the list".format(line_spec))
+    if nr_args == 0:
+        # NOTE(review): zero-argument actions are called without the 'verbose' kwarg;
+        # only end_menu uses this path, which takes no arguments at all.
+        return line_spec[1]()
+    # Iterate over the remaining items and get values for them
+    kwargs = { "verbose": verbose }
+    for spec_idx in xrange(2, len(line_spec)):
+        # The prompt text is derived from the argument name, e.g. 'new_key' -> 'New key: '
+        answer = raw_input(line_spec[spec_idx].capitalize().replace("_", " ") + ": ")
+        kwargs[line_spec[spec_idx]] = answer
+    line_spec[1](**kwargs)
+
+
+### Implementation of the menu commands
+# Note: it would be cleaner to implement this with the Strategy pattern, but we want to keep the code
+# readable for everyone. ;-)
+def change_value_of_key(**kwargs):
+ key = kwargs.pop("key")
+ new_value = kwargs.pop("new_value")
+ verbose = kwargs.pop("verbose")
+ global tree
+ for filename in sorted(tree.configfiles.keys()):
+ tree.configfiles[filename].change_value(key, new_value, verbose)
+
+def append_key_value(**kwargs):
+ key = kwargs.pop("new_key")
+ new_value = kwargs.pop("new_value")
+ verbose = kwargs.pop("verbose")
+ global tree
+ for filename in sorted(tree.configfiles.keys()):
+ tree.configfiles[filename].append_key_value(key, new_value, verbose)
+
+def insert_key_at_linenr(**kwargs):
+    """Menu action: insert 'new_key' = 'new_value' at line 'linenumber' in every configfile of the global tree."""
+    new_key = kwargs.pop("new_key")
+    new_value = kwargs.pop("new_value")
+    # NOTE(review): int() raises ValueError on non-numeric user input; the menu loop does not catch it.
+    linenumber = int(kwargs.pop("linenumber"))
+    verbose = kwargs.pop("verbose")
+    global tree
+    for filename in sorted(tree.configfiles.keys()):
+        tree.configfiles[filename].insert_key_at_linenr(new_key, new_value, linenumber, verbose)
+
+def insert_key_value_before_key(**kwargs):
+    """Menu action: insert 'new_key' = 'new_value' just before 'before_key' in every configfile of the global tree."""
+    new_key = kwargs.pop("new_key")
+    new_value = kwargs.pop("new_value")
+    before_key = kwargs.pop("before_key")
+    verbose = kwargs.pop("verbose")
+    global tree
+    for filename in sorted(tree.configfiles.keys()):
+        tree.configfiles[filename].insert_key_value_before_key(new_key, new_value, before_key, verbose)
+
+def rename_key(**kwargs):
+ old_key = kwargs.pop("old_key")
+ new_key = kwargs.pop("new_key")
+ verbose = kwargs.pop("verbose")
+ global tree
+ for filename in sorted(tree.configfiles.keys()):
+ tree.configfiles[filename].rename_key(old_key, new_key, verbose)
+
+def remove_key(**kwargs):
+ key = kwargs.pop("key")
+ verbose = kwargs.pop("verbose")
+ global tree
+ for filename in sorted(tree.configfiles.keys()):
+ tree.configfiles[filename].remove_key(key, verbose)
+
+def end_menu():
+    # Menu action for 'q': stop the main menu loop by clearing the module-level 'running' flag.
+    global running
+    running = False
+
+
+if __name__ == '__main__':
+    # setup parser and parse the arguments.
+    argparser = ArgumentParser(description='Options and arguments for modifying collections of configfiles.')
+    argparser.add_argument('filename', help="Filename like 'hdl_buildset_.cfg'")
+    argparser.add_argument('rootdir', help="Top directory to start the search for configfiles.")
+    argparser.add_argument('-v', '--verbose', help="Show more information on what happens.", action="store_true")
+    args = argparser.parse_args()
+
+    # Collect all matching configfiles below rootdir; the module-level 'tree' is read by the menu action functions above.
+    tree = RawConfigTree(args.rootdir, args.filename)
+    print "Found {} configfiles in {}".format(len(tree.configfiles), args.rootdir)
+    if args.verbose:
+        for filename in sorted(tree.configfiles.keys()):
+            print filename
+
+    # define the menu including actions
+    # choice    choice description                 function to call              arguments to ask for
+    menu = {'1': [ "Change value",                 change_value_of_key,          "key", "new_value"],
+            '2': [ "Append key",                   append_key_value,             "new_key", "new_value"],
+            '3': [ "Insert key at linenr",         insert_key_at_linenr,         "new_key", "new_value", "linenumber"],
+            '4': [ "Insert key before other key",  insert_key_value_before_key,  "new_key", "new_value", "before_key"],
+            '5': [ "Rename key",                   rename_key,                   "old_key", "new_key"],
+            '6': [ "Remove key",                   remove_key,                   "key"],
+            'q': [ "Exit",                         end_menu ]
+           }
+
+    # Main loop: show the menu, execute the chosen action, repeat until end_menu clears 'running'.
+    running = True
+    while running:
+        execute_menu_line(get_menu_choice(menu, "Menu for changing multiple configfiles"), args.verbose)
+
radiohdl/trunk/base/modify_configfiles
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/base/qsys_input.qsys
===================================================================
--- radiohdl/trunk/base/qsys_input.qsys (nonexistent)
+++ radiohdl/trunk/base/qsys_input.qsys (revision 2)
@@ -0,0 +1,669 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ qsys_input_onchip_memory2_0
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ INTERACTIVE_ASCII_OUTPUT
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ SIMPLE_PERIODIC_INTERRUPT
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ cpu_0.jtag_debug_module
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ ]]>
+ ]]>
+
+
+
+
+ ADDRESS_STALL 1 CELL_LEVEL_BACK_ANNOTATION_DISABLED 0 COMPILER_SUPPORT 1 DSP 1 DSP_SHIFTER_BLOCK 1 DUMP_ASM_LAB_BITS_FOR_POWER 1 EMUL 0 ENABLE_ADVANCED_IO_ANALYSIS_GUI_FEATURES 1 EPCS 1 ESB 0 FAKE1 0 FAKE2 0 FAKE3 0 FITTER_USE_FALLING_EDGE_DELAY 1 GENERATE_DC_ON_CURRENT_WARNING_FOR_INTERNAL_CLAMPING_DIODE 1 HARDCOPY 0 HAS_18_BIT_MULTS 1 HAS_ACE_SUPPORT 1 HAS_ADJUSTABLE_OUTPUT_IO_TIMING_MEAS_POINT 1 HAS_ADVANCED_IO_INVERTED_CORNER 1 HAS_ADVANCED_IO_POWER_SUPPORT 1 HAS_ADVANCED_IO_TIMING_SUPPORT 1 HAS_ALM_SUPPORT 1 HAS_ATOM_AND_ROUTING_POWER_MODELED_TOGETHER 0 HAS_AUTO_DERIVE_CLOCK_UNCERTAINTY_SUPPORT 1 HAS_AUTO_FIT_SUPPORT 1 HAS_BALANCED_OPT_TECHNIQUE_SUPPORT 1 HAS_BENEFICIAL_SKEW_SUPPORT 1 HAS_BITLEVEL_DRIVE_STRENGTH_CONTROL 1 HAS_BSDL_FILE_GENERATION 1 HAS_CGA_SUPPORT 1 HAS_CHECK_NETLIST_SUPPORT 1 HAS_CLOCK_REGION_CHECKER_ENABLED 1 HAS_CORE_JUNCTION_TEMP_DERATING 0 HAS_CROSSTALK_SUPPORT 0 HAS_CUSTOM_REGION_SUPPORT 1 HAS_DATA_DRIVEN_ACVQ_HSSI_SUPPORT 0 HAS_DDB_FDI_SUPPORT 0 HAS_DESIGN_ANALYZER_SUPPORT 1 HAS_DETAILED_IO_RAIL_POWER_MODEL 1 HAS_DETAILED_LEIM_STATIC_POWER_MODEL 0 HAS_DETAILED_LE_POWER_MODEL 1 HAS_DETAILED_ROUTING_MUX_STATIC_POWER_MODEL 0 HAS_DETAILED_THERMAL_CIRCUIT_PARAMETER_SUPPORT 1 HAS_DEVICE_MIGRATION_SUPPORT 1 HAS_DIAGONAL_MIGRATION_SUPPORT 0 HAS_EMIF_TOOLKIT_SUPPORT 1 HAS_FAMILY_VARIANT_MIGRATION_SUPPORT 0 HAS_FANOUT_FREE_NODE_SUPPORT 1 HAS_FAST_FIT_SUPPORT 1 HAS_FITTER_EARLY_TIMING_ESTIMATE_SUPPORT 1 HAS_FITTER_ECO_SUPPORT 1 HAS_FIT_NETLIST_OPT_RETIME_SUPPORT 1 HAS_FIT_NETLIST_OPT_SUPPORT 1 HAS_FORMAL_VERIFICATION_SUPPORT 1 HAS_FPGA_XCHANGE_SUPPORT 1 HAS_FSAC_LUTRAM_REGISTER_PACKING_SUPPORT 0 HAS_FULL_DAT_MIN_TIMING_SUPPORT 1 HAS_FULL_INCREMENTAL_DESIGN_SUPPORT 1 HAS_FUNCTIONAL_SIMULATION_SUPPORT 1 HAS_GLITCH_FILTERING_SUPPORT 1 HAS_HC_READY_SUPPORT 1 HAS_HIGH_SPEED_LOW_POWER_TILE_SUPPORT 1 HAS_HOLD_TIME_AVOIDANCE_ACROSS_CLOCK_SPINE_SUPPORT 1 HAS_HSPICE_WRITER_SUPPORT 1 HAS_HSSI_POWER_CALCULATOR 1 HAS_IBISO_WRITER_SUPPORT 0 
HAS_INCREMENTAL_DAT_SUPPORT 1 HAS_INCREMENTAL_SYNTHESIS_SUPPORT 1 HAS_IO_ASSIGNMENT_ANALYSIS_SUPPORT 1 HAS_IO_DECODER 1 HAS_IO_PLACEMENT_OPTIMIZATION_SUPPORT 1 HAS_IO_SMART_RECOMPILE_SUPPORT 0 HAS_JITTER_SUPPORT 1 HAS_JTAG_SLD_HUB_SUPPORT 1 HAS_LOGIC_LOCK_SUPPORT 1 HAS_MICROPROCESSOR 0 HAS_MIF_SMART_COMPILE_SUPPORT 1 HAS_MINMAX_TIMING_MODELING_SUPPORT 1 HAS_MIN_TIMING_ANALYSIS_SUPPORT 1 HAS_MUX_RESTRUCTURE_SUPPORT 1 HAS_NEW_HC_FLOW_SUPPORT 0 HAS_NEW_SERDES_MAX_RESOURCE_COUNT_REPORTING_SUPPORT 1 HAS_NEW_VPR_SUPPORT 1 HAS_NONSOCKET_TECHNOLOGY_MIGRATION_SUPPORT 1 HAS_NO_JTAG_USERCODE_SUPPORT 0 HAS_OPERATING_SETTINGS_AND_CONDITIONS_REPORTING_SUPPORT 1 HAS_PAD_LOCATION_ASSIGNMENT_SUPPORT 0 HAS_PARTIAL_RECONFIG_SUPPORT 0 HAS_PHYSICAL_NETLIST_OUTPUT 0 HAS_PHYSICAL_ROUTING_SUPPORT 1 HAS_PIN_SPECIFIC_VOLTAGE_SUPPORT 1 HAS_PLDM_REF_SUPPORT 1 HAS_POWER_ESTIMATION_SUPPORT 1 HAS_PRELIMINARY_CLOCK_UNCERTAINTY_NUMBERS 0 HAS_PRE_FITTER_FPP_SUPPORT 0 HAS_PRE_FITTER_LUTRAM_NETLIST_CHECKER_ENABLED 1 HAS_PVA_SUPPORT 1 HAS_RCF_SUPPORT 1 HAS_RCF_SUPPORT_FOR_DEBUGGING 0 HAS_RED_BLACK_SEPARATION_SUPPORT 0 HAS_RE_LEVEL_TIMING_GRAPH_SUPPORT 1 HAS_RISEFALL_DELAY_SUPPORT 1 HAS_SIGNAL_PROBE_SUPPORT 1 HAS_SIGNAL_TAP_SUPPORT 1 HAS_SIMULATOR_SUPPORT 0 HAS_SPLIT_IO_SUPPORT 1 HAS_SPLIT_LC_SUPPORT 1 HAS_SYNTH_FSYN_NETLIST_OPT_SUPPORT 1 HAS_SYNTH_NETLIST_OPT_RETIME_SUPPORT 1 HAS_SYNTH_NETLIST_OPT_SUPPORT 1 HAS_TECHNOLOGY_MIGRATION_SUPPORT 1 HAS_TEMPLATED_REGISTER_PACKING_SUPPORT 1 HAS_TIME_BORROWING_SUPPORT 0 HAS_TIMING_DRIVEN_SYNTHESIS_SUPPORT 1 HAS_TIMING_INFO_SUPPORT 1 HAS_TIMING_OPERATING_CONDITIONS 1 HAS_TIMING_SIMULATION_SUPPORT 1 HAS_TITAN_BASED_MAC_REGISTER_PACKER_SUPPORT 1 HAS_USER_HIGH_SPEED_LOW_POWER_TILE_SUPPORT 1 HAS_USE_FITTER_INFO_SUPPORT 0 HAS_VCCPD_POWER_RAIL 1 HAS_VERTICAL_MIGRATION_SUPPORT 1 HAS_VIEWDRAW_SYMBOL_SUPPORT 0 HAS_VIO_SUPPORT 1 HAS_VIRTUAL_DEVICES 0 HAS_WYSIWYG_DFFEAS_SUPPORT 1 HAS_XIBISO_WRITER_SUPPORT 1 INCREMENTAL_DESIGN_SUPPORTS_COMPATIBLE_CONSTRAINTS 1 INSTALLED 0 
IS_CONFIG_ROM 0 IS_DEFAULT_FAMILY 0 IS_HARDCOPY_FAMILY 0 LVDS_IO 1 M10K_MEMORY 0 M144K_MEMORY 1 M20K_MEMORY 0 M4K_MEMORY 0 M512_MEMORY 0 M9K_MEMORY 1 MLAB_MEMORY 1 MRAM_MEMORY 0 NOT_LISTED 0 NO_RPE_SUPPORT 0 NO_SUPPORT_FOR_LOGICLOCK_CONTENT_BACK_ANNOTATION 1 NO_SUPPORT_FOR_STA_CLOCK_UNCERTAINTY_CHECK 0 NO_TDC_SUPPORT 0 POSTFIT_BAK_DATABASE_EXPORT_ENABLED 1 POSTMAP_BAK_DATABASE_EXPORT_ENABLED 1 PROGRAMMER_SUPPORT 1 QFIT_IN_DEVELOPMENT 0 QMAP_IN_DEVELOPMENT 0 RAM_LOGICAL_NAME_CHECKING_IN_CUT_ENABLED 1 REPORTS_METASTABILITY_MTBF 1 REQUIRES_INSTALLATION_PATCH 0 REQUIRES_LIST_OF_TEMPERATURE_AND_VOLTAGE_OPERATING_CONDITIONS 1 RESERVES_SIGNAL_PROBE_PINS 0 RESOLVE_MAX_FANOUT_EARLY 1 RESOLVE_MAX_FANOUT_LATE 0 RESPECTS_FIXED_SIZED_LOCKED_LOCATION_LOGICLOCK 1 RESTRICTED_USER_SELECTION 0 RISEFALL_SUPPORT_IS_HIDDEN 0 SUPPORTS_ADDITIONAL_OPTIONS_FOR_UNUSED_IO 1 SUPPORTS_CRC 1 SUPPORTS_DIFFERENTIAL_AIOT_BOARD_TRACE_MODEL 1 SUPPORTS_DSP_BALANCING_BACK_ANNOTATION 0 SUPPORTS_GENERATION_OF_EARLY_POWER_ESTIMATOR_FILE 1 SUPPORTS_GLOBAL_SIGNAL_BACK_ANNOTATION 1 SUPPORTS_MAC_CHAIN_OUT_ADDER 1 SUPPORTS_RAM_PACKING_BACK_ANNOTATION 0 SUPPORTS_REG_PACKING_BACK_ANNOTATION 0 SUPPORTS_SIGNALPROBE_REGISTER_PIPELINING 1 SUPPORTS_SINGLE_ENDED_AIOT_BOARD_TRACE_MODEL 1 SUPPORTS_USER_MANUAL_LOGIC_DUPLICATION 1 TMV_RUN_CUSTOMIZABLE_VIEWER 1 TMV_RUN_INTERNAL_DETAILS 1 TMV_RUN_INTERNAL_DETAILS_ON_IO 0 TMV_RUN_INTERNAL_DETAILS_ON_IOBUF 1 TMV_RUN_INTERNAL_DETAILS_ON_LCELL 0 TMV_RUN_INTERNAL_DETAILS_ON_LRAM 0 TRANSCEIVER_3G_BLOCK 1 TRANSCEIVER_6G_BLOCK 1 USES_ACV_FOR_FLED 1 USES_ADB_FOR_BACK_ANNOTATION 1 USES_ASIC_ROUTING_POWER_CALCULATOR 0 USES_DATA_DRIVEN_PLL_COMPUTATION_UTIL 1 USES_DEV 1 USES_ICP_FOR_ECO_FITTER 0 USES_LIBERTY_TIMING 0 USES_POWER_SIGNAL_ACTIVITIES 1 USES_THIRD_GENERATION_TIMING_MODELS_TIS 1 USE_ADVANCED_IO_POWER_BY_DEFAULT 1 USE_ADVANCED_IO_TIMING_BY_DEFAULT 1 USE_BASE_FAMILY_DDB_PATH 0 USE_OCT_AUTO_CALIBRATION 0 USE_RISEFALL_ONLY 1 USE_SEPARATE_LIST_FOR_TECH_MIGRATION 1 
USE_SINGLE_COMPILER_PASS_PLL_MIF_FILE_WRITER 1 USE_TITAN_IO_BASED_IO_REGISTER_PACKER_UTIL 1 WYSIWYG_BUS_WIDTH_CHECKING_IN_CUT_ENABLED 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Index: radiohdl/trunk/base/quartus_config
===================================================================
--- radiohdl/trunk/base/quartus_config (nonexistent)
+++ radiohdl/trunk/base/quartus_config (revision 2)
@@ -0,0 +1,288 @@
+#! /usr/bin/env python
+###############################################################################
+#
+# Copyright (C) 2014
+# ASTRON (Netherlands Institute for Radio Astronomy)
+# P.O.Box 2, 7990 AA Dwingeloo, The Netherlands
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+#
+###############################################################################
+
+"""HDL configuration for building Quartus synthesis targets.
+
+ Usage:
+ > python $RADIOHDL/tools/oneclick/base/quartus_config.py -t unb1
+"""
+
+import common as cm
+import hdl_libraries_wizard
+import sys
+from os import listdir
+import os.path
+from argparse import ArgumentParser
+
+class QuartusConfig(hdl_config.HdlLibrariesWizard):
+
+ def __init__(self, toolRootDir, toolFileName, libFileName='hdllib.cfg'):
+ """Get Quartus tool info from toolRootDir and all HDL library info from libRootDir.
+
+ This class uses the default keys and the keys from the libFileSections in the libFileName config file.
+
+ Arguments:
+ - toolRootDir : Root directory from where the hdl_buildset_.cfg file is searched for.
+ - toolFileName : Default HDL tools configuration file name
+ - libFileName : Default HDL library configuration file name
+
+ The libRootDir is defined in the hdl_buildset_.cfg file and is the root directory from where the hdllib.cfg
+ files are searched for.
+
+ The technologyNames parameter is defined in the hdl_buildset_.cfg file. All generic HDL libraries and these
+ technology specific libraries are kept.
+
+ Files:
+ - hdl_buildset_.cfg : HDL tool configuration dictionary file. One central file per buildset.
+
+ - hdllib.cfg : HDL library configuration dictionary file. One file for each HDL library.
+
+ - .qpf : Quartus project file (QPF) for a certain HDL library based on the hdllib.cfg. The file is created by
+ create_quartus_project_file().
+
+ - .qsf : Quartus settings file (QSF) for a certain HDL library based on the hdllib.cfg. The file is created by
+ create_quartus_settings_file(). There is one QSF per Quartus synthesis project.
+ """
+ print "QuartusConfig(toolRootDir=%s, toolFileName=%s, libFileName=%s)" % (toolRootDir, toolFileName, libFileName)
+ libFileSections=['quartus_project_file']
+ hdl_config.HdlLibrariesWizard.__init__(self, toolRootDir, toolFileName, libFileName, libFileSections)
+
+    def create_quartus_ip_lib_file(self, lib_names=None):
+        """Create the Quartus IP file <lib_name>_lib.qip for all HDL libraries. The .qip file contains the list of files that are given
+        by the synth_files key and the quartus_*_file keys.
+
+        Note:
+        . Use post fix '_lib' in QIP file name *_lib.qip to avoid potential conflict with *.qip that may come with the IP.
+        . The HDL library *_lib.qip files contain all files that are listed by the synth_files key. Hence when these qip files are included then
+          the Quartus project will analyse all files even if their entity is not instantiated in the design. This is fine, it is unnecessary
+          to parse the hierarchy of the synth_top_level_entity VHDL file to find and include only the source files that are actually used.
+
+        Arguments:
+        - lib_names : one or more HDL libraries
+        """
+        if lib_names==None:
+            lib_names=self.lib_names
+
+        lib_dicts = self.libs.get_configfiles('hdl_lib_name', values=lib_names)
+        for lib_dict in lib_dicts:
+            # Open qip
+            lib_name = lib_dict['hdl_lib_name']
+            qip_name = lib_name + '_lib.qip'
+            qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
+            cm.mkdir(qip_path)
+            qipPathName = cm.expand_file_path_name(qip_name, qip_path)
+            with open(qipPathName, 'w') as fp:
+                # - synth_files: assignment type is derived from the file extension (.vhd/.vhdl or .v)
+                if lib_dict.get_value('synth_files'):
+                    fp.write('# synth_files\n')
+                    synth_files = lib_dict['synth_files'].split()
+                    for fn in synth_files:
+                        filePathName = cm.expand_file_path_name(fn, lib_dict.location)
+
+                        file_ext = fn.split('.')[-1]
+                        if file_ext=='vhd' or file_ext=='vhdl':
+                            file_type = 'VHDL_FILE'
+                        elif file_ext=='v':
+                            file_type = 'VERILOG_FILE'
+                        else:
+                            print '\nERROR - Undefined file extension in synth_files:', fn
+                            sys.exit()
+
+                        fp.write('set_global_assignment -name %s %s -library %s\n' % (file_type, filePathName, lib_name + '_lib'))
+
+                # - quartus_vhdl_files
+                if lib_dict.get_value('quartus_vhdl_files'):
+                    fp.write('\n')
+                    fp.write('# quartus_vhdl_files\n')
+                    quartus_vhdl_files = lib_dict['quartus_vhdl_files'].split()
+                    for fn in quartus_vhdl_files:
+                        filePathName = cm.expand_file_path_name(fn, lib_dict.location)
+
+                        file_ext = fn.split('.')[-1]
+                        if file_ext=='vhd' or file_ext=='vhdl':
+                            file_type = 'VHDL_FILE'
+                        elif file_ext=='v':
+                            file_type = 'VERILOG_FILE'
+                        else:
+                            print '\nERROR - Undefined file extension in quartus_vhdl_files:', fn
+                            sys.exit()
+
+                        # NOTE(review): file_type is computed above (and may be VERILOG_FILE for '.v'),
+                        # but the write below hard-codes VHDL_FILE -- confirm whether '%s' was intended here.
+                        fp.write('set_global_assignment -name VHDL_FILE %s -library %s\n' % (filePathName, lib_name + '_lib'))
+
+                # - quartus_qip_files
+                if lib_dict.get_value('quartus_qip_files'):
+                    fp.write('\n')
+                    fp.write('# quartus_qip_files\n')
+                    quartus_qip_files = lib_dict['quartus_qip_files'].split()
+                    for fn in quartus_qip_files:
+                        filePathName = cm.expand_file_path_name(fn, lib_dict.location)
+                        fp.write('set_global_assignment -name QIP_FILE %s\n' % filePathName)
+
+                # - quartus_tcl_files
+                if lib_dict.get_value('quartus_tcl_files'):
+                    fp.write('\n')
+                    fp.write('# quartus_tcl_files\n')
+                    quartus_tcl_files = lib_dict['quartus_tcl_files'].split()
+                    for fn in quartus_tcl_files:
+                        filePathName = cm.expand_file_path_name(fn, lib_dict.location)
+                        fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)
+
+                # - quartus_sdc_files (timing constraints)
+                if lib_dict.get_value('quartus_sdc_files'):
+                    fp.write('\n')
+                    fp.write('# quartus_sdc_files\n')
+                    quartus_sdc_files = lib_dict['quartus_sdc_files'].split()
+                    for fn in quartus_sdc_files:
+                        filePathName = cm.expand_file_path_name(fn, lib_dict.location)
+                        fp.write('set_global_assignment -name SDC_FILE %s\n' % filePathName)
+        print "Created {} .qip files".format(len(lib_dicts))
+
+
+    def create_quartus_project_file(self, lib_names=None):
+        """Create the Quartus project file (QPF) for all HDL libraries that have a toplevel entity key synth_top_level_entity.
+
+        Note:
+        . Default if the synth_top_level_entity key is defined but left empty then the top level entity has the same name as the lib_name in hdl_lib_name.
+          Otherwise synth_top_level_entity can specify another top level entity name in the library. Each HDL library can only have one Quartus project
+          file.
+        . The project revision has the same name as the lib_name and will result in a <lib_name>.sof FPGA image file.
+        . For each additional revision a subdirectory can be used.
+          This subdirectory can be named 'revisions/' and lists a number of revisions as subdirectories. Each revision will have a separate hdllib.cfg file and a
+          .vhd file with the toplevel entity. The toplevel .vhd file specifies the settings for the revision in the generics.
+
+        Arguments:
+        - lib_names : one or more HDL libraries
+        """
+        if lib_names==None: lib_names=self.lib_names
+        # Only libraries that define synth_top_level_entity get a QPF
+        lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=lib_names)
+        syn_dicts = self.libs.get_configfiles(key='synth_top_level_entity', values=None, user_configfiles=lib_dicts)
+        for syn_dict in syn_dicts:
+            # Open qpf for each HDL library that has a synth_top_level_entity
+            lib_name = syn_dict['hdl_lib_name']
+            qpf_name = lib_name + '.qpf'
+            qpf_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
+            cm.mkdir(qpf_path)
+            qpfPathName = cm.expand_file_path_name(qpf_name, qpf_path)
+            with open(qpfPathName, 'w') as fp:
+                # The single project revision is named after the library
+                fp.write('PROJECT_REVISION = "%s"\n' % lib_name)
+        print "Created {} .qpf files".format(len(syn_dicts))
+
+
+    def create_quartus_settings_file(self, lib_names=None):
+        """Create the Quartus settings file (QSF) for all HDL libraries that have a toplevel entity key synth_top_level_entity.
+
+        Note:
+        . No support for revisions, so only one qsf per qpf
+
+        Arguments:
+        - lib_names : one or more HDL libraries
+        """
+        if lib_names==None: lib_names=self.lib_names
+        # Only libraries that define synth_top_level_entity get a QSF
+        lib_dicts = self.libs.get_configfiles(key='hdl_lib_name', values=lib_names)
+        syn_dicts = self.libs.get_configfiles(key='synth_top_level_entity', values=None, user_configfiles=lib_dicts)
+        for syn_dict in syn_dicts:
+            # Open qsf for each HDL library that has a synth_top_level_entity
+            lib_name = syn_dict['hdl_lib_name']
+            top_level_entity = syn_dict['synth_top_level_entity']
+            # An empty synth_top_level_entity value means the top level entity equals the library name
+            if top_level_entity=='':
+                top_level_entity = lib_name
+            qsf_path = self.get_lib_build_dirs('synth', lib_dicts=syn_dict)
+            cm.mkdir(qsf_path)
+
+            # One qsf per lib_name
+            qsf_name = lib_name + '.qsf'
+            qsfPathName = cm.expand_file_path_name(qsf_name, qsf_path)
+            with open(qsfPathName, 'w') as fp:
+                fp.write('# synth_top_level_entity\n')
+                fp.write('set_global_assignment -name TOP_LEVEL_ENTITY %s\n' % top_level_entity)
+
+                fp.write('\n')
+                fp.write('# quartus_qsf_files\n')
+                quartus_qsf_files = syn_dict['quartus_qsf_files'].split()
+                for fn in quartus_qsf_files:
+                    filePathName = cm.expand_file_path_name(fn, syn_dict.location)
+                    fp.write('set_global_assignment -name SOURCE_TCL_SCRIPT_FILE %s\n' % filePathName)
+
+                fp.write('\n')
+                fp.write('# All used HDL library *_lib.qip files in order with top level last\n')
+                use_lib_order = self.derive_lib_order('synth', lib_name)
+                #use_lib_dicts = self.libs.get_configfiles('hdl_lib_name', values=use_lib_order) # uses original libs.dicts order, but
+                use_lib_dicts = self.get_lib_dicts_from_lib_names(lib_names=use_lib_order) # must preserve use_lib_order order to ensure that top level design qip with sdc file is include last in qsf
+                for lib_dict in cm.listify(use_lib_dicts):
+                    qip_path = self.get_lib_build_dirs('synth', lib_dicts=lib_dict)
+                    qip_name = lib_dict['hdl_lib_name'] + '_lib.qip'
+                    qipPathName = cm.expand_file_path_name(qip_name, qip_path)
+                    fp.write('set_global_assignment -name QIP_FILE %s\n' % qipPathName)
+        print "Created {} .qsf files".format(len(syn_dicts))
+
+
+if __name__ == '__main__':
+ # Parse command line arguments
+ buildsetSelect = sorted([cfgfile[13:-4] for cfgfile in listdir(os.path.expandvars('$HDL_CONFIG_DIR'))
+ if cfgfile.startswith("hdl_buildset_") and cfgfile.endswith(".cfg")])
+ argparser = ArgumentParser(description='Quartus_config creates/updates all your quartus projectfiles.')
+ argparser.add_argument('buildset', help='choose buildset %s' % (buildsetSelect))
+ argparser.add_argument('-v','--verbosity', required=False, type=int, default=0, help='verbosity >= 0 for more info')
+ args = argparser.parse_args()
+
+ # check arguments
+ if args.buildset not in buildsetSelect:
+ print 'buildset %s is not supported' % args.buildset
+ print "Supported buildset are:", buildsetSelect
+ sys.exit(1)
+ args.buildsetFile = 'hdl_buildset_' + args.buildset + '.cfg'
+
+ # Read the dictionary info from all HDL tool and library configuration files in the current directory and the sub directories
+ qsyn = QuartusConfig(toolRootDir = os.path.expandvars('${HDL_CONFIG_DIR}'),
+ toolFileName= args.buildsetFile,
+ libFileName = 'hdllib.cfg')
+
+ if args.verbosity>=2:
+ print '#'
+ print '# QuartusConfig:'
+ print '#'
+ print ''
+ print 'HDL library paths that are found in $%s:' % qsyn.libRootDirs
+ for p in sorted(qsyn.libs.configfiles.values()):
+ print ' ', p.location
+
+ if args.verbosity>=1:
+ print ''
+ print 'HDL libraries with a top level entity for synthesis that are found in $%s:' % qsyn.libRootDirs
+ print ' %-40s' % 'HDL library', ': Top level entity'
+ syn_dicts = qsyn.libs.get_configfiles(key='synth_top_level_entity')
+ for d in syn_dicts:
+ if d['synth_top_level_entity']=='':
+ print ' %-40s' % d['hdl_lib_name'], ':', d['hdl_lib_name']
+ else:
+ print ' %-40s' % d['hdl_lib_name'], ':', d['synth_top_level_entity']
+
+ print ''
+ print 'Create Quartus IP library qip files for all HDL libraries in $%s.' % qsyn.libRootDirs
+ qsyn.create_quartus_ip_lib_file()
+
+ print ''
+ print 'Copy Quartus directories and files from HDL library source tree to build_dir for all HDL libraries that are found in $%s.' % qsyn.libRootDirs
+ qsyn.copy_files('synth')
+
+ print ''
+ print 'Create Quartus project files (QPF) for technology %s and all HDL libraries with a top level entity for synthesis that are found in $%s.' % (qsyn.technologyNames, qsyn.libRootDirs)
+ qsyn.create_quartus_project_file()
+
+ print ''
+ print 'Create Quartus settings files (QSF) for technology %s and all HDL libraries with a top level entity for synthesis that are found in $%s.' % (qsyn.technologyNames, qsyn.libRootDirs)
+ qsyn.create_quartus_settings_file()
radiohdl/trunk/base/quartus_config
Property changes :
Added: svn:executable
## -0,0 +1 ##
+*
\ No newline at end of property
Index: radiohdl/trunk/doc/RadioHDL_programmers_manual.docx
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream
Index: radiohdl/trunk/doc/RadioHDL_programmers_manual.docx
===================================================================
--- radiohdl/trunk/doc/RadioHDL_programmers_manual.docx (nonexistent)
+++ radiohdl/trunk/doc/RadioHDL_programmers_manual.docx (revision 2)
radiohdl/trunk/doc/RadioHDL_programmers_manual.docx
Property changes :
Added: svn:mime-type
## -0,0 +1 ##
+application/octet-stream
\ No newline at end of property
Index: radiohdl/trunk/doc/RadioHDL_programmers_manual.pdf
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream
Index: radiohdl/trunk/doc/RadioHDL_programmers_manual.pdf
===================================================================
--- radiohdl/trunk/doc/RadioHDL_programmers_manual.pdf (nonexistent)
+++ radiohdl/trunk/doc/RadioHDL_programmers_manual.pdf (revision 2)
radiohdl/trunk/doc/RadioHDL_programmers_manual.pdf
Property changes :
Added: svn:mime-type
## -0,0 +1 ##
+application/octet-stream
\ No newline at end of property
Index: radiohdl/trunk/doc/RadioHDL_user_manual.docx
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream
Index: radiohdl/trunk/doc/RadioHDL_user_manual.docx
===================================================================
--- radiohdl/trunk/doc/RadioHDL_user_manual.docx (nonexistent)
+++ radiohdl/trunk/doc/RadioHDL_user_manual.docx (revision 2)
radiohdl/trunk/doc/RadioHDL_user_manual.docx
Property changes :
Added: svn:mime-type
## -0,0 +1 ##
+application/octet-stream
\ No newline at end of property
Index: radiohdl/trunk/doc/RadioHDL_user_manual.pdf
===================================================================
Cannot display: file marked as a binary type.
svn:mime-type = application/octet-stream
Index: radiohdl/trunk/doc/RadioHDL_user_manual.pdf
===================================================================
--- radiohdl/trunk/doc/RadioHDL_user_manual.pdf (nonexistent)
+++ radiohdl/trunk/doc/RadioHDL_user_manual.pdf (revision 2)
radiohdl/trunk/doc/RadioHDL_user_manual.pdf
Property changes :
Added: svn:mime-type
## -0,0 +1 ##
+application/octet-stream
\ No newline at end of property