Merge pull request #2192 from XavierAubert/cv32e40p/dev_dd_W36a
CV32E40P regress, corev-dv & TB improvements
MikeOpenHWGroup authored Sep 13, 2023
2 parents 6972751 + 2faed93 commit 1acda87
Showing 52 changed files with 3,604 additions and 509 deletions.
16 changes: 14 additions & 2 deletions bin/cv_regress
@@ -154,14 +154,24 @@ def read_file(args, file):
if not test.builds and test.build:
test.builds = [ test.build ]

if args.force_test_cfg:
test.test_cfg = args.force_test_cfg
if args.add_test_cfg:
arg_list = [test.test_cfg if hasattr(test, 'test_cfg') else "", args.add_test_cfg]
test.test_cfg = ",".join(a for a in arg_list if a)

# Determine if a test is valid, skip for compliance tests
# Since it is not possible to determine apriori if a compliance test is valid
if not 'compliance' in test.cmd:
check_valid_test(args.project, test.testname)

# Determine if a test is indexed for setting test iterations
if args.num:
test.num = int(args.num)
# if num is not specified in the yaml, it can be overridden by args.num or set to 1 to avoid issues
# else, if it is not set to 1 (unique), it can be overridden by args.num if the argument is defined
if test.num == None:
test.num = int(args.num or 1)
elif test.num != 1:
test.num = int(args.num or test.num)

regression.add_test(test)

@@ -193,6 +203,8 @@ parser.add_argument('--results', help='Set a non-standard results directory')
parser.add_argument('-s', '--simulator', help='Select simulator', choices=VALID_SIMULATORS, default=DEFAULT_SIMULATOR)
parser.add_argument('-c', '--cov', help='Enable coverage', action='store_true')
parser.add_argument('--cfg', default=None, help='Override configuration for all builds and tests in regression')
parser.add_argument('--force_test_cfg', default=None, help='Override test_cfg file(s) for all builds and tests in regression. Each cfg file should be separated by a comma')
parser.add_argument('--add_test_cfg', default=None, help='Additional test_cfg file(s) for all builds and tests in regression. Each cfg file should be separated by a comma')
parser.add_argument('--iss', default=None, help='Force USE_ISS flag to each test run')
parser.add_argument('--parallel', default=DEFAULT_PARALLEL, help='For VSIF only, set number of parallel jobs')
parser.add_argument('-m', '--metrics', help='Select Metrics waves output', action='store_true')
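The new --add_test_cfg option appends extra test_cfg file(s) to whatever a test already declares, while --force_test_cfg still replaces the value outright. A minimal Python sketch of the append behaviour, mirroring the logic added to read_file() above (the Test stand-in and the YAML file names are hypothetical):

# Minimal sketch of the --add_test_cfg merge; only the join logic mirrors the diff above.
class Test:
    pass

def merge_test_cfg(test, add_test_cfg):
    # keep any test_cfg already set on the test, then append the extra cfg file(s);
    # empty entries are dropped, so a test without test_cfg simply takes the added ones
    arg_list = [test.test_cfg if hasattr(test, 'test_cfg') else "", add_test_cfg]
    test.test_cfg = ",".join(a for a in arg_list if a)
    return test.test_cfg

t = Test()
t.test_cfg = "interrupt_cfg.yaml"                         # hypothetical existing test_cfg
print(merge_test_cfg(t, "debug_cfg.yaml,clic_cfg.yaml"))  # -> interrupt_cfg.yaml,debug_cfg.yaml,clic_cfg.yaml
print(merge_test_cfg(Test(), "debug_cfg.yaml"))           # -> debug_cfg.yaml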
3 changes: 2 additions & 1 deletion bin/lib/cv_regression.py
@@ -76,7 +76,8 @@ def __init__(self, **kwargs):
self.simulation_passed = DEFAULT_SIMULATION_PASSED
self.simulation_failed = DEFAULT_SIMULATION_FAILED
self.iss = DEFAULT_ISS
self.num = 1
# value is handled in main cv_regress script
self.num = None
self.builds = []

for k, v in kwargs.items():
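With the default now None, the interpretation of num moves entirely into cv_regress: an unset num falls back to --num (or 1), an explicit num of 1 always means a single run, and any other explicit value can still be overridden from the command line. A small sketch of that resolution rule as added above (the function name and example values are illustrative only):

# Illustrative restatement of the test-iteration rule now implemented in cv_regress.
def resolve_num(yaml_num, args_num):
    if yaml_num is None:                    # num not specified in the test YAML
        return int(args_num or 1)           # take --num if given, else run once
    elif yaml_num != 1:                     # YAML requests several iterations
        return int(args_num or yaml_num)    # --num may still override it
    return 1                                # an explicit num: 1 stays a unique run

assert resolve_num(None, None) == 1
assert resolve_num(None, "5") == 5
assert resolve_num(3, None) == 3
assert resolve_num(3, "10") == 10
assert resolve_num(1, "10") == 1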
87 changes: 63 additions & 24 deletions bin/templates/regress_rmdb.j2
@@ -4,12 +4,11 @@

{% import 'regress_macros.j2' as regress_macros -%}


<rmdb loadtcl="getUCDBFilename getParameterByPriority getParameterByPriorityYesOrNo">
<rmdb version="1.0" toprunnables="{{project}}" loadtcl="getUCDBFilename getParameterByPriority getParameterByPriorityYesOrNo getTestName getTestCfgName">
<usertcl name="getUCDBFilename">
proc getUCDBFilename {base_name test_cfg} {
if {[string trim $test_cfg] != ""} {
return [concat $base_name "-" $test_cfg ]
return ${base_name}_${test_cfg}
}
return $base_name
}
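The old proc built the UCDB base name with Tcl concat, which joins its arguments with spaces (e.g. "my_test - my_cfg"); the rewrite produces an underscore-joined name that is safe to use as a file name. A rough Python equivalent of the new behaviour (function and example names are illustrative):

# Rough Python equivalent of the rewritten getUCDBFilename proc.
def ucdb_filename(base_name, test_cfg):
    if test_cfg.strip():
        return f"{base_name}_{test_cfg}"   # underscore join instead of concat's space-separated result
    return base_name

print(ucdb_filename("my_test", "my_cfg"))  # -> my_test_my_cfg
print(ucdb_filename("my_test", ""))        # -> my_test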
@@ -37,6 +36,24 @@
}
</usertcl>

<usertcl name="getTestCfgName">
proc getTestCfgName { test_config_from_cmd } {
return [join [lsort -unique [split $test_config_from_cmd ",+ "]] "__"]
}
</usertcl>

<usertcl name="getTestName">
proc getTestName {testname config test_config seed} {
if {$test_config == ""} {
return [format "%s" ${testname}__${config}__$seed]
} else {
set test_list_name [getTestCfgName $test_config]
return [format "%s" ${testname}__${test_list_name}__${config}__$seed]
}

}
</usertcl>
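Together these two new procs derive a readable, unique test name for VRM: getTestCfgName collapses a comma/plus/space-separated cfg list into sorted, de-duplicated tokens joined by double underscores, and getTestName assembles testname__[testcfgname__]config__seed. A rough Python rendering of the same scheme (empty fields are dropped for simplicity; the example strings are hypothetical):

import re

# Rough Python rendering of getTestCfgName / getTestName.
def test_cfg_name(test_config_from_cmd):
    # split on ',', '+' or ' ', de-duplicate, and join the sorted parts with "__"
    parts = sorted(set(p for p in re.split(r"[,+ ]", test_config_from_cmd) if p))
    return "__".join(parts)

def test_name(testname, config, test_config, seed):
    if not test_config:
        return f"{testname}__{config}__{seed}"
    return f"{testname}__{test_cfg_name(test_config)}__{config}__{seed}"

print(test_cfg_name("debug_cfg.yaml,clic_cfg.yaml+debug_cfg.yaml"))
# -> clic_cfg.yaml__debug_cfg.yaml
print(test_name("corev_rand_test", "pulp", "debug_cfg.yaml,clic_cfg.yaml", 42))
# -> corev_rand_test__clic_cfg.yaml__debug_cfg.yaml__pulp__42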

<runnable name="{{project}}" type="group" sequential="yes">
<parameters>
<parameter name="results_sim_path">{{results_path}}/{{simulator}}_results</parameter>
@@ -45,16 +62,21 @@
{% for r in regressions %}
<member>{{r.name}}</member>
{% endfor %}
<member>report</member>
</members>
</runnable>


{% for r in regressions %}
<!-- =========== Regression =========== START -->
<runnable name="{{r.name}}" type="group" sequential="yes">
<members>
{% for build in r.get_builds() %}
<member>{{build.name}}</member>
{% endfor %}
{% for build in r.get_builds() %}
{% if coverage != false %}
<member>cov_report_{{build.name}}</member>
{% endif %}
{% endfor %}
</members>
<preScript launch="exec">
@@ -65,6 +87,7 @@
</runnable>


<!-- =========== Builds =========== START -->
{% for build in r.get_builds() %}
<runnable name="{{build.name}}" type="group" sequential="no">
<!-- set of parameters to be given to leaf runnables -->
@@ -81,22 +104,28 @@
{% endfor %}
</members>
<preScript launch="exec">
<command> echo "BUILD RUNCMD: {{build.cmd}} CV_CORE={{project}} CFG={{build.cfg}} {{toolchain|upper}}=1 SIMULATOR={{build.simulator}} USE_ISS={{regress_macros.yesorno(build.iss)}} COV={{regress_macros.yesorno(build.cov)}} {{regress_macros.cv_results(results)}} {{makeargs}}"</command>
<command> cd {{build.abs_dir}} &amp;&amp; {{build.cmd}} CV_CORE={{project}} CFG={{build.cfg}} {{toolchain|upper}}=1 SIMULATOR={{build.simulator}} USE_ISS={{regress_macros.yesorno(build.iss)}} COV={{regress_macros.yesorno(build.cov)}} {{regress_macros.cv_results(results)}} {{makeargs}} </command>
</preScript>
</runnable>

{% endfor %}

<!-- =========== Builds =========== END -->


<!-- =========== Tests =========== START -->
{% for k,t in unique_tests.items() %}
<runnable name="{{t.name}}" type="task" repeat="{{t.num}}">
<runnable name="{{t.name}}" type="task" foreach="(%seeds%)">
<parameters>
<parameter name="t_cfg" type="tcl">[getParameterByPriority "{{config}}" "{{t.cfg}}" "(%build_config:%)"]</parameter>
<parameter name="t_test_cfg" type="tcl">[getParameterByPriority "" "{{t.test_cfg}}" "(%build_test_cfg:%)"]</parameter>
<parameter name="t_iss" type="tcl">[getParameterByPriorityYesOrNo "{{iss}}" "{{t.iss}}" "(%build_iss:%)"]</parameter>
<parameter name="t_cov" type="tcl">[getParameterByPriorityYesOrNo "{{coverage}}" "{{t.cov}}" "(%build_cov:%)"]</parameter>
<parameter name="ucdb_path" type="tcl">[file join "(%results_sim_path%)" "(%t_cfg%)" "{{t.testname}}" "(%t_test_cfg:%)" "(%ITERATION%)"]</parameter>
<parameter name="ucdb_basename" type="tcl">[getUCDBFilename "{{t.testname}}" "(%t_test_cfg:%)"]</parameter>
<parameter name="t_cfg" type="tcl">[getParameterByPriority "{{config}}" "{{t.cfg}}" "(%build_config:%)"]</parameter>
<parameter name="t_test_cfg" type="tcl">[getParameterByPriority "" "{{t.test_cfg}}" "(%build_test_cfg:%)"]</parameter>
<parameter name="t_test_cfg_name" type="tcl">[getTestCfgName (%t_test_cfg:%)]</parameter>
<parameter name="t_iss" type="tcl">[getParameterByPriorityYesOrNo "{{iss}}" "{{t.iss}}" "(%build_iss:%)"]</parameter>
<parameter name="t_cov" type="tcl">[getParameterByPriorityYesOrNo "{{coverage}}" "{{t.cov}}" "(%build_cov:%)"]</parameter>
<parameter name="seeds" type="tcl">[GetRandomValues {{t.num}}]</parameter>
<parameter name="ucdb_path" type="tcl">[file join "(%results_sim_path%)" "(%t_cfg%)" "{{t.testname}}" "(%t_test_cfg_name:%)" "(%ITERATION%)"]</parameter>
<parameter name="testname" type="tcl">[getTestName "{{t.testname}}" "(%t_cfg%)" "(%t_test_cfg_name:%)" "(%ITERATION%)"]</parameter>
<parameter name="ucdb_basename" type="tcl">[getUCDBFilename "{{t.testname}}" "(%t_test_cfg_name:%)"]</parameter>
{% if coverage != false %}
<parameter name="ucdbfile" >(%ucdb_path%)/(%ucdb_basename%).ucdb</parameter>
{% endif %}
@@ -108,38 +137,48 @@
</method>
{% endif %}
<execScript launch="exec" usestderr="no">
<command> cd {{t.abs_dir}} &amp;&amp; {{t.cmd}} CHECK_SIM_RESULT={{regress_macros.yesorno(check_sim_results)}} CHECK_SIM_LOG=(%log_file%) COMP=0 CV_CORE={{project}} {{toolchain|upper}}=1 CFG=(%t_cfg%) TEST_CFG_FILE=(%t_test_cfg:%) SIMULATOR={{t.simulator}} USE_ISS=(%t_iss:%) COV=(%t_cov:%) RUN_INDEX=(%ITERATION%) GEN_START_INDEX=(%ITERATION%) SEED=random {{regress_macros.cv_results(results)}} {{makeargs}} {{t.makearg}}</command>
<command> echo " TEST RUNCMD: {{t.cmd}} CHECK_SIM_RESULT={{regress_macros.yesorno(check_sim_results)}} COMP=0 CV_CORE={{project}} {{toolchain|upper}}=1 CFG=(%t_cfg%) TEST_CFG_FILE=(%t_test_cfg:%) SIMULATOR={{t.simulator}} USE_ISS=(%t_iss:%) COV=(%t_cov:%) RUN_INDEX=(%ITERATION%) GEN_START_INDEX=(%ITERATION%) SEED=(%ITERATION%) {{regress_macros.cv_results(results)}} {{makeargs}} {{t.makearg}}"</command>
<command> echo " logfile: (%log_file%)"</command>
<command> cd {{t.abs_dir}} &amp;&amp; {{t.cmd}} CHECK_SIM_RESULT={{regress_macros.yesorno(check_sim_results)}} CHECK_SIM_LOG=(%log_file%) COMP=0 CV_CORE={{project}} {{toolchain|upper}}=1 CFG=(%t_cfg%) TEST_CFG_FILE=(%t_test_cfg:%) SIMULATOR={{t.simulator}} USE_ISS=(%t_iss:%) COV=(%t_cov:%) RUN_INDEX=(%ITERATION%) GEN_START_INDEX=(%ITERATION%) SEED=(%ITERATION%) {{regress_macros.cv_results(results)}} {{makeargs}} {{t.makearg}}</command>
</execScript>
</runnable>

{% endfor %}
<!-- =========== Tests =========== END -->
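The test runnable now iterates foreach over a seeds list instead of using repeat="{{t.num}}" with SEED=random: seeds is produced by GetRandomValues (defined outside this hunk; presumably it returns {{t.num}} random values), and the current iteration value is passed as SEED, RUN_INDEX and GEN_START_INDEX, so every run is reproducible from the generated rmdb. A small sketch of that pattern, assuming GetRandomValues simply draws N random 31-bit seeds:

import random

# Sketch of the seed-per-iteration pattern; the 31-bit width and command text are assumptions.
def get_random_values(num):
    return [random.getrandbits(31) for _ in range(num)]

for seed in get_random_values(3):
    # in the template the seed becomes (%ITERATION%) and lands on the make command line
    print(f"make test ... SEED={seed} RUN_INDEX={seed} GEN_START_INDEX={seed}")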


{% endfor %}
<!-- =========== Regression =========== END -->

<!-- =========== Reporting =========== -->
<runnable name="report" type="group">

<!-- =========== Coverage Reports =========== START -->
{% for r in regressions %}
{% for build in r.get_builds() %}
<runnable name="cov_report_{{build.name}}" type="group">
<parameters>
<parameter name="merged_file">(%results_sim_path%)/merged/merged.ucdb</parameter>
<parameter name="merged_file">(%results_sim_path%)/{{build.cfg}}/merged/merged.ucdb</parameter>
<!-- <parameter name="tplan_file">(%results_sim_path%)/merged.ucdb</parameter> -->
</parameters>
{% if coverage != false %}
<preScript launch="exec">
<command> cd {{results_path}} &amp;&amp; make cov_merge SIMULATOR={{simulator}}</command>
<command> cd {{results_path}} &amp;&amp; make cov_merge CFG={{build.cfg}} SIMULATOR={{simulator}}</command>
</preScript>
{% endif %}
<members>
<member>html_report</member>
<member>html_report_{{build.name}}</member>
</members>
<postScript mintimeout="3000">
<command>vrun -vrmdata (%DATADIR%) -status -full -html -htmldir (%DATADIR%)/vrun</command>
<command>vrun -vrmdata (%DATADIR%) -status -testname -full -html -htmldir (%DATADIR%)/vrun</command>
</postScript>
</runnable>

<runnable name="html_report" type="task">
<execScript>
<command> if {[file exists (%merged_file%)]} {vcover report -annotate -testdetails -details -html (%merged_file%) -output [file join {{results_path}} cov_html_summary]} </command>
<runnable name="html_report_{{build.name}}" type="task">
<execScript>
<command> if {[file exists (%merged_file%)]} {vcover report -annotate -testdetails -details -html (%merged_file%) -output (%results_sim_path%)/{{build.cfg}}/cov_html_summary} </command>
</execScript>
</runnable>
</runnable>

{% endfor %}
{% endfor %}
<!-- =========== Coverage Reports =========== END -->
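Coverage merging and reporting are now done per build configuration rather than once globally: each cov_report_<build> group runs make cov_merge CFG=<cfg>, merges into <results>/<cfg>/merged/merged.ucdb and writes its vcover HTML summary next to it. A minimal sketch of the per-CFG layout the template now emits (directory and cfg names are placeholders):

from pathlib import Path

# Placeholder walk-through of the per-CFG coverage merge/report layout.
results_sim_path = Path("vrm_results/vsim_results")   # (%results_sim_path%) stand-in
for cfg in ["default", "pulp"]:                       # hypothetical build cfgs
    merged = results_sim_path / cfg / "merged" / "merged.ucdb"
    report = results_sim_path / cfg / "cov_html_summary"
    print(f"make cov_merge CFG={cfg} SIMULATOR=vsim    # produces {merged}")
    print(f"vcover report ... -html {merged} -output {report}")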

</rmdb>