Tests: add CMake option to run Cycles regression tests on GPU devices

CYCLES_TEST_DEVICES is a list of devices (CPU, CUDA, OPTIX, OPENCL). It is set
to CPU only by default.
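
As a rough sketch (the exact build setup varies; the ctest name filter here is
just an example matching the cycles_<test>_<device> tests added below), GPU
devices can be enabled at configure time and the per-device tests run with:

  cmake -D CYCLES_TEST_DEVICES="CPU;CUDA" <build_dir>
  ctest -R "^cycles_"

Each device type in the list gets its own cycles_<test>_<device> test.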

Test output is now written to build/tests/cycles/<device>, and the HTML report
has separate pages for each device, with an option to compare CPU and GPU
renders.
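
Concretely, the layout then looks roughly like the following (paths assume the
default test output directory in the build tree; the compare targets come from
the per-device settings below):

  build/tests/report.html              global index linking all reports
  build/tests/cycles/cpu/report.html   CPU results, compare.html against Eevee
  build/tests/cycles/cuda/report.html  CUDA results, compare.html against the Cycles CPU renders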

Various GPU tests still fail due to CPU/GPU rendering differences; these remain
to be fixed or blacklisted.

Ref T82193
Brecht Van Lommel 2020-10-28 16:19:03 +01:00
parent c986e46be7
commit dd391d38f6
Notes: blender-bot 2023-02-13 20:42:01 +01:00
Referenced by issue #82193, Cycles regression testing for GPU devices
7 changed files with 70 additions and 29 deletions

View File

@ -380,6 +380,7 @@ option(WITH_CYCLES_CUDA_BINARIES "Build Cycles CUDA binaries" OFF)
option(WITH_CYCLES_CUBIN_COMPILER "Build cubins with nvrtc based compiler instead of nvcc" OFF)
option(WITH_CYCLES_CUDA_BUILD_SERIAL "Build cubins one after another (useful on machines with limited RAM)" OFF)
mark_as_advanced(WITH_CYCLES_CUDA_BUILD_SERIAL)
set(CYCLES_TEST_DEVICES CPU CACHE STRING "Run regression tests on the specified device types (CPU CUDA OPTIX OPENCL)" )
set(CYCLES_CUDA_BINARIES_ARCH sm_30 sm_35 sm_37 sm_50 sm_52 sm_60 sm_61 sm_70 sm_75 sm_86 compute_75 CACHE STRING "CUDA architectures to build binaries for")
mark_as_advanced(CYCLES_CUDA_BINARIES_ARCH)
unset(PLATFORM_DEFAULT)

View File

@ -598,20 +598,28 @@ if(WITH_CYCLES OR WITH_OPENGL_RENDER_TESTS)
list(APPEND render_tests grease_pencil)
endif()
# Cycles
if(WITH_CYCLES)
  foreach(render_test bake;${render_tests})
    add_python_test(
      cycles_${render_test}
      ${CMAKE_CURRENT_LIST_DIR}/cycles_render_tests.py
      -blender "${TEST_BLENDER_EXE}"
      -testdir "${TEST_SRC_DIR}/render/${render_test}"
      -idiff "${OPENIMAGEIO_IDIFF}"
      -outdir "${TEST_OUT_DIR}/cycles"
    )
  foreach(_cycles_device ${CYCLES_TEST_DEVICES})
    string(TOLOWER "${_cycles_device}" _cycles_device_lower)
    set(_cycles_render_tests bake;${render_tests})
    foreach(render_test ${_cycles_render_tests})
      add_python_test(
        cycles_${render_test}_${_cycles_device_lower}
        ${CMAKE_CURRENT_LIST_DIR}/cycles_render_tests.py
        -blender "${TEST_BLENDER_EXE}"
        -testdir "${TEST_SRC_DIR}/render/${render_test}"
        -idiff "${OPENIMAGEIO_IDIFF}"
        -outdir "${TEST_OUT_DIR}/cycles"
        -device ${_cycles_device}
      )
    endforeach()
  endforeach()
endif()
if(WITH_OPENGL_RENDER_TESTS)
# Eevee
foreach(render_test ${render_tests})
add_python_test(
eevee_${render_test}_test
@ -624,6 +632,7 @@ if(WITH_CYCLES OR WITH_OPENGL_RENDER_TESTS)
endforeach()
foreach(render_test ${render_tests})
# Workbench
add_python_test(
workbench_${render_test}_test
${CMAKE_CURRENT_LIST_DIR}/workbench_render_tests.py

View File

@ -50,6 +50,7 @@ def create_argparse():
parser.add_argument("-testdir", nargs=1)
parser.add_argument("-outdir", nargs=1)
parser.add_argument("-idiff", nargs=1)
parser.add_argument("-device", nargs=1)
return parser
@ -61,12 +62,16 @@ def main():
test_dir = args.testdir[0]
idiff = args.idiff[0]
output_dir = args.outdir[0]
device = args.device[0]
from modules import render_report
report = render_report.Report("Cycles", output_dir, idiff)
report = render_report.Report('Cycles', output_dir, idiff, device)
report.set_pixelated(True)
report.set_reference_dir("cycles_renders")
report.set_compare_engines('cycles', 'eevee')
if device == 'CPU':
    report.set_compare_engine('eevee')
else:
    report.set_compare_engine('cycles', 'CPU')
# Increase threshold for motion blur, see T78777.
test_dir_name = Path(test_dir).name

View File

@ -137,7 +137,7 @@ def main():
report = render_report.Report("Eevee", output_dir, idiff)
report.set_pixelated(True)
report.set_reference_dir("eevee_renders")
report.set_compare_engines('eevee', 'cycles')
report.set_compare_engine('cycles', 'CPU')
ok = report.run(test_dir, blender, get_arguments, batch=True)
sys.exit(not ok)

View File

@ -39,6 +39,7 @@ def _write_html(output_dir):
<div class="container">
<br/>
<h1>{title}</h1>
<nav aria-label="breadcrumb"><ol class="breadcrumb"><li class="breadcrumb-item active" aria-current="page">Test Reports</li></ol></nav>
{combined_reports}
<br/>
</div>

View File

@ -102,6 +102,7 @@ class Report:
__slots__ = (
'title',
'output_dir',
'global_dir',
'reference_dir',
'idiff',
'pixelated',
@ -112,17 +113,24 @@ class Report:
'failed_tests',
'passed_tests',
'compare_tests',
'compare_engines'
'compare_engine',
'device'
)
def __init__(self, title, output_dir, idiff):
def __init__(self, title, output_dir, idiff, device=None):
    self.title = title
    self.output_dir = output_dir
    self.global_dir = os.path.dirname(output_dir)
    self.reference_dir = 'reference_renders'
    self.idiff = idiff
    self.compare_engines = None
    self.compare_engine = None
    self.fail_threshold = 0.016
    self.fail_percent = 1
    self.device = device
    if device:
        self.title = self._engine_title(title, device)
        self.output_dir = self._engine_path(self.output_dir, device.lower())
    self.pixelated = False
    self.verbose = os.environ.get("BLENDER_VERBOSE") is not None
@ -147,8 +155,8 @@ class Report:
def set_reference_dir(self, reference_dir):
    self.reference_dir = reference_dir

def set_compare_engines(self, engine, other_engine):
    self.compare_engines = (engine, other_engine)
def set_compare_engine(self, other_engine, other_device=None):
    self.compare_engine = (other_engine, other_device)

def run(self, dirpath, blender, arguments_cb, batch=False):
    # Run tests and output report.
@ -156,7 +164,7 @@ class Report:
ok = self._run_all_tests(dirname, dirpath, blender, arguments_cb, batch)
self._write_data(dirname)
self._write_html()
if self.compare_engines:
if self.compare_engine:
    self._write_html(comparison=True)

return ok
@ -171,7 +179,7 @@ class Report:
filepath = os.path.join(outdir, "passed.data")
pathlib.Path(filepath).write_text(self.passed_tests)
if self.compare_engines:
if self.compare_engine:
    filepath = os.path.join(outdir, "compare.data")
    pathlib.Path(filepath).write_text(self.compare_tests)
@ -181,12 +189,26 @@ class Report:
else:
    return """<li class="breadcrumb-item"><a href="%s">%s</a></li>""" % (href, title)

def _engine_title(self, engine, device):
    if device:
        return engine.title() + ' ' + device
    else:
        return engine.title()

def _engine_path(self, path, device):
    if device:
        return os.path.join(path, device.lower())
    else:
        return path

def _navigation_html(self, comparison):
    html = """<nav aria-label="breadcrumb"><ol class="breadcrumb">"""
    html += self._navigation_item("Test Reports", "../report.html", False)
    base_path = os.path.relpath(self.global_dir, self.output_dir)
    global_report_path = os.path.join(base_path, "report.html")
    html += self._navigation_item("Test Reports", global_report_path, False)
    html += self._navigation_item(self.title, "report.html", not comparison)
    if self.compare_engines:
        compare_title = "Compare with %s" % self.compare_engines[1].capitalize()
    if self.compare_engine:
        compare_title = "Compare with %s" % self._engine_title(*self.compare_engine)
    html += self._navigation_item(compare_title, "compare.html", comparison)
    html += """</ol></nav>"""
@ -233,8 +255,8 @@ class Report:
if comparison:
    title = self.title + " Test Compare"
    engine_self = self.compare_engines[0].capitalize()
    engine_other = self.compare_engines[1].capitalize()
    engine_self = self.title
    engine_other = self._engine_title(*self.compare_engine)
    columns_html = "<tr><th>Name</th><th>%s</th><th>%s</th>" % (engine_self, engine_other)
else:
    title = self.title + " Test Report"
@ -300,9 +322,8 @@ class Report:
# Update global report
if not comparison:
    global_output_dir = os.path.dirname(self.output_dir)
    global_failed = failed if not comparison else None
    global_report.add(global_output_dir, "Render", self.title, filepath, global_failed)
    global_report.add(self.global_dir, "Render", self.title, filepath, global_failed)

def _relative_url(self, filepath):
    relpath = os.path.relpath(filepath, self.output_dir)
@ -340,8 +361,9 @@ class Report:
else:
    self.passed_tests += test_html

if self.compare_engines:
    ref_url = os.path.join("..", self.compare_engines[1], new_url)
if self.compare_engine:
    base_path = os.path.relpath(self.global_dir, self.output_dir)
    ref_url = os.path.join(base_path, self._engine_path(*self.compare_engine), new_url)
test_html = """
<tr{tr_style}>
@ -445,6 +467,9 @@ class Report:
if not batch:
    break

if self.device:
    command.extend(['--', '--cycles-device', self.device])
# Run process
crash = False
output = None

View File

@ -72,7 +72,7 @@ def main():
report = render_report.Report("Workbench", output_dir, idiff)
report.set_pixelated(True)
report.set_reference_dir("workbench_renders")
report.set_compare_engines('workbench', 'eevee')
report.set_compare_engine('eevee')
ok = report.run(test_dir, blender, get_arguments, batch=True)
sys.exit(not ok)