Tests: add CMake option to run Cycles regression tests on GPU devices
CYCLES_TEST_DEVICES is a list of device types (CPU, CUDA, OPTIX, OPENCL); by default it contains only CPU. Test output is now written to build/tests/cycles/<device>, and the HTML report has a separate page per device, with an option to compare CPU and GPU renders. Various GPU tests still fail due to CPU/GPU differences; these remain to be fixed or blacklisted.

Ref T82193
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -380,6 +380,7 @@ option(WITH_CYCLES_CUDA_BINARIES "Build Cycles CUDA binaries" OFF)
 option(WITH_CYCLES_CUBIN_COMPILER "Build cubins with nvrtc based compiler instead of nvcc" OFF)
 option(WITH_CYCLES_CUDA_BUILD_SERIAL "Build cubins one after another (useful on machines with limited RAM)" OFF)
 mark_as_advanced(WITH_CYCLES_CUDA_BUILD_SERIAL)
+set(CYCLES_TEST_DEVICES CPU CACHE STRING "Run regression tests on the specified device types (CPU CUDA OPTIX OPENCL)" )
 set(CYCLES_CUDA_BINARIES_ARCH sm_30 sm_35 sm_37 sm_50 sm_52 sm_60 sm_61 sm_70 sm_75 sm_86 compute_75 CACHE STRING "CUDA architectures to build binaries for")
 mark_as_advanced(CYCLES_CUDA_BINARIES_ARCH)
 unset(PLATFORM_DEFAULT)
--- a/tests/python/CMakeLists.txt
+++ b/tests/python/CMakeLists.txt
@@ -598,20 +598,28 @@ if(WITH_CYCLES OR WITH_OPENGL_RENDER_TESTS)
     list(APPEND render_tests grease_pencil)
   endif()
 
+  # Cycles
   if(WITH_CYCLES)
-    foreach(render_test bake;${render_tests})
-      add_python_test(
-        cycles_${render_test}
-        ${CMAKE_CURRENT_LIST_DIR}/cycles_render_tests.py
-        -blender "${TEST_BLENDER_EXE}"
-        -testdir "${TEST_SRC_DIR}/render/${render_test}"
-        -idiff "${OPENIMAGEIO_IDIFF}"
-        -outdir "${TEST_OUT_DIR}/cycles"
-      )
+    foreach(_cycles_device ${CYCLES_TEST_DEVICES})
+      string(TOLOWER "${_cycles_device}" _cycles_device_lower)
+      set(_cycles_render_tests bake;${render_tests})
+
+      foreach(render_test ${_cycles_render_tests})
+        add_python_test(
+          cycles_${render_test}_${_cycles_device_lower}
+          ${CMAKE_CURRENT_LIST_DIR}/cycles_render_tests.py
+          -blender "${TEST_BLENDER_EXE}"
+          -testdir "${TEST_SRC_DIR}/render/${render_test}"
+          -idiff "${OPENIMAGEIO_IDIFF}"
+          -outdir "${TEST_OUT_DIR}/cycles"
+          -device ${_cycles_device}
+        )
+      endforeach()
     endforeach()
   endif()
 
   if(WITH_OPENGL_RENDER_TESTS)
+    # Eevee
     foreach(render_test ${render_tests})
       add_python_test(
         eevee_${render_test}_test
@@ -624,6 +632,7 @@ if(WITH_CYCLES OR WITH_OPENGL_RENDER_TESTS)
     endforeach()
 
     foreach(render_test ${render_tests})
+      # Workbench
       add_python_test(
         workbench_${render_test}_test
         ${CMAKE_CURRENT_LIST_DIR}/workbench_render_tests.py
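The nested loops above register each Cycles test once per device, with the lower-cased device name appended to the test name. A minimal Python sketch of the resulting test matrix (the device and test lists here are illustrative, not the full set):

    devices = ['CPU', 'OPTIX']                 # assumed example value of CYCLES_TEST_DEVICES
    render_tests = ['bake', 'image', 'light']  # abbreviated; the real list is longer
    names = ['cycles_%s_%s' % (test, device.lower())
             for device in devices
             for test in render_tests]
    # ['cycles_bake_cpu', 'cycles_image_cpu', 'cycles_light_cpu',
    #  'cycles_bake_optix', 'cycles_image_optix', 'cycles_light_optix']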
--- a/tests/python/cycles_render_tests.py
+++ b/tests/python/cycles_render_tests.py
@@ -50,6 +50,7 @@ def create_argparse():
     parser.add_argument("-testdir", nargs=1)
     parser.add_argument("-outdir", nargs=1)
     parser.add_argument("-idiff", nargs=1)
+    parser.add_argument("-device", nargs=1)
     return parser
 
 
@@ -61,12 +62,16 @@ def main():
     test_dir = args.testdir[0]
     idiff = args.idiff[0]
     output_dir = args.outdir[0]
+    device = args.device[0]
 
     from modules import render_report
-    report = render_report.Report("Cycles", output_dir, idiff)
+    report = render_report.Report('Cycles', output_dir, idiff, device)
     report.set_pixelated(True)
     report.set_reference_dir("cycles_renders")
-    report.set_compare_engines('cycles', 'eevee')
+    if device == 'CPU':
+        report.set_compare_engine('eevee')
+    else:
+        report.set_compare_engine('cycles', 'CPU')
 
     # Increase threshold for motion blur, see T78777.
     test_dir_name = Path(test_dir).name
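The comparison target now depends on the device under test: CPU runs are still compared against Eevee, while GPU runs are compared against the Cycles CPU renders of the same scenes. Given the set_compare_engine signature introduced below, the stored tuples look like this:

    report.set_compare_engine('eevee')          # CPU run: compare_engine == ('eevee', None)
    report.set_compare_engine('cycles', 'CPU')  # GPU run: compare_engine == ('cycles', 'CPU')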
--- a/tests/python/eevee_render_tests.py
+++ b/tests/python/eevee_render_tests.py
@@ -137,7 +137,7 @@ def main():
     report = render_report.Report("Eevee", output_dir, idiff)
     report.set_pixelated(True)
     report.set_reference_dir("eevee_renders")
-    report.set_compare_engines('eevee', 'cycles')
+    report.set_compare_engine('cycles', 'CPU')
     ok = report.run(test_dir, blender, get_arguments, batch=True)
 
     sys.exit(not ok)
--- a/tests/python/modules/render_report.py
+++ b/tests/python/modules/render_report.py
@@ -39,6 +39,7 @@ def _write_html(output_dir):
 <div class="container">
     <br/>
     <h1>{title}</h1>
+    <nav aria-label="breadcrumb"><ol class="breadcrumb"><li class="breadcrumb-item active" aria-current="page">Test Reports</li></ol></nav>
     {combined_reports}
     <br/>
 </div>
@@ -102,6 +102,7 @@ class Report:
     __slots__ = (
         'title',
         'output_dir',
+        'global_dir',
         'reference_dir',
         'idiff',
         'pixelated',
@@ -112,17 +113,24 @@ class Report:
         'failed_tests',
         'passed_tests',
         'compare_tests',
-        'compare_engines'
+        'compare_engine',
+        'device'
     )
 
-    def __init__(self, title, output_dir, idiff):
+    def __init__(self, title, output_dir, idiff, device=None):
         self.title = title
         self.output_dir = output_dir
+        self.global_dir = os.path.dirname(output_dir)
         self.reference_dir = 'reference_renders'
         self.idiff = idiff
-        self.compare_engines = None
+        self.compare_engine = None
         self.fail_threshold = 0.016
         self.fail_percent = 1
+        self.device = device
+
+        if device:
+            self.title = self._engine_title(title, device)
+            self.output_dir = self._engine_path(self.output_dir, device.lower())
+
         self.pixelated = False
         self.verbose = os.environ.get("BLENDER_VERBOSE") is not None
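When a device is passed, the constructor derives a per-device title and output directory, while global_dir keeps pointing at the shared parent used for the combined report. An illustrative example (the paths are assumptions, not from the commit):

    report = Report('Cycles', '/build/tests/cycles', idiff, device='OPTIX')
    # report.title      == 'Cycles OPTIX'               (via _engine_title)
    # report.output_dir == '/build/tests/cycles/optix'  (via _engine_path)
    # report.global_dir == '/build/tests'               (set before the device branch)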
@@ -147,8 +155,8 @@ class Report:
     def set_reference_dir(self, reference_dir):
         self.reference_dir = reference_dir
 
-    def set_compare_engines(self, engine, other_engine):
-        self.compare_engines = (engine, other_engine)
+    def set_compare_engine(self, other_engine, other_device=None):
+        self.compare_engine = (other_engine, other_device)
 
     def run(self, dirpath, blender, arguments_cb, batch=False):
         # Run tests and output report.
@@ -156,7 +164,7 @@ class Report:
         ok = self._run_all_tests(dirname, dirpath, blender, arguments_cb, batch)
         self._write_data(dirname)
         self._write_html()
-        if self.compare_engines:
+        if self.compare_engine:
             self._write_html(comparison=True)
         return ok
 
@@ -171,7 +179,7 @@ class Report:
         filepath = os.path.join(outdir, "passed.data")
         pathlib.Path(filepath).write_text(self.passed_tests)
 
-        if self.compare_engines:
+        if self.compare_engine:
             filepath = os.path.join(outdir, "compare.data")
             pathlib.Path(filepath).write_text(self.compare_tests)
 
@@ -181,12 +189,26 @@ class Report:
         else:
             return """<li class="breadcrumb-item"><a href="%s">%s</a></li>""" % (href, title)
 
+    def _engine_title(self, engine, device):
+        if device:
+            return engine.title() + ' ' + device
+        else:
+            return engine.title()
+
+    def _engine_path(self, path, device):
+        if device:
+            return os.path.join(path, device.lower())
+        else:
+            return path
+
     def _navigation_html(self, comparison):
         html = """<nav aria-label="breadcrumb"><ol class="breadcrumb">"""
-        html += self._navigation_item("Test Reports", "../report.html", False)
+        base_path = os.path.relpath(self.global_dir, self.output_dir)
+        global_report_path = os.path.join(base_path, "report.html")
+        html += self._navigation_item("Test Reports", global_report_path, False)
         html += self._navigation_item(self.title, "report.html", not comparison)
-        if self.compare_engines:
-            compare_title = "Compare with %s" % self.compare_engines[1].capitalize()
+        if self.compare_engine:
+            compare_title = "Compare with %s" % self._engine_title(*self.compare_engine)
             html += self._navigation_item(compare_title, "compare.html", comparison)
         html += """</ol></nav>"""
 
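Because a per-device report can now live one directory level deeper (build/tests/cycles/<device>), the link back to the global report is computed relative to global_dir instead of being hard-coded as "../report.html". For example, with the assumed paths from above, standard os.path behavior gives:

    os.path.relpath('/build/tests', '/build/tests/cycles/optix')  # '../..'
    os.path.join('../..', 'report.html')                          # '../../report.html'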
@@ -233,8 +255,8 @@ class Report:
 
         if comparison:
             title = self.title + " Test Compare"
-            engine_self = self.compare_engines[0].capitalize()
-            engine_other = self.compare_engines[1].capitalize()
+            engine_self = self.title
+            engine_other = self._engine_title(*self.compare_engine)
             columns_html = "<tr><th>Name</th><th>%s</th><th>%s</th>" % (engine_self, engine_other)
         else:
             title = self.title + " Test Report"
@@ -300,9 +322,8 @@ class Report:
 
         # Update global report
         if not comparison:
-            global_output_dir = os.path.dirname(self.output_dir)
             global_failed = failed if not comparison else None
-            global_report.add(global_output_dir, "Render", self.title, filepath, global_failed)
+            global_report.add(self.global_dir, "Render", self.title, filepath, global_failed)
 
     def _relative_url(self, filepath):
         relpath = os.path.relpath(filepath, self.output_dir)
@@ -340,8 +361,9 @@ class Report:
         else:
             self.passed_tests += test_html
 
-        if self.compare_engines:
-            ref_url = os.path.join("..", self.compare_engines[1], new_url)
+        if self.compare_engine:
+            base_path = os.path.relpath(self.global_dir, self.output_dir)
+            ref_url = os.path.join(base_path, self._engine_path(*self.compare_engine), new_url)
 
             test_html = """
                 <tr{tr_style}>
@@ -445,6 +467,9 @@ class Report:
             if not batch:
                 break
 
+        if self.device:
+            command.extend(['--', '--cycles-device', self.device])
+
         # Run process
         crash = False
         output = None
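Arguments after "--" on a Blender command line are not parsed by Blender itself; they are passed through to the Python test script, which reads --cycles-device to select the render device. A sketch of the resulting argument list (the base command here is a made-up example):

    command = ['blender', '--background', 'scene.blend']  # hypothetical base command
    command.extend(['--', '--cycles-device', 'OPTIX'])
    # runs: blender --background scene.blend -- --cycles-device OPTIX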
--- a/tests/python/workbench_render_tests.py
+++ b/tests/python/workbench_render_tests.py
@@ -72,7 +72,7 @@ def main():
     report = render_report.Report("Workbench", output_dir, idiff)
     report.set_pixelated(True)
     report.set_reference_dir("workbench_renders")
-    report.set_compare_engines('workbench', 'eevee')
+    report.set_compare_engine('eevee')
     ok = report.run(test_dir, blender, get_arguments, batch=True)
 
     sys.exit(not ok)