Move tests into tests/ top-level dir

This commit is contained in:
Campbell Barton
2014-06-18 21:44:40 +10:00
parent 7259ac821e
commit 0eb060c7b4
22 changed files with 25 additions and 16 deletions

4
tests/CMakeLists.txt Normal file
View File

@@ -0,0 +1,4 @@
# Python CTests
# Register the CTest definitions that live in the python/ sub-directory.
add_subdirectory(python)

149
tests/check_deprecated.py Normal file
View File

@@ -0,0 +1,149 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import os
from os.path import splitext
# Number of days after which a deprecation marker is flagged for removal.
DEPRECATE_DAYS = 120

# Top-level directories (relative to the repository root) excluded from scanning.
SKIP_DIRS = ("extern",
             "scons",
             "tests",  # not this dir
             )
def is_c_header(filename):
    """Return True when *filename* has a C/C++ header extension."""
    return splitext(filename)[1] in {".h", ".hpp", ".hxx", ".hh"}
def is_c(filename):
    """Return True when *filename* has a C/C++/ObjC implementation extension."""
    return splitext(filename)[1] in {".c", ".cpp", ".cxx", ".m", ".mm", ".rc", ".cc", ".inl"}
def is_c_any(filename):
    """Return True for any C-family file, implementation or header."""
    return any(check(filename) for check in (is_c, is_c_header))
def is_py(filename):
    """Return True when *filename* is a Python source file."""
    return splitext(filename)[1] == ".py"
def is_source_any(filename):
    """Return True for any scanned source type (Python or C family)."""
    return is_py(filename) or is_c_any(filename)
def source_list(path, filename_check=None):
    """Yield the path of every file under *path*, skipping hidden directories.

    :arg path: directory to walk recursively.
    :arg filename_check: optional predicate taking a file *name* (not the
       full path); when given, only matching files are yielded.
    """
    for dirpath, dirnames, filenames in os.walk(path):
        # Skip hidden directories such as '.svn' or '.git'.
        # The original test ``dirpath.startswith(".")`` only worked when the
        # walk root was a relative path; pruning *dirnames* in place skips
        # hidden directories at any depth, for absolute roots too.
        dirnames[:] = [d for d in dirnames if not d.startswith(".")]
        for filename in filenames:
            if filename_check is None or filename_check(filename):
                yield os.path.join(dirpath, filename)
def deprecations():
    """
    Searches out source code for lines like

    /* *DEPRECATED* 2011/7/17 bgl.Buffer.list info text */

    Or...

    # *DEPRECATED* 2010/12/22 some.py.func more info */

    :return: list of ``(datetime, (filename, line_number), info)`` tuples,
       one per deprecation marker found under the repository root.
    """
    import datetime
    # Repository root: the parent of the directory holding this script.
    SOURCE_DIR = os.path.normpath(os.path.abspath(os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))))

    SKIP_DIRS_ABS = [os.path.join(SOURCE_DIR, p) for p in SKIP_DIRS]

    deprecations_ls = []

    scan_tot = 0

    print("scanning in %r for '*DEPRECATED* YYYY/MM/DD info'" % SOURCE_DIR)

    for fn in source_list(SOURCE_DIR, is_source_any):
        # skip files under any of the blacklisted directories
        if any(fn.startswith(p) for p in SKIP_DIRS_ABS):
            continue

        # 'with' guarantees the handle is closed (the original leaked it)
        with open(fn, 'r', encoding="utf8") as file:
            for i, l in enumerate(file):
                # logic for deprecation warnings
                if '*DEPRECATED*' in l:
                    try:
                        l = l.strip()
                        # text after the marker, e.g. "2011/7/17 info..."
                        data = l.split('*DEPRECATED*', 1)[-1].strip()
                        # split into at most (year, month, "day rest-of-line")
                        data = [w.strip() for w in data.split('/', 2)]
                        data[-1], info = data[-1].split(' ', 1)
                        info = info.split("*/", 1)[0]
                        if len(data) != 3:
                            print(" poorly formatted line:\n"
                                  " %r:%d\n"
                                  " %s" %
                                  (fn, i + 1, l)
                                  )
                        else:
                            data = datetime.datetime(*tuple([int(w) for w in data]))
                            deprecations_ls.append((data, (fn, i + 1), info))
                    except Exception:
                        # report and continue; one malformed marker must not
                        # abort the whole scan (was a bare 'except', which
                        # also swallowed KeyboardInterrupt)
                        print("Error file - %r:%d" % (fn, i + 1))
                        import traceback
                        traceback.print_exc()

        scan_tot += 1

    print(" scanned %d files" % scan_tot)

    return deprecations_ls
def main():
    """Print every found deprecation, flagging entries past DEPRECATE_DAYS."""
    import datetime
    now = datetime.datetime.now()

    deps = deprecations()

    print("\nAll deprecations...")
    for when, (path, line), info in deps:
        age_days = (now - when).days
        if age_days > DEPRECATE_DAYS:
            info = "*** REMOVE! *** " + info
        print(" %r, days-old(%.2d), %s:%d - %s" % (when, age_days, path, line, info))

    print("\ndone!" if deps else "\nnone found!")


if __name__ == '__main__':
    main()

360
tests/python/CMakeLists.txt Normal file
View File

@@ -0,0 +1,360 @@
# ***** BEGIN GPL LICENSE BLOCK *****
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Contributor(s): Jacques Beaurain.
#
# ***** END GPL LICENSE BLOCK *****
# --env-system-scripts allows to run without the install target.
# Use '--write-blend=/tmp/test.blend' to view output
# Some tests are interesting but take too long to run
# and don't give deterministic results
# When TRUE, also register tests that are slow or non-deterministic.
set(USE_EXPERIMENTAL_TESTS FALSE)

# Shared test data lives in the 'lib/tests' checkout beside the source tree.
set(TEST_SRC_DIR ${CMAKE_SOURCE_DIR}/../lib/tests)
set(TEST_OUT_DIR ${CMAKE_BINARY_DIR}/tests)

# ugh, any better way to do this on testing only?
execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory ${TEST_OUT_DIR})

#~ if(NOT IS_DIRECTORY ${TEST_SRC_DIR})
#~   message(FATAL_ERROR "CMake test directory not found!")
#~ endif()

# all calls to blender use this
# Xcode places per-configuration binaries in a sub-directory (e.g. Debug/).
if(APPLE)
  if(${CMAKE_GENERATOR} MATCHES "Xcode")
    set(TEST_BLENDER_EXE ${EXECUTABLE_OUTPUT_PATH}/Debug/blender.app/Contents/MacOS/blender)
  else()
    set(TEST_BLENDER_EXE ${EXECUTABLE_OUTPUT_PATH}/blender.app/Contents/MacOS/blender)
  endif()
else()
  set(TEST_BLENDER_EXE ${EXECUTABLE_OUTPUT_PATH}/blender)
endif()

# for testing with valgrind prefix: valgrind --track-origins=yes --error-limit=no
# Common invocation: headless, no audio, factory settings, scripts taken from
# the source tree so tests can run without an install step.
set(TEST_BLENDER_EXE ${TEST_BLENDER_EXE} --background -noaudio --factory-startup --env-system-scripts ${CMAKE_SOURCE_DIR}/release/scripts)
# ------------------------------------------------------------------------------
# GENERAL PYTHON CORRECTNESS TESTS
# Check the key-map hierarchy declared in Python matches Blender's own.
add_test(script_load_keymap ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_keymap_completeness.py
)

# Enable/disable every addon to catch registration errors.
add_test(script_load_addons ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_load_addons.py
)

# Import every bundled script module to catch import-time errors.
add_test(script_load_modules ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_load_py_modules.py
)

# test running operators doesn't segfault under various conditions
if(USE_EXPERIMENTAL_TESTS)
  add_test(script_run_operators ${TEST_BLENDER_EXE}
    --python ${CMAKE_CURRENT_LIST_DIR}/bl_run_operators.py
  )
endif()

# test running mathutils testing script
add_test(script_pyapi_mathutils ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_pyapi_mathutils.py
)

# ------------------------------------------------------------------------------
# MODELING TESTS
# Opens the regression .blend and runs its embedded 'run_tests' text block.
add_test(bevel ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/modeling/bevel_regression.blend
  --python-text run_tests
)
# ------------------------------------------------------------------------------
# IO TESTS
# OBJ Import tests
# Each import test runs bl_test.py: the --run expression must return
# {'FINISHED'} and the resulting scene must hash to the given --md5.
add_test(import_obj_cube ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.obj\(filepath='${TEST_SRC_DIR}/io_tests/obj/cube.obj'\)
  --md5=39cce4bacac2d1b18fc470380279bc15 --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_obj_cube.blend
)

add_test(import_obj_nurbs_cyclic ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.obj\(filepath='${TEST_SRC_DIR}/io_tests/obj/nurbs_cyclic.obj'\)
  --md5=ad3c307e5883224a0492378cd32691ab --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_obj_nurbs_cyclic.blend
)

add_test(import_obj_makehuman ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.obj\(filepath='${TEST_SRC_DIR}/io_tests/obj/makehuman.obj'\)
  --md5=c9f78b185e58358daa4ecaecfa75464e --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_obj_makehuman.blend
)

# OBJ Export tests
# Export tests open a known .blend, run the exporter, then md5 the written
# file(s) listed via --md5_source (--md5_method=FILE).
add_test(export_obj_cube ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.obj\(filepath='${TEST_OUT_DIR}/export_obj_cube.obj',use_selection=False\)
  --md5_source=${TEST_OUT_DIR}/export_obj_cube.obj
  --md5_source=${TEST_OUT_DIR}/export_obj_cube.mtl
  --md5=70bdc394c2726203ad26c085176e3484 --md5_method=FILE
)

add_test(export_obj_nurbs ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.obj\(filepath='${TEST_OUT_DIR}/export_obj_nurbs.obj',use_selection=False,use_nurbs=True\)
  --md5_source=${TEST_OUT_DIR}/export_obj_nurbs.obj
  --md5_source=${TEST_OUT_DIR}/export_obj_nurbs.mtl
  --md5=a733ae4fa4a591ea9b0912da3af042de --md5_method=FILE
)

add_test(export_obj_all_objects ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.obj\(filepath='${TEST_OUT_DIR}/export_obj_all_objects.obj',use_selection=False,use_nurbs=True\)
  --md5_source=${TEST_OUT_DIR}/export_obj_all_objects.obj
  --md5_source=${TEST_OUT_DIR}/export_obj_all_objects.mtl
  --md5=04b3ed97cede07a19548fc518ce9f8ca --md5_method=FILE
)
# PLY Import tests
# Scene-hash checks after importing reference .ply files.
add_test(import_ply_cube ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_mesh.ply\(filepath='${TEST_SRC_DIR}/io_tests/ply/cube_ascii.ply'\)
  --md5=527134343c27fc0ea73115b85fbfd3ac --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_ply_cube.blend
)

add_test(import_ply_bunny ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_mesh.ply\(filepath='${TEST_SRC_DIR}/io_tests/ply/bunny2.ply'\)
  --md5=6ea5b8533400a17accf928b8fd024eaa --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_ply_bunny.blend
)

add_test(import_ply_small_holes ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_mesh.ply\(filepath='${TEST_SRC_DIR}/io_tests/ply/many_small_holes.ply'\)
  --md5=c3093e26ecae5b6d59fbbcf2a0d0b39f --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_ply_small_holes.blend
)

# PLY Export
# File-hash checks of the exporter output.
add_test(export_ply_cube_all_data ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/cube_all_data.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_mesh.ply\(filepath='${TEST_OUT_DIR}/export_ply_cube_all_data.ply'\)
  --md5_source=${TEST_OUT_DIR}/export_ply_cube_all_data.ply
  --md5=6adc3748ceae8298496f99d0e7e76c15 --md5_method=FILE
)

add_test(export_ply_suzanne_all_data ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/suzanne_all_data.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_mesh.ply\(filepath='${TEST_OUT_DIR}/export_ply_suzanne_all_data.ply'\)
  --md5_source=${TEST_OUT_DIR}/export_ply_suzanne_all_data.ply
  --md5=68ba23f02efd6511bfd093f45f703221 --md5_method=FILE
)

add_test(export_ply_vertices ${TEST_BLENDER_EXE} # lame, add a better one
  ${TEST_SRC_DIR}/io_tests/blend_geometry/vertices.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_mesh.ply\(filepath='${TEST_OUT_DIR}/export_ply_vertices.ply'\)
  --md5_source=${TEST_OUT_DIR}/export_ply_vertices.ply
  --md5=37faba0aa2014451b27f951afa92f870 --md5_method=FILE
)
# STL Import tests
# Scene-hash checks after importing reference .stl files.
add_test(import_stl_cube ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_mesh.stl\(filepath='${TEST_SRC_DIR}/io_tests/stl/cube.stl'\)
  --md5=8ceb5bb7e1cb5f4342fa1669988c66b4 --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_stl_cube.blend
)

add_test(import_stl_conrod ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_mesh.stl\(filepath='${TEST_SRC_DIR}/io_tests/stl/conrod.stl'\)
  --md5=690a4b8eb9002dcd8631c5a575ea7348 --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_stl_conrod.blend
)

add_test(import_stl_knot_max_simplified ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_mesh.stl\(filepath='${TEST_SRC_DIR}/io_tests/stl/knot_max_simplified.stl'\)
  --md5=baf82803f45a84ec4ddbad9cef57dd3e --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_stl_knot_max_simplified.blend
)

# STL Export
# File-hash checks of the exporter output.
add_test(export_stl_cube_all_data ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/cube_all_data.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_mesh.stl\(filepath='${TEST_OUT_DIR}/export_stl_cube_all_data.stl'\)
  --md5_source=${TEST_OUT_DIR}/export_stl_cube_all_data.stl
  --md5=64cb97c0cabb015e1c3f76369835075a --md5_method=FILE
)

add_test(export_stl_suzanne_all_data ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/suzanne_all_data.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_mesh.stl\(filepath='${TEST_OUT_DIR}/export_stl_suzanne_all_data.stl'\)
  --md5_source=${TEST_OUT_DIR}/export_stl_suzanne_all_data.stl
  --md5=e9b23c97c139ad64961c635105bb9192 --md5_method=FILE
)

add_test(export_stl_vertices ${TEST_BLENDER_EXE} # lame, add a better one
  ${TEST_SRC_DIR}/io_tests/blend_geometry/vertices.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_mesh.stl\(filepath='${TEST_OUT_DIR}/export_stl_vertices.stl'\)
  --md5_source=${TEST_OUT_DIR}/export_stl_vertices.stl
  --md5=3fd3c877e573beeebc782532cc005820 --md5_method=FILE
)
# X3D Import
# Scene-hash checks after importing reference .x3d files.
add_test(import_x3d_cube ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.x3d\(filepath='${TEST_SRC_DIR}/io_tests/x3d/color_cube.x3d'\)
  --md5=3fae9be004199c145941cd3f9f80ad7b --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_x3d_cube.blend
)

add_test(import_x3d_teapot ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.x3d\(filepath='${TEST_SRC_DIR}/io_tests/x3d/teapot.x3d'\)
  --md5=8ee196c71947dce4199d55698501691e --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_x3d_teapot.blend
)

add_test(import_x3d_suzanne_material ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.x3d\(filepath='${TEST_SRC_DIR}/io_tests/x3d/suzanne_material.x3d'\)
  --md5=3edea1353257d8b5a5f071942f417be6 --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_x3d_suzanne_material.blend
)

# X3D Export
# File-hash checks of the exporter output.
add_test(export_x3d_cube ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.x3d\(filepath='${TEST_OUT_DIR}/export_x3d_cube.x3d',use_selection=False\)
  --md5_source=${TEST_OUT_DIR}/export_x3d_cube.x3d
  --md5=05312d278fe41da33560fdfb9bdb268f --md5_method=FILE
)

add_test(export_x3d_nurbs ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.x3d\(filepath='${TEST_OUT_DIR}/export_x3d_nurbs.x3d',use_selection=False\)
  --md5_source=${TEST_OUT_DIR}/export_x3d_nurbs.x3d
  --md5=4286d4a2aa507ef78b22ddcbdcc88481 --md5_method=FILE
)

add_test(export_x3d_all_objects ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.x3d\(filepath='${TEST_OUT_DIR}/export_x3d_all_objects.x3d',use_selection=False\)
  --md5_source=${TEST_OUT_DIR}/export_x3d_all_objects.x3d
  --md5=f5f9fa4c5619a0eeab66685aafd2f7f0 --md5_method=FILE
)
# 3DS Import
# Scene-hash checks after importing reference .3ds files.
add_test(import_3ds_cube ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.autodesk_3ds\(filepath='${TEST_SRC_DIR}/io_tests/3ds/cube.3ds'\)
  --md5=cb5a45c35a343c3f5beca2a918472951 --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_3ds_cube.blend
)

add_test(import_3ds_hierarchy_lara ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.autodesk_3ds\(filepath='${TEST_SRC_DIR}/io_tests/3ds/hierarchy_lara.3ds'\)
  --md5=766c873d9fdb5f190e43796cfbae63b6 --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_3ds_hierarchy_lara.blend
)

add_test(import_3ds_hierarchy_greek_trireme ${TEST_BLENDER_EXE}
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.import_scene.autodesk_3ds\(filepath='${TEST_SRC_DIR}/io_tests/3ds/hierarchy_greek_trireme.3ds'\)
  --md5=b62ee30101e8999cb91ef4f8a8760056 --md5_method=SCENE
  --write-blend=${TEST_OUT_DIR}/import_3ds_hierarchy_greek_trireme.blend
)

# 3DS Export
# File-hash checks of the exporter output.
add_test(export_3ds_cube ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.autodesk_3ds\(filepath='${TEST_OUT_DIR}/export_3ds_cube.3ds',use_selection=False\)
  --md5_source=${TEST_OUT_DIR}/export_3ds_cube.3ds
  --md5=a31f5071b6c6dc7445b9099cdc7f63b3 --md5_method=FILE
)

add_test(export_3ds_nurbs ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.autodesk_3ds\(filepath='${TEST_OUT_DIR}/export_3ds_nurbs.3ds',use_selection=False\)
  --md5_source=${TEST_OUT_DIR}/export_3ds_nurbs.3ds
  --md5=5bdd21be3c80d814fbc83cb25edb08c2 --md5_method=FILE
)

add_test(export_3ds_all_objects ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.autodesk_3ds\(filepath='${TEST_OUT_DIR}/export_3ds_all_objects.3ds',use_selection=False\)
  --md5_source=${TEST_OUT_DIR}/export_3ds_all_objects.3ds
  --md5=68447761ab0ca38e1e22e7c177ed48a8 --md5_method=FILE
)
# FBX Export
# 'use_metadata=False' for reliable md5's
add_test(export_fbx_cube ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/all_quads.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.fbx\(filepath='${TEST_OUT_DIR}/export_fbx_cube.fbx',use_selection=False,use_metadata=False\)
  --md5_source=${TEST_OUT_DIR}/export_fbx_cube.fbx
  --md5=59a35577462f95f9a0b4e6035226ce9b --md5_method=FILE
)

add_test(export_fbx_nurbs ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_geometry/nurbs.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.fbx\(filepath='${TEST_OUT_DIR}/export_fbx_nurbs.fbx',use_selection=False,use_metadata=False\)
  --md5_source=${TEST_OUT_DIR}/export_fbx_nurbs.fbx
  --md5=d31875f18f613fa0c3b16e978f87f6f8 --md5_method=FILE
)

add_test(export_fbx_all_objects ${TEST_BLENDER_EXE}
  ${TEST_SRC_DIR}/io_tests/blend_scene/all_objects.blend
  --python ${CMAKE_CURRENT_LIST_DIR}/bl_test.py --
  --run={'FINISHED'}&bpy.ops.export_scene.fbx\(filepath='${TEST_OUT_DIR}/export_fbx_all_objects.fbx',use_selection=False,use_metadata=False\)
  --md5_source=${TEST_OUT_DIR}/export_fbx_all_objects.fbx
  --md5=b35eb2a9d0e73762ecae2278c25a38ac --md5_method=FILE
)

View File

@@ -0,0 +1,201 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
"""
Example Usage:
./blender.bin --background --python tests/python/batch_import.py -- \
--operator="bpy.ops.import_scene.obj" \
--path="/fe/obj" \
--match="*.obj" \
--start=0 --end=10 \
--save_path=/tmp/test
./blender.bin --background --python tests/python/batch_import.py -- \
--operator="bpy.ops.import_scene.autodesk_3ds" \
--path="/fe/" \
--match="*.3ds" \
--start=0 --end=1000 \
--save_path=/tmp/test
./blender.bin --background --addons io_curve_svg --python tests/python/batch_import.py -- \
--operator="bpy.ops.import_curve.svg" \
--path="/usr/" \
--match="*.svg" \
--start=0 --end=1000 \
--save_path=/tmp/test
"""
import os
import sys
def clear_scene():
    """Unlink every object from every scene, then purge leftover data-blocks."""
    import bpy
    orphaned = set()
    for scene in bpy.data.scenes:
        for ob in scene.objects[:]:
            scene.objects.unlink(ob)
            orphaned.add(ob)

    # remove obdata, for now only worry about the startup scene
    for collection in (bpy.data.objects, bpy.data.meshes, bpy.data.lamps, bpy.data.cameras):
        for datablock in collection:
            collection.remove(datablock)
def batch_import(operator="",
                 path="",
                 save_path="",
                 match="",
                 start=0,
                 end=sys.maxsize,
                 ):
    """Run an import operator over every matching file under *path*.

    :arg operator: dotted name of an import operator, e.g.
       "bpy.ops.import_scene.obj"; evaluated with eval().
    :arg path: directory tree searched recursively for input files.
    :arg save_path: when non-empty, save a .blend per imported file under
       this directory, mirroring the input layout.
    :arg match: fnmatch-style wildcard, matched case-insensitively.
    :arg start, end: slice of the sorted file list to process.
    """
    import addon_utils
    _reset_all = addon_utils.reset_all  # XXX, hack

    import fnmatch

    path = os.path.normpath(path)
    path = os.path.abspath(path)

    # case-insensitive wildcard match
    match_upper = match.upper()
    pattern_match = lambda a: fnmatch.fnmatchcase(a.upper(), match_upper)

    def file_generator(path):
        for dirpath, dirnames, filenames in os.walk(path):
            # skip '.svn'
            # NOTE(review): this only fires when *path* is relative — with the
            # absolute path computed above, hidden dirs are NOT skipped; confirm
            # whether that is intended.
            if dirpath.startswith("."):
                continue

            for filename in filenames:
                if pattern_match(filename):
                    yield os.path.join(dirpath, filename)

    print("Collecting %r files in %s" % (match, path), end="")

    files = list(file_generator(path))
    files_len = len(files)
    end = min(end, len(files))
    print(" found %d" % files_len, end="")

    files.sort()
    files = files[start:end]
    if len(files) != files_len:
        print(" using a subset in (%d, %d), total %d" % (start, end, len(files)), end="")

    import bpy
    # NOTE(review): eval() of a caller-supplied string — acceptable for a
    # test utility, never expose to untrusted input.
    op = eval(operator)

    tot_done = 0
    tot_fail = 0

    for i, f in enumerate(files):
        print(" %s(filepath=%r) # %d of %d" % (operator, f, i + start, len(files)))

        # hack so loading the new file doesn't undo our loaded addons
        addon_utils.reset_all = lambda: None  # XXX, hack

        bpy.ops.wm.read_factory_settings()

        addon_utils.reset_all = _reset_all  # XXX, hack

        clear_scene()

        result = op(filepath=f)

        if 'FINISHED' in result:
            tot_done += 1
        else:
            tot_fail += 1

        if save_path:
            fout = os.path.join(save_path, os.path.relpath(f, path))
            fout_blend = os.path.splitext(fout)[0] + ".blend"
            print("\tSaving: %r" % fout_blend)
            fout_dir = os.path.dirname(fout_blend)
            os.makedirs(fout_dir, exist_ok=True)
            bpy.ops.wm.save_as_mainfile(filepath=fout_blend)

    print("finished, done:%d, fail:%d" % (tot_done, tot_fail))
def main():
    """Parse command line options (everything after '--') and run batch_import()."""
    import optparse

    # get the args passed to blender after "--", all of which are ignored by
    # blender specifically so python may receive its own arguments
    argv = sys.argv

    if "--" not in argv:
        argv = []  # as if no args are passed
    else:
        argv = argv[argv.index("--") + 1:]  # get all args after "--"

    # When --help or no args are given, print this help
    usage_text = "Run blender in background mode with this script:"
    usage_text += " blender --background --python " + __file__ + " -- [options]"

    parser = optparse.OptionParser(usage=usage_text)

    # Example background utility, add some text and renders or saves it (with options)
    # Possible types are: string, int, long, choice, float and complex.
    parser.add_option("-o", "--operator", dest="operator", help="This text will be used to render an image", type="string")
    parser.add_option("-p", "--path", dest="path", help="Path to use for searching for files", type='string')
    parser.add_option("-m", "--match", dest="match", help="Wildcard to match filename", type="string")
    parser.add_option("-s", "--save_path", dest="save_path", help="Save the input file to a blend file in a new location", metavar='string')
    parser.add_option("-S", "--start", dest="start", help="From collected files, start with this index", metavar='int')
    parser.add_option("-E", "--end", dest="end", help="From collected files, end with this index", metavar='int')

    options, args = parser.parse_args(argv)  # In this example we wont use the args

    if not argv:
        parser.print_help()
        return

    if not options.operator:
        print("Error: --operator=\"some string\" argument not given, aborting.")
        parser.print_help()
        return

    # --start/--end arrive as strings (no type= given); default them here
    # before the int() conversion in the call below.
    if options.start is None:
        options.start = 0

    if options.end is None:
        options.end = sys.maxsize

    # Run the example function
    batch_import(operator=options.operator,
                 path=options.path,
                 save_path=options.save_path,
                 match=options.match,
                 start=int(options.start),
                 end=int(options.end),
                 )

    print("batch job finished, exiting")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,84 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# simple script to test 'keyconfig_utils' contains correct values.
from bpy_extras import keyconfig_utils
def check_maps():
    """Compare keyconfig_utils.KM_HIERARCHY against Blender's active
    keyconfig, printing mismatches.  Returns True when errors were found."""
    hierarchy = {}

    def collect(entries):
        for name, space_type, region_type, children in entries:
            hierarchy[name] = (space_type, region_type)
            collect(children)

    collect(keyconfig_utils.KM_HIERARCHY)

    import bpy
    keyconf = bpy.context.window_manager.keyconfigs.active
    names_bl = set(keyconf.keymaps.keys())
    names_py = set(hierarchy.keys())
    err = False

    # Check keyconfig contains only maps that exist in blender
    only_py = names_py - names_bl
    if only_py:
        print("Keymaps that are in 'keyconfig_utils' but not blender")
        for km_id in sorted(only_py):
            print("\t%s" % km_id)
        err = True

    only_bl = names_bl - names_py
    if only_bl:
        print("Keymaps that are in blender but not in 'keyconfig_utils'")
        for km_id in sorted(only_bl):
            km = keyconf.keymaps[km_id]
            print(" ('%s', '%s', '%s', [])," % (km_id, km.space_type, km.region_type))
        err = True

    # Check space/region's are OK
    print("Comparing keymap space/region types...")
    for km_id, km in keyconf.keymaps.items():
        expected = hierarchy.get(km_id)
        if expected is not None:
            space_type, region_type = expected
            if space_type != km.space_type or region_type != km.region_type:
                print(" Error:")
                print(" expected -- ('%s', '%s', '%s', [])," % (km_id, km.space_type, km.region_type))
                print(" got -- ('%s', '%s', '%s', [])," % (km_id, space_type, region_type))

    print("done!")
    return err
def main():
    """Run the keymap check; exit(1) in background mode so CTest fails."""
    import bpy
    failed = check_maps()
    if failed and bpy.app.background:
        # alert CTest we failed
        import sys
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,111 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# simple script to enable all addons, and disable
import bpy
import addon_utils
import sys
import imp
def disable_addons():
    """Disable every currently enabled addon, asserting none remain."""
    # first disable all
    addons = bpy.context.user_preferences.addons
    for name in list(addons.keys()):
        addon_utils.disable(name)
    assert not addons
def test_load_addons():
    """Enable every addon module, reporting those that fail to register."""
    modules = addon_utils.modules({})
    modules.sort(key=lambda mod: mod.__name__)

    disable_addons()

    addons = bpy.context.user_preferences.addons
    failed = []
    for mod in modules:
        name = mod.__name__
        print("\tenabling:", name)
        addon_utils.enable(name)
        if name not in addons:
            failed.append(name)

    if failed:
        print("addons failed to load (%d):" % len(failed))
        for name in failed:
            print(" %s" % name)
    else:
        print("addons all loaded without errors!")
    print("")
def reload_addons(do_reload=True, do_reverse=True):
    """Enable then disable every addon, twice, optionally reloading modules.

    :arg do_reload: when True, imp.reload() each addon module right after
       disabling it, so the next enable re-registers freshly loaded code.
    :arg do_reverse: when True, reverse the module order after each pass,
       in case order matters when it shouldn't.
    """
    modules = addon_utils.modules({})
    modules.sort(key=lambda mod: mod.__name__)
    addons = bpy.context.user_preferences.addons

    disable_addons()

    # Run twice each time.
    for i in (0, 1):
        for mod in modules:
            mod_name = mod.__name__
            print("\tenabling:", mod_name)
            addon_utils.enable(mod_name)
            assert(mod_name in addons)

        for mod in addon_utils.modules({}):
            mod_name = mod.__name__
            print("\tdisabling:", mod_name)
            addon_utils.disable(mod_name)
            assert(not (mod_name in addons))

            # now test reloading
            # NOTE(review): 'imp' is deprecated in modern Python (importlib.reload);
            # kept as-is to match the interpreter bundled at the time.
            if do_reload:
                imp.reload(sys.modules[mod_name])

        if do_reverse:
            # in case order matters when it shouldn't
            modules.reverse()
def main():
    """Load all addons, then exercise reload in three configurations."""
    # first load addons, print a list of all addons that fail
    test_load_addons()

    for do_reload, do_reverse in ((False, False), (False, True), (True, True)):
        reload_addons(do_reload=do_reload, do_reverse=do_reverse)


if __name__ == "__main__":
    # So a python error exits(1)
    try:
        main()
    except:
        import traceback
        traceback.print_exc()
        sys.exit(1)

View File

@@ -0,0 +1,167 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# simple script to enable all addons, and disable
import bpy
import addon_utils
import sys
import os
# Module names excluded from the load test (known not to import cleanly
# in this environment).
BLACKLIST = {
    "bl_i18n_utils",
    "cycles",
    "io_export_dxf",  # TODO, check on why this fails
    }
def source_list(path, filename_check=None):
    """Yield the full path of every file under *path*, skipping hidden dirs.

    :arg path: directory to walk recursively.
    :arg filename_check: optional predicate taking the full file *path*;
       when given, only matching paths are yielded.
    """
    from os.path import join
    for dirpath, dirnames, filenames in os.walk(path):
        # Skip hidden directories such as '.svn' or '.git'.
        # The original test ``dirpath.startswith(".")`` only worked when the
        # walk root was a relative path; pruning *dirnames* in place skips
        # hidden directories at any depth, for absolute roots too.
        dirnames[:] = [d for d in dirnames if not d.startswith(".")]
        for filename in filenames:
            filepath = join(dirpath, filename)
            if filename_check is None or filename_check(filepath):
                yield filepath
def load_addons():
    """Enable every addon module, asserting each one registers successfully."""
    modules = sorted(addon_utils.modules({}), key=lambda mod: mod.__name__)
    addons = bpy.context.user_preferences.addons

    # first disable all
    for name in list(addons.keys()):
        addon_utils.disable(name)
    assert not addons

    for mod in modules:
        name = mod.__name__
        addon_utils.enable(name)
        assert name in addons
def load_modules():
    """Import every script module Blender ships (and their submodules),
    then verify that no ``.py`` source file in the script paths was left
    unimported, except presets, templates and BLACKLIST entries.

    Raises an exception on a duplicate module name or an unloaded file.
    """
    modules = []
    module_paths = []
    # paths blender stores scripts in.
    paths = bpy.utils.script_paths()
    print("Paths:")
    for script_path in paths:
        print("\t'%s'" % script_path)
    #
    # find all sys.path we added
    for script_path in paths:
        for mod_dir in sys.path:
            if mod_dir.startswith(script_path):
                if mod_dir not in module_paths:
                    if os.path.exists(mod_dir):
                        module_paths.append(mod_dir)
    #
    # collect modules from our paths.
    module_names = {}
    for mod_dir in module_paths:
        # print("mod_dir", mod_dir)
        for mod, mod_full in bpy.path.module_names(mod_dir):
            if mod in BLACKLIST:
                continue
            if mod in module_names:
                mod_dir_prev, mod_full_prev = module_names[mod]
                raise Exception("Module found twice %r.\n (%r -> %r, %r -> %r)" %
                                (mod, mod_dir, mod_full, mod_dir_prev, mod_full_prev))
            modules.append(__import__(mod))
            module_names[mod] = mod_dir, mod_full
    del module_names
    #
    # now submodules (packages have an __init__ file; import their members too)
    for m in modules:
        filepath = m.__file__
        if os.path.basename(filepath).startswith("__init__."):
            mod_dir = os.path.dirname(filepath)
            for submod, submod_full in bpy.path.module_names(mod_dir):
                # fromlist is ignored, ugh.
                mod_name_full = m.__name__ + "." + submod
                __import__(mod_name_full)
                mod_imp = sys.modules[mod_name_full]
                # check we load what we ask for.
                assert(os.path.samefile(mod_imp.__file__, submod_full))
                modules.append(mod_imp)
    #
    # check which filepaths we didn't load
    source_files = []
    for mod_dir in module_paths:
        source_files.extend(source_list(mod_dir, filename_check=lambda f: f.endswith(".py")))
    source_files = list(set(source_files))
    source_files.sort()
    #
    # remove loaded files
    loaded_files = list({m.__file__ for m in modules})
    loaded_files.sort()
    for f in loaded_files:
        source_files.remove(f)
    #
    # test we tested all files except for presets and templates
    ignore_paths = [
        os.sep + "presets" + os.sep,
        os.sep + "templates" + os.sep,
    ] + [(os.sep + f + os.sep) for f in BLACKLIST]
    for f in source_files:
        ok = False
        for ignore in ignore_paths:
            if ignore in f:
                ok = True
        if not ok:
            raise Exception("Source file %r not loaded in test" % f)
    print("loaded %d modules" % len(loaded_files))
def main():
    """Run the addon-enable test, then the module import coverage test."""
    load_addons()
    load_modules()
if __name__ == "__main__":
    # So a python error exits(1)
    # NOTE: bare 'except' is deliberate so any failure maps to exit code 1.
    try:
        main()
    except:
        import traceback
        traceback.print_exc()
        sys.exit(1)

View File

@@ -0,0 +1,866 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Currently this script only generates images from different modifier
# combinations and does not validate they work correctly,
# this is because we don't get 1:1 match with bmesh.
#
# Later, we may have a way to check the results are valid.
# ./blender.bin --factory-startup --python tests/python/bl_mesh_modifiers.py
#
import math
# When True, only render one image per test (see render_gl_all_modes()),
# handy while developing this script.
USE_QUICK_RENDER = False
# True on BMesh-enabled builds; the mesh data API differs (polygons/loops
# vs the legacy 'faces' access used below).
IS_BMESH = hasattr(__import__("bpy").types, "LoopColors")
# -----------------------------------------------------------------------------
# utility functions
def render_gl(context, filepath, shade):
    """OpenGL-render the current view to *filepath* (PNG, 512x512) using
    viewport draw mode *shade* ('SOLID', 'WIREFRAME' or 'TEXTURED')."""
    def ctx_viewport_shade(context, shade):
        # apply the draw mode to every 3D view in the current screen
        for area in context.window.screen.areas:
            if area.type == 'VIEW_3D':
                space = area.spaces.active
                # rv3d = space.region_3d
                space.viewport_shade = shade
    import bpy
    scene = context.scene
    render = scene.render
    render.filepath = filepath
    render.image_settings.file_format = 'PNG'
    render.image_settings.color_mode = 'RGB'
    render.use_file_extension = True
    render.use_antialiasing = False
    # render size
    render.resolution_percentage = 100
    render.resolution_x = 512
    render.resolution_y = 512
    ctx_viewport_shade(context, shade)
    #~ # stop to inspect!
    #~ if filepath == "test_cube_shell_solidify_subsurf_wp_wire":
    #~     assert(0)
    #~ else:
    #~     return
    bpy.ops.render.opengl(write_still=True,
                          view_context=True)
def render_gl_all_modes(context, obj, filepath=""):
    """Render *obj* in object, edit and weight-paint modes, with several
    draw modes each; images are written as *filepath* plus a mode suffix."""
    assert(obj is not None)
    assert(filepath != "")
    scene = context.scene
    # avoid drawing outline/center dot
    bpy.ops.object.select_all(action='DESELECT')
    scene.objects.active = None
    # editmode
    scene.tool_settings.mesh_select_mode = False, True, False
    # render
    render_gl(context, filepath + "_ob_solid", shade='SOLID')
    if USE_QUICK_RENDER:
        # a single image is enough while iterating on this script
        return
    render_gl(context, filepath + "_ob_wire", shade='WIREFRAME')
    render_gl(context, filepath + "_ob_textured", shade='TEXTURED')
    # -------------------------------------------------------------------------
    # not just draw modes, but object modes!
    scene.objects.active = obj
    bpy.ops.object.mode_set(mode='EDIT', toggle=False)
    bpy.ops.mesh.select_all(action='DESELECT')
    render_gl(context, filepath + "_edit_wire", shade='WIREFRAME')
    render_gl(context, filepath + "_edit_solid", shade='SOLID')
    render_gl(context, filepath + "_edit_textured", shade='TEXTURED')
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
    bpy.ops.object.mode_set(mode='WEIGHT_PAINT', toggle=False)
    render_gl(context, filepath + "_wp_wire", shade='WIREFRAME')
    assert(1)
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
    scene.objects.active = None
def ctx_clear_scene():  # copied from batch_import.py
    """Unlink every object from every scene, then remove object data
    (objects, meshes, lamps, cameras) so each test starts clean."""
    import bpy
    unique_obs = set()
    for scene in bpy.data.scenes:
        for obj in scene.objects[:]:
            scene.objects.unlink(obj)
            unique_obs.add(obj)
    # remove obdata, for now only worry about the startup scene
    for bpy_data_iter in (bpy.data.objects,
                          bpy.data.meshes,
                          bpy.data.lamps,
                          bpy.data.cameras,
                          ):
        for id_data in bpy_data_iter:
            bpy_data_iter.remove(id_data)
def ctx_viewport_camera(context):
    # because gl render without view_context has no shading option.
    # switch every 3D view to look through the scene camera
    for area in context.window.screen.areas:
        if area.type == 'VIEW_3D':
            space = area.spaces.active
            space.region_3d.view_perspective = 'CAMERA'
def ctx_camera_setup(context,
                     location=(0.0, 0.0, 0.0),
                     lookat=(0.0, 0.0, 0.0),
                     # most likely the following vars can be left as defaults
                     up=(0.0, 0.0, 1.0),
                     lookat_axis='-Z',
                     up_axis='Y',
                     ):
    """Add a camera at *location* aimed at *lookat*, make it the scene
    camera and switch the viewport to it.  Returns the camera object.

    NOTE(review): the 'up' parameter is currently unused; orientation comes
    from to_track_quat(lookat_axis, up_axis).
    """
    camera = bpy.data.cameras.new(whoami())
    obj = bpy.data.objects.new(whoami(), camera)
    scene = context.scene
    scene.objects.link(obj)
    scene.camera = obj
    from mathutils import Vector, Matrix
    # setup transform: aim the camera's -Z at the lookat point
    view_vec = Vector(lookat) - Vector(location)
    rot_mat = view_vec.to_track_quat(lookat_axis, up_axis).to_matrix().to_4x4()
    tra_mat = Matrix.Translation(location)
    obj.matrix_world = tra_mat * rot_mat
    ctx_viewport_camera(context)
    return obj
# -----------------------------------------------------------------------------
# inspect functions
import inspect
# functions
def whoami():
    """Return the name of the function that called this one."""
    caller = inspect.stack()[1]
    return caller.function
def whosdaddy():
    """Return the name of the caller's caller (two frames up the stack)."""
    grandparent = inspect.stack()[2]
    return grandparent.function
# -----------------------------------------------------------------------------
# models (defaults)
def defaults_object(obj):
    """Display settings applied to every test object (wire overlay,
    smooth-shaded faces) so renders are comparable."""
    obj.show_wire = True
    if obj.type == 'MESH':
        mesh = obj.data
        mesh.show_all_edges = True
        mesh.show_normal_vertex = True
        # lame!  (face access differs between BMesh and legacy meshes)
        if IS_BMESH:
            for poly in mesh.polygons:
                poly.use_smooth = True
        else:
            for face in mesh.faces:
                face.use_smooth = True
def defaults_modifier(mod):
    """Display settings applied to every test modifier."""
    mod.show_in_editmode = True
    mod.show_on_cage = True
# -----------------------------------------------------------------------------
# models (utils)
# These helpers only exist on BMesh builds, where per-face data is
# addressed through loop ranges instead of legacy MeshFace attributes.
if IS_BMESH:
    def mesh_bmesh_poly_elems(poly, elems):
        # slice of *elems* (loops / uv data / color data) used by *poly*
        vert_start = poly.loop_start
        vert_total = poly.loop_total
        return elems[vert_start:vert_start + vert_total]
    def mesh_bmesh_poly_vertices(poly):
        # vertex indices used by *poly*, in loop order
        return [loop.vertex_index
                for loop in mesh_bmesh_poly_elems(poly, poly.id_data.loops)]
def mesh_bounds(mesh):
    """Return the axis-aligned bounds of *mesh* as a pair of 3-tuples:
    ((xmin, ymin, zmin), (xmax, ymax, zmax)).
    """
    big = 100000000.0
    bb_min = [+big, +big, +big]
    bb_max = [-big, -big, -big]
    for vert in mesh.vertices:
        for axis, value in enumerate(vert.co):
            if value < bb_min[axis]:
                bb_min[axis] = value
            if value > bb_max[axis]:
                bb_max[axis] = value
    return (bb_min[0], bb_min[1], bb_min[2]), (bb_max[0], bb_max[1], bb_max[2])
def mesh_uv_add(obj):
    """Add a UV layer to *obj*, assigning a fixed quad of UVs to every
    face corner; returns the new layer."""
    uvs = ((0.0, 0.0),
           (0.0, 1.0),
           (1.0, 1.0),
           (1.0, 0.0))
    uv_lay = obj.data.uv_textures.new()
    if IS_BMESH:
        # XXX, odd that we need to do this. until UV's and texface
        # are separated we will need to keep it
        uv_loops = obj.data.uv_layers[-1]
        uv_list = uv_loops.data[:]
        for poly in obj.data.polygons:
            poly_uvs = mesh_bmesh_poly_elems(poly, uv_list)
            for i, c in enumerate(poly_uvs):
                c.uv = uvs[i % 4]
    else:
        for uv in uv_lay.data:
            uv.uv1 = uvs[0]
            uv.uv2 = uvs[1]
            uv.uv3 = uvs[2]
            uv.uv4 = uvs[3]
    return uv_lay
def mesh_vcol_add(obj, mode=0):
    """Add a vertex-color layer, coloring each face corner from a fixed
    palette indexed by vertex index; returns the new layer.

    NOTE(review): 'mode' is currently unused.
    """
    colors = ((0.0, 0.0, 0.0),  # black
              (1.0, 0.0, 0.0),  # red
              (0.0, 1.0, 0.0),  # green
              (0.0, 0.0, 1.0),  # blue
              (1.0, 1.0, 0.0),  # yellow
              (0.0, 1.0, 1.0),  # cyan
              (1.0, 0.0, 1.0),  # magenta
              (1.0, 1.0, 1.0),  # white
              )
    def colors_get(i):
        # wrap around so any vertex index maps to a palette entry
        return colors[i % len(colors)]
    vcol_lay = obj.data.vertex_colors.new()
    mesh = obj.data
    if IS_BMESH:
        col_list = vcol_lay.data[:]
        for poly in mesh.polygons:
            face_verts = mesh_bmesh_poly_vertices(poly)
            poly_cols = mesh_bmesh_poly_elems(poly, col_list)
            for i, c in enumerate(poly_cols):
                c.color = colors_get(face_verts[i])
    else:
        for i, col in enumerate(vcol_lay.data):
            face_verts = mesh.faces[i].vertices
            col.color1 = colors_get(face_verts[0])
            col.color2 = colors_get(face_verts[1])
            col.color3 = colors_get(face_verts[2])
            if len(face_verts) == 4:
                col.color4 = colors_get(face_verts[3])
    return vcol_lay
def mesh_vgroup_add(obj, name="Group", axis=0, invert=False, mode=0):
    """Add a vertex group whose weights form a gradient along *axis*
    (0=X, 1=Y, 2=Z); *invert* flips the gradient.  Returns the group.

    NOTE(review): 'mode' is currently unused.
    """
    mesh = obj.data
    vgroup = obj.vertex_groups.new(name=name)
    vgroup.add(list(range(len(mesh.vertices))), 1.0, 'REPLACE')
    group_index = len(obj.vertex_groups) - 1
    min_bb, max_bb = mesh_bounds(mesh)
    range_axis = max_bb[axis] - min_bb[axis]
    # gradient: weight 0..1 proportional to position along the axis
    for v in mesh.vertices:
        for vg in v.groups:
            if vg.group == group_index:
                f = (v.co[axis] - min_bb[axis]) / range_axis
                vg.weight = 1.0 - f if invert else f
    return vgroup
def mesh_shape_add(obj, mode=0):
    # TODO: shape-key test data not yet implemented
    pass
def mesh_armature_add(obj, mode=0):
    # TODO: armature test data not yet implemented
    pass
# -----------------------------------------------------------------------------
# modifiers
def modifier_subsurf_add(scene, obj, levels=2):
    """Add a subsurf modifier with matching view/render levels."""
    mod = obj.modifiers.new(name=whoami(), type='SUBSURF')
    defaults_modifier(mod)
    mod.levels = levels
    mod.render_levels = levels
    return mod
def modifier_armature_add(scene, obj):
    """Add an armature modifier to *obj* backed by a new 2-bone armature
    object, with matching gradient vertex groups ("Bone.A"/"Bone.B") and a
    posed rotation on the second bone so deformation is visible.
    Returns the modifier.
    """
    mod = obj.modifiers.new(name=whoami(), type='ARMATURE')
    defaults_modifier(mod)
    arm_data = bpy.data.armatures.new(whoami())
    obj_arm = bpy.data.objects.new(whoami(), arm_data)
    scene.objects.link(obj_arm)
    obj_arm.select = True
    scene.objects.active = obj_arm
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
    bpy.ops.object.mode_set(mode='EDIT', toggle=False)
    # XXX, annoying, remove bone.
    # Fix: edit_bones is a property of the armature *data*, not the object;
    # the previous 'obj_arm.edit_bones' raised AttributeError.
    while arm_data.edit_bones:
        arm_data.edit_bones.remove(arm_data.edit_bones[-1])
    bone_a = arm_data.edit_bones.new("Bone.A")
    bone_b = arm_data.edit_bones.new("Bone.B")
    bone_b.parent = bone_a
    bone_a.head = -1, 0, 0
    bone_a.tail = 0, 0, 0
    bone_b.head = 0, 0, 0
    bone_b.tail = 1, 0, 0
    # Get armature animation data
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
    # 45d armature
    obj_arm.pose.bones["Bone.B"].rotation_quaternion = 1, -0.5, 0, 0
    # set back to the original
    scene.objects.active = obj
    # display options
    obj_arm.show_x_ray = True
    arm_data.draw_type = 'STICK'
    # apply to modifier
    mod.object = obj_arm
    mesh_vgroup_add(obj, name="Bone.A", axis=0, invert=True)
    mesh_vgroup_add(obj, name="Bone.B", axis=0, invert=False)
    return mod
def modifier_mirror_add(scene, obj):
    """Add a mirror modifier with default settings."""
    mod = obj.modifiers.new(name=whoami(), type='MIRROR')
    defaults_modifier(mod)
    return mod
def modifier_solidify_add(scene, obj, thickness=0.25):
    """Add a solidify modifier with the given shell *thickness*."""
    mod = obj.modifiers.new(name=whoami(), type='SOLIDIFY')
    defaults_modifier(mod)
    mod.thickness = thickness
    return mod
def modifier_hook_add(scene, obj, use_vgroup=True):
    """Add a hook modifier by selecting the first face and running the
    hook-to-new-object operator; the hook object is then rotated so the
    deformation shows.  When *use_vgroup* is set, the hook is limited to
    the object's first vertex group.  Returns the modifier.
    """
    scene.objects.active = obj
    # no nice way to add hooks from py api yet
    # assume object mode, hook first face!
    mesh = obj.data
    if use_vgroup:
        for v in mesh.vertices:
            v.select = True
    else:
        for v in mesh.vertices:
            v.select = False
    if IS_BMESH:
        face_verts = mesh_bmesh_poly_vertices(mesh.polygons[0])
    else:
        face_verts = mesh.faces[0].vertices[:]
    # Fix: iterate the version-independent 'face_verts' computed above;
    # 'mesh.faces' does not exist on BMesh builds, so the previous
    # 'for i in mesh.faces[0].vertices' raised AttributeError there.
    for i in face_verts:
        mesh.vertices[i].select = True
    bpy.ops.object.mode_set(mode='EDIT', toggle=False)
    bpy.ops.object.hook_add_newob()
    bpy.ops.object.mode_set(mode='OBJECT', toggle=False)
    # mod = obj.modifiers.new(name=whoami(), type='HOOK')
    mod = obj.modifiers[-1]
    defaults_modifier(mod)
    obj_hook = mod.object
    obj_hook.rotation_euler = 0, math.radians(45), 0
    obj_hook.show_x_ray = True
    if use_vgroup:
        mod.vertex_group = obj.vertex_groups[0].name
    return mod
def modifier_decimate_add(scene, obj):
    """Add a decimate modifier collapsing to a third of the geometry."""
    mod = obj.modifiers.new(name=whoami(), type='DECIMATE')
    defaults_modifier(mod)
    mod.ratio = 1 / 3
    return mod
def modifier_build_add(scene, obj):
    """Add a build modifier timed so some faces show mid-animation."""
    mod = obj.modifiers.new(name=whoami(), type='BUILD')
    defaults_modifier(mod)
    # ensure we display some faces
    if IS_BMESH:
        totface = len(obj.data.polygons)
    else:
        totface = len(obj.data.faces)
    mod.frame_start = totface // 2
    mod.frame_duration = totface
    return mod
def modifier_mask_add(scene, obj):
    """Add a mask modifier driven by the object's first vertex group."""
    mod = obj.modifiers.new(name=whoami(), type='MASK')
    defaults_modifier(mod)
    mod.vertex_group = obj.vertex_groups[0].name
    return mod
# -----------------------------------------------------------------------------
# models
# useful since its solid boxy shape but simple enough to debug errors
# vertex coordinates for the 'cube_like' test mesh (mixed tris/quads,
# solid and boxy - simple enough to debug errors by eye)
cube_like_vertices = (
    (1, 1, -1),
    (1, -1, -1),
    (-1, -1, -1),
    (-1, 1, -1),
    (1, 1, 1),
    (1, -1, 1),
    (-1, -1, 1),
    (-1, 1, 1),
    (0, -1, -1),
    (1, 0, -1),
    (0, 1, -1),
    (-1, 0, -1),
    (1, 0, 1),
    (0, -1, 1),
    (-1, 0, 1),
    (0, 1, 1),
    (1, -1, 0),
    (1, 1, 0),
    (-1, -1, 0),
    (-1, 1, 0),
    (0, 0, -1),
    (0, 0, 1),
    (1, 0, 0),
    (0, -1, 0),
    (-1, 0, 0),
    (2, 0, 0),
    (2, 0, -1),
    (2, 1, 0),
    (2, 1, -1),
    (0, 1, 2),
    (0, 0, 2),
    (-1, 0, 2),
    (-1, 1, 2),
    (-1, 0, 3),
    (-1, 1, 3),
    (0, 1, 3),
    (0, 0, 3),
    )
# face vertex indices (tris and quads) for the 'cube_like' mesh
cube_like_faces = (
    (0, 9, 20, 10),
    (0, 10, 17),
    (0, 17, 27, 28),
    (1, 16, 23, 8),
    (2, 18, 24, 11),
    (3, 19, 10),
    (4, 15, 21, 12),
    (4, 17, 15),
    (7, 14, 31, 32),
    (7, 15, 19),
    (8, 23, 18, 2),
    (9, 0, 28, 26),
    (9, 1, 8, 20),
    (9, 22, 16, 1),
    (10, 20, 11, 3),
    (11, 24, 19, 3),
    (12, 21, 13, 5),
    (13, 6, 18),
    (14, 21, 30, 31),
    (15, 7, 32, 29),
    (15, 17, 10, 19),
    (16, 5, 13, 23),
    (17, 4, 12, 22),
    (17, 22, 25, 27),
    (18, 6, 14, 24),
    (20, 8, 2, 11),
    (21, 14, 6, 13),
    (21, 15, 29, 30),
    (22, 9, 26, 25),
    (22, 12, 5, 16),
    (23, 13, 18),
    (24, 14, 7, 19),
    (28, 27, 25, 26),
    (29, 32, 34, 35),
    (30, 29, 35, 36),
    (31, 30, 36, 33),
    (32, 31, 33, 34),
    (35, 34, 33, 36),
    )
# useful since its a shell for solidify and it can be mirrored
cube_shell_vertices = (
    (0, 0, 1),
    (0, 1, 1),
    (-1, 1, 1),
    (-1, 0, 1),
    (0, 0, 0),
    (0, 1, 0),
    (-1, 1, 0),
    (-1, 0, 0),
    (-1, -1, 0),
    (0, -1, 0),
    (0, 0, -1),
    (0, 1, -1),
    )
# face vertex indices for the open 'cube_shell' mesh
cube_shell_face = (
    (0, 1, 2, 3),
    (0, 3, 8, 9),
    (1, 5, 6, 2),
    (2, 6, 7, 3),
    (3, 7, 8),
    (4, 7, 10),
    (6, 5, 11),
    (7, 4, 9, 8),
    (10, 7, 6, 11),
    )
def make_cube(scene):
    """Add a default cube via the operator and return the new object."""
    bpy.ops.mesh.primitive_cube_add(view_align=False,
                                    enter_editmode=False,
                                    location=(0, 0, 0),
                                    rotation=(0, 0, 0),
                                    )
    obj = scene.objects.active
    defaults_object(obj)
    return obj
def make_cube_extra(scene):
    """Cube with UV, vertex-color and vertex-group layers added."""
    obj = make_cube(scene)
    # extra data layers
    mesh_uv_add(obj)
    mesh_vcol_add(obj)
    mesh_vgroup_add(obj)
    return obj
def make_cube_like(scene):
    """Build the 'cube_like' test mesh from the module-level data."""
    mesh = bpy.data.meshes.new(whoami())
    mesh.from_pydata(cube_like_vertices, (), cube_like_faces)
    mesh.update()  # add edges
    obj = bpy.data.objects.new(whoami(), mesh)
    scene.objects.link(obj)
    defaults_object(obj)
    return obj
def make_cube_like_extra(scene):
    """'cube_like' mesh with UV, vertex-color and vertex-group layers."""
    obj = make_cube_like(scene)
    # extra data layers
    mesh_uv_add(obj)
    mesh_vcol_add(obj)
    mesh_vgroup_add(obj)
    return obj
def make_cube_shell(scene):
    """Build the open 'cube_shell' test mesh from the module-level data."""
    mesh = bpy.data.meshes.new(whoami())
    mesh.from_pydata(cube_shell_vertices, (), cube_shell_face)
    mesh.update()  # add edges
    obj = bpy.data.objects.new(whoami(), mesh)
    scene.objects.link(obj)
    defaults_object(obj)
    return obj
def make_cube_shell_extra(scene):
    """'cube_shell' mesh with UV, vertex-color and vertex-group layers."""
    obj = make_cube_shell(scene)
    # extra data layers
    mesh_uv_add(obj)
    mesh_vcol_add(obj)
    mesh_vgroup_add(obj)
    return obj
def make_monkey(scene):
    """Add the monkey (Suzanne) primitive and return the new object."""
    bpy.ops.mesh.primitive_monkey_add(view_align=False,
                                      enter_editmode=False,
                                      location=(0, 0, 0),
                                      rotation=(0, 0, 0),
                                      )
    obj = scene.objects.active
    defaults_object(obj)
    return obj
def make_monkey_extra(scene):
    """Monkey mesh with UV, vertex-color and vertex-group layers."""
    obj = make_monkey(scene)
    # extra data layers
    mesh_uv_add(obj)
    mesh_vcol_add(obj)
    mesh_vgroup_add(obj)
    return obj
# -----------------------------------------------------------------------------
# tests (utils)
# Each entry is (test_name, ((modifier_add_func, kwargs), ...)); the
# modifier functions are applied in order by apply_test().
global_tests = []
global_tests.append(("none",
                     (),
                     ))
# single
global_tests.append(("subsurf_single",
                     ((modifier_subsurf_add, dict(levels=2)), ),
                     ))
global_tests.append(("armature_single",
                     ((modifier_armature_add, dict()), ),
                     ))
global_tests.append(("mirror_single",
                     ((modifier_mirror_add, dict()), ),
                     ))
global_tests.append(("hook_single",
                     ((modifier_hook_add, dict()), ),
                     ))
global_tests.append(("decimate_single",
                     ((modifier_decimate_add, dict()), ),
                     ))
global_tests.append(("build_single",
                     ((modifier_build_add, dict()), ),
                     ))
global_tests.append(("mask_single",
                     ((modifier_mask_add, dict()), ),
                     ))
# combinations
global_tests.append(("mirror_subsurf",
                     ((modifier_mirror_add, dict()),
                      (modifier_subsurf_add, dict(levels=2))),
                     ))
global_tests.append(("solidify_subsurf",
                     ((modifier_solidify_add, dict()),
                      (modifier_subsurf_add, dict(levels=2))),
                     ))
def apply_test(test, scene, obj,
               render_func=None,
               render_args=None,
               render_kwargs=None,
               ):
    """Apply one modifier test to *obj*, then render the result.

    :arg test: a ``(name, ((modifier_add_func, kwargs), ...))`` pair,
       as stored in ``global_tests``.
    :arg render_func: called as ``render_func(*render_args, **render_kwargs)``
       with the test name appended to the 'filepath' keyword.
    """
    test_name, test_funcs = test
    # apply each modifier in order
    for cb, kwargs in test_funcs:
        cb(scene, obj, **kwargs)
    # Robustness: the defaults are declared as None, so honor them instead
    # of crashing (previously None.copy() / *None raised).
    render_kwargs_copy = dict(render_kwargs) if render_kwargs is not None else {}
    # add test name in filepath
    render_kwargs_copy["filepath"] = render_kwargs_copy.get("filepath", "") + "_%s" % test_name
    if render_func is not None:
        render_func(*(render_args if render_args is not None else ()),
                    **render_kwargs_copy)
# -----------------------------------------------------------------------------
# tests themselves!
# having the 'test_' prefix automatically means these functions are called
# for testing
def test_cube(context, test):
    """Run *test* on a cube with extra data layers and render all modes."""
    scene = context.scene
    obj = make_cube_extra(scene)
    ctx_camera_setup(context, location=(3, 3, 3))
    apply_test(test, scene, obj,
               render_func=render_gl_all_modes,
               render_args=(context, obj),
               render_kwargs=dict(filepath=whoami()))
def test_cube_like(context, test):
    """Run *test* on the 'cube_like' mesh and render all modes."""
    scene = context.scene
    obj = make_cube_like_extra(scene)
    ctx_camera_setup(context, location=(5, 5, 5))
    apply_test(test, scene, obj,
               render_func=render_gl_all_modes,
               render_args=(context, obj),
               render_kwargs=dict(filepath=whoami()))
def test_cube_shell(context, test):
    """Run *test* on the open 'cube_shell' mesh and render all modes."""
    scene = context.scene
    obj = make_cube_shell_extra(scene)
    ctx_camera_setup(context, location=(4, 4, 4))
    apply_test(test, scene, obj,
               render_func=render_gl_all_modes,
               render_args=(context, obj),
               render_kwargs=dict(filepath=whoami()))
# -----------------------------------------------------------------------------
# call all tests
def main():
    """Find every module-level 'test_*' callable and run it once per entry
    in 'global_tests', clearing the scene between runs."""
    print("Calling main!")
    #render_gl(bpy.context, "/testme")
    #ctx_clear_scene()
    context = bpy.context
    ctx_clear_scene()
    # run all tests
    for key, val in sorted(globals().items()):
        if key.startswith("test_") and hasattr(val, "__call__"):
            print("calling:", key)
            for t in global_tests:
                val(context, test=t)
                ctx_clear_scene()
# -----------------------------------------------------------------------------
# annoying workaround for theme initialization
if __name__ == "__main__":
    import bpy
    from bpy.app.handlers import persistent
    # Run main() from a scene-update handler rather than directly: the
    # first update is skipped (theme/context not ready yet), the second
    # removes the handler and runs the tests.
    @persistent
    def load_handler(dummy):
        print("Load Handler:", bpy.data.filepath)
        if load_handler.first is False:
            bpy.app.handlers.scene_update_post.remove(load_handler)
            try:
                main()
                import sys
                sys.exit(0)
            except:
                import traceback
                traceback.print_exc()
                import sys
                # sys.exit(1)  # comment to debug
        else:
            load_handler.first = False
    load_handler.first = True
    bpy.app.handlers.scene_update_post.append(load_handler)

View File

@@ -0,0 +1,161 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Simple script to check mash validate code.
# XXX Should be extended with many more "wrong cases"!
import bpy
import sys
import random
# Deliberately broken test meshes: each entry is a sequence of
# (verts, edges, loops, polygons) tuples, where polygons are
# (loop_start, loop_total) pairs indexing into the flat loop list.
# Note the duplicated loop runs - Mesh.validate() must detect/fix them.
MESHES = {
    "test1": (
        (
            (  # Verts
                (-1.0, -1.0, 0.0),
                (-1.0, 0.0, 0.0),
                (-1.0, 1.0, 0.0),
                (0.0, -1.0, 0.0),
                (0.0, 0.0, 0.0),
                (0.0, 1.0, 0.0),
                (1.0, -1.0, 0.0),
                (1.0, 0.0, 0.0),
                (1.5, 0.5, 0.0),
                (1.0, 1.0, 0.0),
            ),
            (  # Edges
            ),
            (  # Loops
                0, 1, 4, 3,
                3, 4, 6,
                1, 2, 5, 4,
                3, 4, 6,
                4, 7, 6,
                4, 5, 9, 4, 8, 7,
            ),
            (  # Polygons
                (0, 4),
                (4, 3),
                (7, 4),
                (11, 3),
                (14, 3),
                (16, 6),
            ),
        ),
    ),
}
# bpy.ops.mesh operator names used to generate the built-in test meshes
BUILTINS = (
    "primitive_plane_add",
    "primitive_cube_add",
    "primitive_circle_add",
    "primitive_uv_sphere_add",
    "primitive_ico_sphere_add",
    "primitive_cylinder_add",
    "primitive_cone_add",
    "primitive_grid_add",
    "primitive_monkey_add",
    "primitive_torus_add",
    )
# copies of each builtin mesh to create
BUILTINS_NBR = 4
# number of random corruptions applied per mesh
BUILTINS_NBRCHANGES = 5
def test_meshes():
    """Build each hand-written (intentionally invalid) mesh from MESHES
    and run Mesh.validate() on it until it reports no more fixes."""
    for m in MESHES["test1"]:
        bpy.ops.object.add(type="MESH")
        data = bpy.context.active_object.data
        # Vertices.
        data.vertices.add(len(m[0]))
        for idx, v in enumerate(m[0]):
            data.vertices[idx].co = v
        # Edges.
        data.edges.add(len(m[1]))
        for idx, e in enumerate(m[1]):
            data.edges[idx].vertices = e
        # Loops.
        data.loops.add(len(m[2]))
        for idx, v in enumerate(m[2]):
            data.loops[idx].vertex_index = v
        # Polygons.
        data.polygons.add(len(m[3]))
        for idx, l in enumerate(m[3]):
            data.polygons[idx].loop_start = l[0]
            data.polygons[idx].loop_total = l[1]
        # validate() returns True while it is still fixing problems
        while data.validate(verbose=True):
            pass
def test_builtins():
    """Create several copies of each builtin primitive, randomly corrupt
    their edge/loop/polygon indices, then validate() until clean."""
    for x, func in enumerate(BUILTINS):
        for y in range(BUILTINS_NBR):
            getattr(bpy.ops.mesh, func)(location=(x * 2.5, y * 2.5, 0))
            data = bpy.context.active_object.data
            # NOTE: bare except/pass is deliberate - the random corruption
            # below may itself raise; we only care that validate() copes.
            try:
                for n in range(BUILTINS_NBRCHANGES):
                    rnd = random.randint(1, 3)
                    if rnd == 1:
                        # Make fun with some edge.
                        e = random.randrange(0, len(data.edges))
                        data.edges[e].vertices[random.randint(0, 1)] = \
                            random.randrange(0, len(data.vertices) * 2)
                    elif rnd == 2:
                        # Make fun with some loop.
                        l = random.randrange(0, len(data.loops))
                        if random.randint(0, 1):
                            data.loops[l].vertex_index = \
                                random.randrange(0, len(data.vertices) * 2)
                        else:
                            data.loops[l].edge_index = \
                                random.randrange(0, len(data.edges) * 2)
                    elif rnd == 3:
                        # Make fun with some polygons.
                        p = random.randrange(0, len(data.polygons))
                        if random.randint(0, 1):
                            data.polygons[p].loop_start = \
                                random.randrange(0, len(data.loops))
                        else:
                            data.polygons[p].loop_total = \
                                random.randrange(0, 10)
            except:
                pass
            # validate() returns True while it is still fixing problems
            while data.validate(verbose=True):
                pass
def main():
    """Run the mesh validation tests."""
    test_builtins()
    test_meshes()
if __name__ == "__main__":
    # So a python error exits(1)
    # NOTE: bare 'except' is deliberate so any failure maps to exit code 1.
    try:
        main()
    except:
        import traceback
        traceback.print_exc()
        sys.exit(1)

View File

@@ -0,0 +1,307 @@
# ./blender.bin --background -noaudio --python tests/python/bl_pyapi_mathutils.py
import unittest
from test import support
from mathutils import Matrix, Vector
from mathutils import kdtree
import math
# keep globals immutable
# keep globals immutable
vector_data = (
    (1.0, 0.0, 0.0),
    (0.0, 1.0, 0.0),
    (0.0, 0.0, 1.0),
    (1.0, 1.0, 1.0),
    (0.33783, 0.715698, -0.611206),
    (-0.944031, -0.326599, -0.045624),
    (-0.101074, -0.416443, -0.903503),
    (0.799286, 0.49411, -0.341949),
    (-0.854645, 0.518036, 0.033936),
    (0.42514, -0.437866, -0.792114),
    (-0.358948, 0.597046, 0.717377),
    (-0.985413, 0.144714, 0.089294),  # pep8 fix: space after comma
    )
# get data at different scales: every vector above at 12 scale factors
# (6 magnitudes x both signs), plus the zero vector appended at the end.
vector_data = sum(
    (tuple(tuple(a * scale for a in v) for v in vector_data)
     for scale in (s * sign for s in (0.0001, 0.1, 1.0, 10.0, 1000.0, 100000.0)
                   for sign in (1.0, -1.0))), ()) + ((0.0, 0.0, 0.0),)
class MatrixTesting(unittest.TestCase):
    """Tests for mathutils.Matrix: indexing, assignment, conversion,
    translation access, multiplication and inversion."""
    def test_matrix_column_access(self):
        # NOTE(review): despite the test name, Matrix() takes row tuples
        # and mat[i] returns row i as a Vector - that is what is checked.
        mat = Matrix(((1, 11, 111),
                      (2, 22, 222),
                      (3, 33, 333),
                      (4, 44, 444)))
        self.assertEqual(mat[0], Vector((1, 11, 111)))
        self.assertEqual(mat[1], Vector((2, 22, 222)))
        self.assertEqual(mat[2], Vector((3, 33, 333)))
        self.assertEqual(mat[3], Vector((4, 44, 444)))
    def test_item_access(self):
        # mat[row][col] must match the construction arguments
        args = ((1, 4, 0, -1),
                (2, -1, 2, -2),
                (0, 3, 8, 3),
                (-2, 9, 1, 0))
        mat = Matrix(args)
        for row in range(4):
            for col in range(4):
                self.assertEqual(mat[row][col], args[row][col])
        self.assertEqual(mat[0][2], 0)
        self.assertEqual(mat[3][1], 9)
        self.assertEqual(mat[2][3], 3)
        self.assertEqual(mat[0][0], 1)
        self.assertEqual(mat[3][3], 0)
    def test_item_assignment(self):
        # start from a zero matrix, set a few cells, check only those are set
        mat = Matrix() - Matrix()
        indices = (0, 0), (1, 3), (2, 0), (3, 2), (3, 1)
        checked_indices = []
        for row, col in indices:
            mat[row][col] = 1
        for row in range(4):
            for col in range(4):
                if mat[row][col]:
                    checked_indices.append((row, col))
        for item in checked_indices:
            self.assertIn(item, indices)
    def test_matrix_to_3x3(self):
        #mat =
        #[ 1  2  3  4  ]
        #[ 2  4  6  8  ]
        #[ 3  6  9  12 ]
        #[ 4  8  12 16 ]
        mat = Matrix(tuple((i, 2 * i, 3 * i, 4 * i) for i in range(1, 5)))
        mat_correct = Matrix(((1, 2, 3), (2, 4, 6), (3, 6, 9)))
        self.assertEqual(mat.to_3x3(), mat_correct)
    def test_matrix_to_translation(self):
        # translation lives in the last column of a 4x4 matrix
        mat = Matrix()
        mat[0][3] = 1
        mat[1][3] = 2
        mat[2][3] = 3
        self.assertEqual(mat.to_translation(), Vector((1, 2, 3)))
    def test_matrix_translation(self):
        # the 'translation' property writes the last column
        mat = Matrix()
        mat.translation = Vector((1, 2, 3))
        self.assertEqual(mat[0][3], 1)
        self.assertEqual(mat[1][3], 2)
        self.assertEqual(mat[2][3], 3)
    def test_non_square_mult(self):
        mat1 = Matrix(((1, 2, 3),
                       (4, 5, 6)))
        mat2 = Matrix(((1, 2),
                       (3, 4),
                       (5, 6)))
        prod_mat1 = Matrix(((22, 28),
                            (49, 64)))
        prod_mat2 = Matrix(((9, 12, 15),
                            (19, 26, 33),
                            (29, 40, 51)))
        self.assertEqual(mat1 * mat2, prod_mat1)
        self.assertEqual(mat2 * mat1, prod_mat2)
    def test_mat4x4_vec3D_mult(self):
        # 3D vectors are promoted when multiplied with a 4x4 matrix
        mat = Matrix(((1, 0, 2, 0),
                      (0, 6, 0, 0),
                      (0, 0, 1, 1),
                      (0, 0, 0, 1)))
        vec = Vector((1, 2, 3))
        prod_mat_vec = Vector((7, 12, 4))
        prod_vec_mat = Vector((1, 12, 5))
        self.assertEqual(mat * vec, prod_mat_vec)
        self.assertEqual(vec * mat, prod_vec_mat)
    def test_mat_vec_mult(self):
        # mismatched sizes must raise; 2x2 * 2D vector must work
        mat1 = Matrix()
        vec = Vector((1, 2))
        self.assertRaises(ValueError, mat1.__mul__, vec)
        self.assertRaises(ValueError, vec.__mul__, mat1)
        mat2 = Matrix(((1, 2),
                       (-2, 3)))
        prod = Vector((5, 4))
        self.assertEqual(mat2 * vec, prod)
    def test_matrix_inverse(self):
        mat = Matrix(((1, 4, 0, -1),
                      (2, -1, 2, -2),
                      (0, 3, 8, 3),
                      (-2, 9, 1, 0)))
        inv_mat = (1 / 285) * Matrix(((195, -57, 27, -102),
                                      (50, -19, 4, 6),
                                      (-60, 57, 18, 27),
                                      (110, -133, 43, -78)))
        self.assertEqual(mat.inverted(), inv_mat)
    def test_matrix_mult(self):
        mat = Matrix(((1, 4, 0, -1),
                      (2, -1, 2, -2),
                      (0, 3, 8, 3),
                      (-2, 9, 1, 0)))
        prod_mat = Matrix(((11, -9, 7, -9),
                           (4, -3, 12, 6),
                           (0, 48, 73, 18),
                           (16, -14, 26, -13)))
        self.assertEqual(mat * mat, prod_mat)
class VectorTesting(unittest.TestCase):
    """Tests for mathutils.Vector."""
    def test_orthogonal(self):
        # v.orthogonal() must be at 90 degrees to v (skip zero-length vectors)
        angle_90d = math.pi / 2.0
        for v in vector_data:
            v = Vector(v)
            if v.length_squared != 0.0:
                self.assertAlmostEqual(v.angle(v.orthogonal()), angle_90d)
class KDTreeTesting(unittest.TestCase):
    """Tests for mathutils.kdtree.KDTree: find, find_range, find_n and
    error conditions (invalid size, unbalanced tree)."""
    @staticmethod
    def kdtree_create_grid_3d(tot):
        # a balanced tree holding a tot**3 grid of points over [0, 1]^3
        k = kdtree.KDTree(tot * tot * tot)
        index = 0
        mul = 1.0 / (tot - 1)
        for x in range(tot):
            for y in range(tot):
                for z in range(tot):
                    k.insert((x * mul, y * mul, z * mul), index)
                    index += 1
        k.balance()
        return k
    def test_kdtree_single(self):
        co = (0,) * 3
        index = 2
        k = kdtree.KDTree(1)
        k.insert(co, index)
        k.balance()
        co_found, index_found, dist_found = k.find(co)
        self.assertEqual(tuple(co_found), co)
        self.assertEqual(index_found, index)
        self.assertEqual(dist_found, 0.0)
    def test_kdtree_empty(self):
        # find() on an empty tree returns a (None, None, None) triple
        co = (0,) * 3
        k = kdtree.KDTree(0)
        k.balance()
        co_found, index_found, dist_found = k.find(co)
        self.assertIsNone(co_found)
        self.assertIsNone(index_found)
        self.assertIsNone(dist_found)
    def test_kdtree_line(self):
        # points on a diagonal line; queries outside clamp to the ends
        tot = 10
        k = kdtree.KDTree(tot)
        for i in range(tot):
            k.insert((i,) * 3, i)
        k.balance()
        co_found, index_found, dist_found = k.find((-1,) * 3)
        self.assertEqual(tuple(co_found), (0,) * 3)
        co_found, index_found, dist_found = k.find((tot,) * 3)
        self.assertEqual(tuple(co_found), (tot - 1,) * 3)
    def test_kdtree_grid(self):
        size = 10
        k = self.kdtree_create_grid_3d(size)
        # find_range
        ret = k.find_range((0.5,) * 3, 2.0)
        self.assertEqual(len(ret), size * size * size)
        ret = k.find_range((1.0,) * 3, 1.0 / size)
        self.assertEqual(len(ret), 1)
        ret = k.find_range((1.0,) * 3, 2.0 / size)
        self.assertEqual(len(ret), 8)
        ret = k.find_range((10,) * 3, 0.5)
        self.assertEqual(len(ret), 0)
        # find_n
        tot = 0
        ret = k.find_n((1.0,) * 3, tot)
        self.assertEqual(len(ret), tot)
        tot = 10
        ret = k.find_n((1.0,) * 3, tot)
        self.assertEqual(len(ret), tot)
        self.assertEqual(ret[0][2], 0.0)
        tot = size * size * size
        ret = k.find_n((1.0,) * 3, tot)
        self.assertEqual(len(ret), tot)
    def test_kdtree_invalid_size(self):
        with self.assertRaises(ValueError):
            kdtree.KDTree(-1)
    def test_kdtree_invalid_balance(self):
        # inserting after balance() leaves the tree unbalanced: find() raises
        co = (0,) * 3
        index = 2
        k = kdtree.KDTree(2)
        k.insert(co, index)
        k.balance()
        k.insert(co, index)
        with self.assertRaises(RuntimeError):
            k.find(co)
def test_main():
    """Run all suites; any failure exits(1) so CTest sees it.

    NOTE(review): test.support.run_unittest is a CPython-internal helper
    (removed in recent Python versions) - confirm the target interpreter
    still provides it.
    """
    try:
        support.run_unittest(MatrixTesting)
        support.run_unittest(VectorTesting)
        support.run_unittest(KDTreeTesting)
    except:
        import traceback
        traceback.print_exc()
        # alert CTest we failed
        import sys
        sys.exit(1)
if __name__ == '__main__':
    test_main()

View File

@@ -0,0 +1,80 @@
# ./blender.bin --background -noaudio --python tests/python/bl_pyapi_units.py
import unittest
from test import support
from bpy.utils import units
class UnitsTesting(unittest.TestCase):
# From user typing to 'internal' Blender value.
INPUT_TESTS = (
# system, type, ref, input, value
##### LENGTH
('IMPERIAL', 'LENGTH', "", "1ft", 0.3048),
('IMPERIAL', 'LENGTH', "", "(1+1)ft", 0.3048 * 2),
('IMPERIAL', 'LENGTH', "", "1mi4\"", 1609.344 + 0.0254 * 4),
('METRIC', 'LENGTH', "", "0.005µm", 0.000001 * 0.005),
('METRIC', 'LENGTH', "", "1e6km", 1000.0 * 1e6),
('IMPERIAL', 'LENGTH', "", "1ft5cm", 0.3048 + 0.01 * 5),
('METRIC', 'LENGTH', "", "1ft5cm", 0.3048 + 0.01 * 5),
# Using reference string to find a unit when none is given.
('IMPERIAL', 'LENGTH', "33.3ft", "1", 0.3048),
('METRIC', 'LENGTH', "33.3dm", "1", 0.1),
('IMPERIAL', 'LENGTH', "33.3cm", "1", 0.3048), # ref unit is not in IMPERIAL system, default to feet...
('IMPERIAL', 'LENGTH', "33.3ft", "1\"", 0.0254), # unused ref unit, since one is given already!
#('IMPERIAL', 'LENGTH', "", "1+1ft", 0.3048 * 2), # Will fail with current code!
)
# From 'internal' Blender value to user-friendly printing
OUTPUT_TESTS = (
# system, type, prec, sep, compat, value, output
##### LENGTH
('IMPERIAL', 'LENGTH', 3, False, False, 0.3048, "1'"),
('IMPERIAL', 'LENGTH', 3, False, True, 0.3048, "1ft"),
('IMPERIAL', 'LENGTH', 3, True, False, 0.3048 * 2 + 0.0254 * 5.5, "2' 5.5\""),
# Those next two fail, here again because precision ignores order magnitude :/
#('IMPERIAL', 'LENGTH', 3, False, False, 1609.344 * 1e6, "1000000mi"), # == 1000000.004mi!!!
#('IMPERIAL', 'LENGTH', 6, False, False, 1609.344 * 1e6, "1000000mi"), # == 1000000.003641mi!!!
('METRIC', 'LENGTH', 3, True, False, 1000 * 2 + 0.001 * 15, "2km 1.5cm"),
('METRIC', 'LENGTH', 3, True, False, 1234.56789, "1km 234.6m"),
# Note: precision seems basically unused when using multi units!
('METRIC', 'LENGTH', 9, True, False, 1234.56789, "1km 234.6m"),
('METRIC', 'LENGTH', 9, False, False, 1234.56789, "1.23456789km"),
('METRIC', 'LENGTH', 9, True, False, 1000.000123456789, "1km 0.1mm"),
)
def test_units_inputs(self):
# Stolen from FBX addon!
def similar_values(v1, v2, e):
if v1 == v2:
return True
return ((abs(v1 - v2) / max(abs(v1), abs(v2))) <= e)
for usys, utype, ref, inpt, val in self.INPUT_TESTS:
opt_val = units.to_value(usys, utype, inpt, ref)
# Note: almostequal is not good here, precision is fixed on decimal digits, not variable with
# magnitude of numbers (i.e. 1609.4416 ~= 1609.4456 fails even at 5 of 'places'...).
self.assertTrue(similar_values(opt_val, val, 1e-7),
msg="%s, %s: \"%s\" (ref: \"%s\") => %f, expected %f"
"" % (usys, utype, inpt, ref, opt_val, val))
def test_units_outputs(self):
    """Check that units.to_string() renders every OUTPUT_TESTS entry correctly."""
    for usys, utype, prec, sep, compat, value, want in self.OUTPUT_TESTS:
        got = units.to_string(usys, utype, value, prec, sep, compat)
        self.assertEqual(got, want,
                         msg="%s, %s: %f (precision: %d, separate units: %d, compat units: %d) => "
                             "\"%s\", expected \"%s\"" % (usys, utype, value, prec, sep, compat, got, want))
def test_main():
    """Run the UnitsTesting suite, exiting with status 1 on failure so CTest notices."""
    try:
        support.run_unittest(UnitsTesting)
    except:
        import sys
        import traceback
        traceback.print_exc()
        sys.exit(1)  # alert CTest we failed
# Entry point when executed directly (rather than imported).
if __name__ == '__main__':
    test_main()

View File

@@ -0,0 +1,144 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Use for validating our wiki interlinking.
# ./blender.bin --background -noaudio --python tests/python/bl_rna_wiki_reference.py
#
# 1) test_data() -- ensure the data we have is correct format
# 2) test_lookup_coverage() -- ensure that we have lookups for _every_ RNA path
# 3) test_urls() -- ensure all the URL's are correct
# 4) test_language_coverage() -- ensure language lookup table is complete
#
import bpy
def test_data():
    """Sanity-check the structure of rna_wiki_reference.url_manual_mapping."""
    import rna_wiki_reference

    assert(isinstance(rna_wiki_reference.url_manual_mapping, tuple))
    for i, pair in enumerate(rna_wiki_reference.url_manual_mapping):
        try:
            # every entry must be a (rna_id, url_suffix) string pair
            assert(len(pair) == 2)
            assert(isinstance(pair[0], str))
            assert(isinstance(pair[1], str))
        except:
            print("Expected a tuple of 2 strings, instead item %d is a %s: %r" % (i, type(pair), pair))
            import traceback
            traceback.print_exc()
            raise
# a stripped down version of api_dump() in rna_info_dump.py
def test_lookup_coverage():
    """Report every RNA property/operator group that has no manual URL mapping."""
    def rna_ids():
        # Yield (group, full_path) for every RNA property and operator.
        import rna_info
        structs = rna_info.BuildRNAInfo()[0]
        for struct_id, info in sorted(structs.items()):
            struct_path = "bpy.types.%s" % struct_id[1]
            for prop in info.properties:
                yield (struct_path, "%s.%s" % (struct_path, prop.identifier))

        for submod_id in dir(bpy.ops):
            op_path = "bpy.ops.%s" % submod_id
            for op_id in dir(getattr(bpy.ops, submod_id)):
                yield (op_path, "%s.%s" % (op_path, op_id))

    # check coverage
    from bl_operators import wm

    groups_all = set()
    groups_documented = set()
    for rna_group, rna_id in rna_ids():
        url = wm.WM_OT_doc_view_manual._lookup_rna_url(rna_id, verbose=False)
        print(rna_id, "->", url)
        groups_all.add(rna_group)
        if url is not None:
            groups_documented.add(rna_group)

    # finally report undocumented groups
    print("")
    print("---------------------")
    print("Undocumented Sections")
    for rna_group in sorted(groups_all):
        if rna_group not in groups_documented:
            print("%s.*" % rna_group)
def test_language_coverage():
    """Placeholder: checking the language lookup table is not implemented yet."""
    pass  # TODO
def test_urls():
    """Fetch every manual URL and report those failing with an HTTP error."""
    import sys
    import rna_wiki_reference
    import urllib.error
    from urllib.request import urlopen

    prefix = rna_wiki_reference.url_manual_prefix
    suffixes = {suffix for (rna_id, suffix) in rna_wiki_reference.url_manual_mapping}

    total = "%d" % len(suffixes)
    print("")
    print("-------------" + "-" * len(total))
    print("Testing URLS %s" % total)
    print("")

    # ANSI terminal color escapes for pass/fail markers.
    color_red = '\033[0;31m'
    color_green = '\033[1;32m'
    color_normal = '\033[0m'

    failed = []
    for suffix in sorted(suffixes):
        url_full = prefix + suffix
        print(" %s ... " % url_full, end="")
        sys.stdout.flush()
        try:
            urlopen(url_full)
        except urllib.error.HTTPError:
            print(color_red + "FAIL!" + color_normal)
            failed.append(suffix)
        else:
            print(color_green + "OK" + color_normal)

    if failed:
        total = "%d" % len(failed)
        print("")
        print("------------" + "-" * len(total))
        print("Failed URLS %s" % total)
        print("")
        for suffix in failed:
            print(" %s%s%s" % (color_red, suffix, color_normal))
def main():
    """Run every wiki-reference check in order."""
    for check in (test_data, test_lookup_coverage, test_language_coverage, test_urls):
        check()
# Allow running this file directly inside Blender.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,159 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# run this script in the game engine.
# or on the command line with...
# ./blender.bin --background -noaudio --python tests/python/bl_rst_completeness.py
# Paste this into the bge and run on an always actuator.
'''
filepath = "/src/blender/tests/python/bl_rst_completeness.py"
exec(compile(open(filepath).read(), filepath, 'exec'))
'''
import os
THIS_DIR = os.path.dirname(__file__)
RST_DIR = os.path.normpath(os.path.join(THIS_DIR, "..", "..", "doc", "python_api", "rst"))
import sys
sys.path.append(THIS_DIR)
import rst_to_doctree_mini
try:
import bge
except:
bge = None
# (file, module, partial_ok)
# Each entry: (rst filename, importable module name, partial_ok).
# partial_ok lets one doc entry cover prefixed variants, e.g. glNormal -> glNormal3f.
modules = (
    ("bge.constraints.rst", "bge.constraints", False),
    ("bge.events.rst", "bge.events", False),
    ("bge.logic.rst", "bge.logic", False),
    ("bge.render.rst", "bge.render", False),
    ("bge.texture.rst", "bge.texture", False),
    ("bge.types.rst", "bge.types", False),
    ("bgl.rst", "bgl", True),
    ("gpu.rst", "gpu", False),
)
def is_directive_pydata(filepath, directive):
    """Return True when *directive* documents a Python member (function/class/etc.)."""
    kind = directive.type
    if kind in {"function", "method", "class", "attribute", "data"}:
        return True
    if kind in {"module", "note", "warning", "code-block", "hlist", "seealso"}:
        return False
    if kind in {"literalinclude"}:  # TODO
        return False
    # unrecognized directive: warn, then treat as non-data
    print(directive_to_str(filepath, directive), end=" ")
    print("unknown directive type %r" % kind)
    return False
def directive_to_str(filepath, directive):
    """Return a 'filepath:line:indent:' message prefix (line shown 1-based)."""
    location = (filepath, directive.line + 1, directive.indent)
    return "%s:%d:%d:" % location
def directive_members_dict(filepath, directive_members):
    """Map each documented member's stripped value to its directive."""
    members = {}
    for member in directive_members:
        if is_directive_pydata(filepath, member):
            members[member.value_strip] = member
    return members
def module_validate(filepath, mod, mod_name, doctree, partial_ok):
    """Cross-check the RST *doctree* against the live module *mod*, both ways."""
    # RST member missing from MODULE ???
    for directive in doctree:
        if is_directive_pydata(filepath, directive):
            attr = directive.value_strip
            has_attr = hasattr(mod, attr)
            ok = False
            if not has_attr:
                # so we can have glNormal docs cover glNormal3f
                if partial_ok:
                    for s in dir(mod):
                        if s.startswith(attr):
                            ok = True
                            break
                if not ok:
                    print(directive_to_str(filepath, directive), end=" ")
                    print("rst contains non existing member %r" % attr)

            # if its a class, scan down the class...
            if has_attr:
                if directive.type == "class":
                    cls = getattr(mod, attr)
                    for directive_child in directive.members:
                        if is_directive_pydata(filepath, directive_child):
                            attr_child = directive_child.value_strip
                            # only members defined directly on the class count
                            if attr_child not in cls.__dict__:
                                attr_id = "%s.%s" % (attr, attr_child)
                                print(directive_to_str(filepath, directive_child), end=" ")
                                print("rst contains non existing class member %r" % attr_id)

    # MODULE member missing from RST ???
    doctree_dict = directive_members_dict(filepath, doctree)
    for attr in dir(mod):
        if attr.startswith("_"):
            continue

        directive = doctree_dict.get(attr)
        if directive is None:
            print("module contains undocumented member %r from %r" % ("%s.%s" % (mod_name, attr), filepath))
        else:
            if directive.type == "class":
                directive_dict = directive_members_dict(filepath, directive.members)
                cls = getattr(mod, attr)
                for attr_child in cls.__dict__.keys():
                    if attr_child.startswith("_"):
                        continue
                    if attr_child not in directive_dict:
                        # attr_id is a (message, filepath) pair feeding both %r slots below
                        attr_id = "%s.%s.%s" % (mod_name, attr, attr_child), filepath
                        print("module contains undocumented member %r from %r" % attr_id)
def main():
    """Validate each configured RST file against its live module."""
    if bge is None:
        print("Skipping BGE modules!")

    for filename, modname, partial_ok in modules:
        if bge is None and modname.startswith("bge"):
            continue

        filepath = os.path.join(RST_DIR, filename)
        if not os.path.exists(filepath):
            raise Exception("%r not found" % filepath)

        doctree = rst_to_doctree_mini.parse_rst_py(filepath)
        __import__(modname)
        module_validate(filepath, sys.modules[modname], modname, doctree, partial_ok)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,490 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# semi-useful script, runs all operators in a number of different
# contexts, cheap way to find misc small bugs but is in no way a complete test.
#
# only error checked for here is a segfault.
import bpy
import sys
# Fuzzing configuration flags (edit in place; there is no CLI for these).
USE_ATTRSET = False  # also randomize property assignment on every datablock
USE_FILES = ""  # "/mango/"
USE_RANDOM = False  # shuffle/multiply the operator list
USE_RANDOM_SCREEN = False  # delete a random number of screens before testing
RANDOM_SEED = [1]  # so we can redo crashes
RANDOM_RESET = 0.1  # 10% chance of resetting on each new operator
RANDOM_MULTIPLY = 10  # how many times to repeat the operator list when randomizing
# Operators never run blindly (fnmatch patterns): anything that would quit
# Blender, touch files on disk, open URLs, or is simply too slow to fuzz.
op_blacklist = (
    "script.reload",
    "export*.*",
    "import*.*",
    "*.save_*",
    "*.read_*",
    "*.open_*",
    "*.link_append",
    "render.render",
    "render.play_rendered_anim",
    "sound.bake_animation",  # OK but slow
    "sound.mixdown",  # OK but slow
    "object.bake_image",  # OK but slow
    "object.paths_calculate",  # OK but slow
    "object.paths_update",  # OK but slow
    "ptcache.bake_all",  # OK but slow
    "nla.bake",  # OK but slow
    "*.*_export",
    "*.*_import",
    "ed.undo",
    "ed.undo_push",
    "script.autoexec_warn_clear",
    "screen.delete",  # already used for random screens
    "wm.blenderplayer_start",
    "wm.recover_auto_save",
    "wm.quit_blender",
    "wm.url_open",
    "wm.doc_view",
    "wm.doc_edit",
    "wm.doc_view_manual",
    "wm.path_open",
    "wm.theme_install",
    "wm.context_*",
    "wm.properties_add",
    "wm.properties_remove",
    "wm.properties_edit",
    "wm.properties_context_change",
    "wm.operator_cheat_sheet",
    "wm.interface_theme_*",
    "wm.appconfig_*",  # just annoying - but harmless
    "wm.keyitem_add",  # just annoying - but harmless
    "wm.keyconfig_activate",  # just annoying - but harmless
    "wm.keyconfig_preset_add",  # just annoying - but harmless
    "wm.keyconfig_test",  # just annoying - but harmless
    "wm.memory_statistics",  # another annoying one
    "wm.dependency_relations",  # another annoying one
    "wm.keymap_restore",  # another annoying one
    "wm.addon_*",  # harmless, but dont change state
    "console.*",  # just annoying - but harmless
)
def blend_list(mainpath):
    """Return a sorted list of all .blend file paths under *mainpath*.

    Hidden directories (e.g. ".svn", ".git") are skipped.
    """
    import os
    from os.path import join, splitext

    def file_list(path, filename_check=None):
        for dirpath, dirnames, filenames in os.walk(path):
            # Prune hidden dirs (e.g. '.svn') by basename; the previous
            # ``dirpath.startswith(".")`` test only matched when the walk
            # root itself was a relative path, so it never skipped '.svn'
            # under an absolute root and skipped *everything* under ".".
            dirnames[:] = [d for d in dirnames if not d.startswith(".")]
            for filename in filenames:
                filepath = join(dirpath, filename)
                if filename_check is None or filename_check(filepath):
                    yield filepath

    def is_blend(filename):
        # Match on the file extension only.
        return splitext(filename)[1] == ".blend"

    return sorted(file_list(mainpath, is_blend))
# Pre-compute the .blend files to cycle through when fuzzing against files.
if USE_FILES:
    USE_FILES_LS = blend_list(USE_FILES)
    # print(USE_FILES_LS)
def filter_op_list(operators):
    """Remove blacklisted operators from *operators* in place."""
    from fnmatch import fnmatchcase

    def is_op_ok(op):
        for op_match in op_blacklist:
            if fnmatchcase(op, op_match):
                print(" skipping: %s (%s)" % (op, op_match))
                return False
        return True

    kept = []
    for op in operators:
        if is_op_ok(op[0]):
            kept.append(op)
    operators[:] = kept
def reset_blend():
    """Restore factory settings and shorten frame ranges so bakes stay fast."""
    bpy.ops.wm.read_factory_settings()
    for scene in bpy.data.scenes:
        # reduce range so any bake action doesnt take too long
        scene.frame_start = 1
        scene.frame_end = 5

    if USE_RANDOM_SCREEN:
        import random
        for _ in range(random.randint(0, len(bpy.data.screens))):
            bpy.ops.screen.delete()
        # fix: message previously read "Scree IS"
        print("Screen IS", bpy.context.screen)
def reset_file():
    """Open a randomly chosen .blend file from USE_FILES_LS."""
    import random
    filepath = USE_FILES_LS[random.randint(0, len(USE_FILES_LS) - 1)]
    bpy.ops.wm.open_mainfile(filepath=filepath)
# Everything in this branch is only needed when fuzzing property assignment.
if USE_ATTRSET:
    def build_property_typemap(skip_classes):
        # Map RNA type name -> list of writable (non skip-save) property ids.
        property_typemap = {}

        for attr in dir(bpy.types):
            cls = getattr(bpy.types, attr)
            if issubclass(cls, skip_classes):
                continue

            ## to support skip-save we cant get all props
            # properties = cls.bl_rna.properties.keys()
            properties = []
            for prop_id, prop in cls.bl_rna.properties.items():
                if not prop.is_skip_save:
                    properties.append(prop_id)

            properties.remove("rna_type")
            property_typemap[attr] = properties

        return property_typemap

    # Types whose randomized properties are known to misbehave.
    CLS_BLACKLIST = (
        bpy.types.BrushTextureSlot,
        bpy.types.Brush,
    )

    property_typemap = build_property_typemap(CLS_BLACKLIST)
    bpy_struct_type = bpy.types.Struct.__base__

    def id_walk(value, parent):
        # Recursively yield (owner, prop_name, prop_type) for every property
        # reachable from *value* without crossing into another ID datablock.
        value_type = type(value)
        value_type_name = value_type.__name__
        value_id = getattr(value, "id_data", Ellipsis)
        value_props = property_typemap.get(value_type_name, ())

        for prop in value_props:
            subvalue = getattr(value, prop)

            if subvalue == parent:
                continue
            # grr, recursive!
            if prop == "point_caches":
                continue
            subvalue_type = type(subvalue)
            yield value, prop, subvalue_type
            subvalue_id = getattr(subvalue, "id_data", Ellipsis)

            if value_id == subvalue_id:
                # plain scalars have no sub-properties to recurse into
                if subvalue_type == float:
                    pass
                elif subvalue_type == int:
                    pass
                elif subvalue_type == bool:
                    pass
                elif subvalue_type == str:
                    pass
                elif hasattr(subvalue, "__len__"):
                    for sub_item in subvalue[:]:
                        if isinstance(sub_item, bpy_struct_type):
                            subitem_id = getattr(sub_item, "id_data", Ellipsis)
                            if subitem_id == subvalue_id:
                                yield from id_walk(sub_item, value)

                if subvalue_type.__name__ in property_typemap:
                    yield from id_walk(subvalue, value)

    # main function
    _random_values = (
        None, object, type,
        1, 0.1, -1,  # float("nan"),
        "", "test", b"", b"test",
        (), [], {},
        (10,), (10, 20), (0, 0, 0),
        {0: "", 1: "hello", 2: "test"}, {"": 0, "hello": 1, "test": 2},
        set(), {"", "test", "."}, {None, ..., type},
        range(10), (" " * i for i in range(10)),
    )

    def attrset_data():
        # Assign every random value to every property of every datablock;
        # failures are ignored -- only crashes matter here.
        for attr in dir(bpy.data):
            if attr == "window_managers":
                continue
            seq = getattr(bpy.data, attr)
            if seq.__class__.__name__ == 'bpy_prop_collection':
                for id_data in seq:
                    for val, prop, tp in id_walk(id_data, bpy.data):
                        # print(id_data)
                        for val_rnd in _random_values:
                            try:
                                setattr(val, prop, val_rnd)
                            except:
                                pass
def run_ops(operators, setup_func=None, reset=True):
    """Invoke every pollable operator after preparing the context via *setup_func*.

    With an empty *operators* sequence only the setup function itself is tested.
    """
    print("\ncontext:", setup_func.__name__)

    # first invoke
    for op_id, op in operators:
        if op.poll():
            print(" operator:", op_id)
            sys.stdout.flush()  # in case of crash

            # disable will get blender in a bad state and crash easy!
            if reset:
                reset_test = True
                if USE_RANDOM:
                    import random
                    # occasionally keep the previous state to widen coverage
                    if random.random() < (1.0 - RANDOM_RESET):
                        reset_test = False

                if reset_test:
                    if USE_FILES:
                        reset_file()
                    else:
                        reset_blend()
                del reset_test

            if USE_RANDOM:
                # we can't be sure it will work
                try:
                    setup_func()
                except:
                    pass
            else:
                setup_func()

            for mode in {'EXEC_DEFAULT', 'INVOKE_DEFAULT'}:
                try:
                    op(mode)
                except:
                    #import traceback
                    #traceback.print_exc()
                    pass

            if USE_ATTRSET:
                attrset_data()

    if not operators:
        # run test: exercise the setup function alone
        if reset:
            reset_blend()
        if USE_RANDOM:
            # we can't be sure it will work
            try:
                setup_func()
            except:
                pass
        else:
            setup_func()
# contexts
def ctx_clear_scene():  # copied from batch_import.py
    """Unlink every object from every scene, then purge leftover object data."""
    unique_obs = set()
    for scene in bpy.data.scenes:
        for obj in list(scene.objects):
            scene.objects.unlink(obj)
            unique_obs.add(obj)

    # remove obdata, for now only worry about the startup scene
    for bpy_data_iter in (bpy.data.objects, bpy.data.meshes, bpy.data.lamps, bpy.data.cameras):
        for id_data in bpy_data_iter:
            bpy_data_iter.remove(id_data)
# --- context setup functions: each puts Blender into one specific state ---

def ctx_editmode_mesh():
    bpy.ops.object.mode_set(mode='EDIT')


def ctx_editmode_mesh_extra():
    # add vgroups/shapekeys/uv/color/material layers before entering editmode
    bpy.ops.object.vertex_group_add()
    bpy.ops.object.shape_key_add(from_mix=False)
    bpy.ops.object.shape_key_add(from_mix=True)
    bpy.ops.mesh.uv_texture_add()
    bpy.ops.mesh.vertex_color_add()
    bpy.ops.object.material_slot_add()
    # editmode last!
    bpy.ops.object.mode_set(mode='EDIT')


def ctx_editmode_mesh_empty():
    # editmode on a mesh with all geometry deleted
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.mesh.select_all(action='SELECT')
    bpy.ops.mesh.delete()


def ctx_editmode_curves():
    bpy.ops.curve.primitive_nurbs_circle_add()
    bpy.ops.object.mode_set(mode='EDIT')


def ctx_editmode_curves_empty():
    # editmode on a curve with all points deleted
    bpy.ops.curve.primitive_nurbs_circle_add()
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.curve.select_all(action='SELECT')
    bpy.ops.curve.delete(type='VERT')


def ctx_editmode_surface():
    bpy.ops.surface.primitive_nurbs_surface_torus_add()
    bpy.ops.object.mode_set(mode='EDIT')


def ctx_editmode_mball():
    bpy.ops.object.metaball_add()
    bpy.ops.object.mode_set(mode='EDIT')


def ctx_editmode_text():
    bpy.ops.object.text_add()
    bpy.ops.object.mode_set(mode='EDIT')


def ctx_editmode_armature():
    bpy.ops.object.armature_add()
    bpy.ops.object.mode_set(mode='EDIT')


def ctx_editmode_armature_empty():
    # editmode on an armature with all bones deleted
    bpy.ops.object.armature_add()
    bpy.ops.object.mode_set(mode='EDIT')
    bpy.ops.armature.select_all(action='SELECT')
    bpy.ops.armature.delete()


def ctx_editmode_lattice():
    bpy.ops.object.add(type='LATTICE')
    bpy.ops.object.mode_set(mode='EDIT')
    # bpy.ops.object.vertex_group_add()


def ctx_object_empty():
    bpy.ops.object.add(type='EMPTY')


def ctx_object_pose():
    bpy.ops.object.armature_add()
    bpy.ops.object.mode_set(mode='POSE')
    bpy.ops.pose.select_all(action='SELECT')


def ctx_object_paint_weight():
    bpy.ops.object.mode_set(mode='WEIGHT_PAINT')


def ctx_object_paint_vertex():
    bpy.ops.object.mode_set(mode='VERTEX_PAINT')


def ctx_object_paint_sculpt():
    bpy.ops.object.mode_set(mode='SCULPT')


def ctx_object_paint_texture():
    bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
def bpy_check_type_duplicates():
    """Sanity check: bpy.types must not expose the same name twice."""
    # non essential sanity check
    bl_types = dir(bpy.types)
    unique = set(bl_types)

    if len(unique) != len(bl_types):
        print("Error, found duplicates in 'bpy.types'")
        for name in sorted(unique):
            count = bl_types.count(name)
            if count > 1:
                print(" '%s', %d" % (name, count))
        import sys
        sys.exit(1)
def main():
    """Enumerate all operators, filter out the blacklist, run them in many contexts."""
    bpy_check_type_duplicates()

    # reset_blend()
    import bpy

    # collect (id, callable) pairs for every registered operator
    operators = []
    for mod_name in dir(bpy.ops):
        mod = getattr(bpy.ops, mod_name)
        for submod_name in dir(mod):
            op = getattr(mod, submod_name)
            operators.append(("%s.%s" % (mod_name, submod_name), op))

    operators.sort(key=lambda op: op[0])

    filter_op_list(operators)

    # for testing, mix the list up.
    #operators.reverse()

    if USE_RANDOM:
        import random
        random.seed(RANDOM_SEED[0])
        operators = operators * RANDOM_MULTIPLY
        random.shuffle(operators)

    # 2 passes, first just run setup_func to make sure they are ok
    for operators_test in ((), operators):
        # Run the operator tests in different contexts
        run_ops(operators_test, setup_func=lambda: None)

        if USE_FILES:
            # file-based fuzzing supplies its own scene state
            continue

        run_ops(operators_test, setup_func=ctx_clear_scene)
        # object modes
        run_ops(operators_test, setup_func=ctx_object_empty)
        run_ops(operators_test, setup_func=ctx_object_pose)
        run_ops(operators_test, setup_func=ctx_object_paint_weight)
        run_ops(operators_test, setup_func=ctx_object_paint_vertex)
        run_ops(operators_test, setup_func=ctx_object_paint_sculpt)
        run_ops(operators_test, setup_func=ctx_object_paint_texture)
        # mesh
        run_ops(operators_test, setup_func=ctx_editmode_mesh)
        run_ops(operators_test, setup_func=ctx_editmode_mesh_extra)
        run_ops(operators_test, setup_func=ctx_editmode_mesh_empty)
        # armature
        run_ops(operators_test, setup_func=ctx_editmode_armature)
        run_ops(operators_test, setup_func=ctx_editmode_armature_empty)
        # curves
        run_ops(operators_test, setup_func=ctx_editmode_curves)
        run_ops(operators_test, setup_func=ctx_editmode_curves_empty)
        run_ops(operators_test, setup_func=ctx_editmode_surface)
        # other
        run_ops(operators_test, setup_func=ctx_editmode_mball)
        run_ops(operators_test, setup_func=ctx_editmode_text)
        run_ops(operators_test, setup_func=ctx_editmode_lattice)

        if not operators_test:
            print("All setup functions run fine!")

    print("Finished %r" % __file__)
if __name__ == "__main__":
    # Uncomment to sweep many random seeds in one session:
    #~ for i in range(200):
    #~     RANDOM_SEED[0] += 1
    #~     main()
    main()

197
tests/python/bl_test.py Normal file
View File

@@ -0,0 +1,197 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import sys
import os
# may split this out into a new file
def replace_bpy_app_version():
    """Swap bpy.app for a copy whose version fields are zeroed.

    So MD5's are predictable from output which uses blenders versions.
    """
    import bpy

    real = bpy.app
    fake = type(bpy)("bpy.app")  # a fresh module object

    for attr in dir(real):
        if not attr.startswith("_"):
            setattr(fake, attr, getattr(real, attr))

    fake.version = 0, 0, 0
    fake.version_string = "0.00 (sub 0)"
    bpy.app = fake
def clear_startup_blend():
    """Unlink every object from every scene in the startup file."""
    import bpy
    for scene in bpy.data.scenes:
        # iterate a copy: unlinking while iterating the live collection
        # can skip entries (sibling ctx_clear_scene already does this)
        for obj in scene.objects[:]:
            scene.objects.unlink(obj)
def blend_to_md5():
    """Digest the current scene's transforms and geometry into an MD5 hex string."""
    import bpy
    import hashlib

    scene = bpy.context.scene
    ROUND = 4  # rounding keeps the digest stable across float noise

    def matrix2str(matrix):
        return "".join([str(round(axis, ROUND)) for vector in matrix for axis in vector]).encode('ASCII')

    def coords2str(seq, attr):
        return "".join([str(round(axis, ROUND)) for vertex in seq for axis in getattr(vertex, attr)]).encode('ASCII')

    md5 = hashlib.new("md5")
    for obj in scene.objects:
        md5.update(matrix2str(obj.matrix_world))
        data = obj.data
        if type(data) == bpy.types.Mesh:
            md5.update(coords2str(data.vertices, "co"))
        elif type(data) == bpy.types.Curve:
            for spline in data.splines:
                md5.update(coords2str(spline.bezier_points, "co"))
                md5.update(coords2str(spline.points, "co"))

    return md5.hexdigest()
def main():
    """Parse the test's CLI args, evaluate the requested expression, verify its MD5.

    Exits with status 1 on any failure so CTest registers it.
    """
    argv = sys.argv
    print(" args:", " ".join(argv))
    argv = argv[argv.index("--") + 1:]

    def arg_extract(arg, optional=True, array=False):
        # Pop every "--arg=value" out of argv; return the value (or list of them).
        arg += "="
        if array:
            value = []
        else:
            value = None

        i = 0
        while i < len(argv):
            if argv[i].startswith(arg):
                item = argv[i][len(arg):]
                del argv[i]
                i -= 1

                if array:
                    value.append(item)
                else:
                    value = item
                    break

            i += 1

        if (not value) and (not optional):
            print(" '%s' not set" % arg)
            sys.exit(1)

        return value

    run = arg_extract("--run", optional=False)
    md5 = arg_extract("--md5", optional=False)
    md5_method = arg_extract("--md5_method", optional=False)  # 'SCENE' / 'FILE'

    # only when md5_method is 'FILE'
    md5_source = arg_extract("--md5_source", optional=True, array=True)

    # save blend file, for testing
    write_blend = arg_extract("--write-blend", optional=True)

    # ensure files are written anew
    for f in md5_source:
        if os.path.exists(f):
            os.remove(f)

    import bpy

    replace_bpy_app_version()
    if not bpy.data.filepath:
        clear_startup_blend()

    print(" Running: '%s'" % run)
    print(" MD5: '%s'!" % md5)

    try:
        result = eval(run)
    except:
        import traceback
        traceback.print_exc()
        sys.exit(1)

    if write_blend is not None:
        print(" Writing Blend: %s" % write_blend)
        bpy.ops.wm.save_mainfile('EXEC_DEFAULT', filepath=write_blend)

    print(" Result: '%s'" % str(result))
    if not result:
        print(" Running: %s -> False" % run)
        sys.exit(1)

    if md5_method == 'SCENE':
        md5_new = blend_to_md5()
    elif md5_method == 'FILE':
        if not md5_source:
            print(" Missing --md5_source argument")
            sys.exit(1)
        for f in md5_source:
            if not os.path.exists(f):
                # fix: the offending path was never interpolated into the message
                print(" Missing --md5_source=%r argument does not point to a file" % f)
                sys.exit(1)
        import hashlib

        md5_instance = hashlib.new("md5")
        for f in md5_source:
            # with-block: the handle previously stayed open on exceptions
            with open(f, "rb") as filehandle:
                md5_instance.update(filehandle.read())
        md5_new = md5_instance.hexdigest()
    else:
        # fix: the bad method name was never interpolated into the message
        print(" Invalid --md5_method=%s argument is not a valid source" % md5_method)
        sys.exit(1)

    if md5 != md5_new:
        # fix: "Recieved" typo in the mismatch report
        print(" Running: %s\n MD5 Received: %s\n MD5 Expected: %s" % (run, md5_new, md5))
        sys.exit(1)

    print(" Success: %s" % run)
if __name__ == "__main__":
    # So a python error exits(1)
    try:
        main()
    except:
        import traceback
        traceback.print_exc()
        sys.exit(1)

154
tests/python/pep8.py Normal file
View File

@@ -0,0 +1,154 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
import os
# depends on pep8, frosted, pylint
# for Ubuntu
#
# sudo apt-get install pylint
#
# sudo apt-get install python-setuptools python-pip
# sudo pip install pep8
# sudo pip install frosted
#
# in Debian install pylint pep8 with apt-get/aptitude/etc
#
# on *nix run
# python tests/pep8.py > test_pep8.log 2>&1
# how many lines to read into the file, pep8 comment
# should be directly after the license header, ~20 in most cases
PEP8_SEEK_COMMENT = 40
SKIP_PREFIX = "./tools", "./config", "./scons", "./extern"  # path prefixes never checked
SKIP_ADDONS = True  # addons are checked separately
FORCE_PEP8_ALL = False  # treat every file as pep8-declared regardless of header
def file_list_py(path):
    """Yield every ``.py``/``.cfg`` file found under *path*, recursively."""
    suffixes = (".py", ".cfg")
    for dirpath, _dirnames, filenames in os.walk(path):
        for name in filenames:
            if name.endswith(suffixes):
                yield os.path.join(dirpath, name)
def is_pep8(path):
    """Return the pep8 level declared near the top of *path*.

    0 -- no declaration found
    1 -- "# <pep8 compliant>"
    2 -- "# <pep8-80 compliant>"
    """
    print(path)
    # warn about UTF-8 BOMs; with-block fixes the previously leaked handle
    with open(path, 'rb') as fh:
        if fh.read(3) == b'\xef\xbb\xbf':
            print("\nfile contains BOM, remove first 3 bytes: %r\n" % path)

    # templates don't have a header but should be pep8
    for d in ("presets", "templates_py", "examples"):
        if ("%s%s%s" % (os.sep, d, os.sep)) in path:
            return 1

    # Only scan the first PEP8_SEEK_COMMENT lines; the declaration belongs
    # right after the license header. The with-block also fixes the handle
    # leak on the early returns of the previous version.
    with open(path, 'r', encoding="utf8") as f:
        for _ in range(PEP8_SEEK_COMMENT):
            line = f.readline()
            if line.startswith("# <pep8"):
                if line.startswith("# <pep8 compliant>"):
                    return 1
                elif line.startswith("# <pep8-80 compliant>"):
                    return 2
    return 0
def main():
    """Collect pep8-declared files, then run import checks, pep8, frosted and pylint."""
    files = []
    files_skip = []
    for f in file_list_py("."):
        if [None for prefix in SKIP_PREFIX if f.startswith(prefix)]:
            continue

        if SKIP_ADDONS:
            if (os.sep + "addons") in f:
                continue

        pep8_type = FORCE_PEP8_ALL or is_pep8(f)

        if pep8_type:
            # so we can batch them for each tool.
            files.append((os.path.abspath(f), pep8_type))
        else:
            files_skip.append(f)

    print("\nSkipping...")
    for f in files_skip:
        print(" %s" % f)

    # strict imports
    # NOTE(review): banner below says "running pep8" but this section only
    # greps for wildcard imports -- likely a copy/paste of the next banner.
    print("\n\n\n# running pep8...")
    import re
    import_check = re.compile(r"\s*from\s+[A-z\.]+\s+import \*\s*")
    for f, pep8_type in files:
        for i, l in enumerate(open(f, 'r', encoding='utf8')):
            if import_check.match(l):
                print("%s:%d:0: global import bad practice" % (f, i + 1))

    print("\n\n\n# running pep8...")
    # these are very picky and often hard to follow
    # while keeping common script formatting.
    ignore = "E122", "E123", "E124", "E125", "E126", "E127", "E128"
    for f, pep8_type in files:
        if pep8_type == 1:
            # E501:80 line length
            ignore_tmp = ignore + ("E501", )
        else:
            ignore_tmp = ignore

        os.system("pep8 --repeat --ignore=%s '%s'" % (",".join(ignore_tmp), f))

    # frosted
    print("\n\n\n# running frosted...")
    for f, pep8_type in files:
        os.system("frosted '%s'" % f)

    print("\n\n\n# running pylint...")
    for f, pep8_type in files:
        # let pep8 complain about line length
        os.system("pylint "
                  "--disable="
                  "C0111,"  # missing doc string
                  "C0103,"  # invalid name
                  "W0613,"  # unused argument, may add this back
                            # but happens a lot for 'context' for eg.
                  "W0232,"  # class has no __init__, Operator/Panel/Menu etc
                  "W0142,"  # Used * or ** magic
                            # even needed in some cases
                  "R0902,"  # Too many instance attributes
                  "R0903,"  # Too many statements
                  "R0911,"  # Too many return statements
                  "R0912,"  # Too many branches
                  "R0913,"  # Too many arguments
                  "R0914,"  # Too many local variables
                  "R0915,"  # Too many statements
                  " "
                  "--include-ids=y "
                  "--output-format=parseable "
                  "--reports=n "
                  "--max-line-length=1000"
                  " '%s'" % f)
# Run the checks when invoked as a script.
if __name__ == "__main__":
    main()

297
tests/python/rna_array.py Normal file
View File

@@ -0,0 +1,297 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
import unittest
import random

import bpy  # required for bpy.data below -- was not imported anywhere in this file

# the RNA test datablock exposing the fixed/dynamic array properties under test
test = bpy.data.test
# farr - 1-dimensional array of float
# fdarr - dynamic 1-dimensional array of float
# fmarr - 3-dimensional ([3][4][5]) array of float
# fdmarr - dynamic 3-dimensional (ditto size) array of float
# same as above for other types except that the first letter is "i" for int and "b" for bool
class TestArray(unittest.TestCase):
    """Exercise fixed-size and dynamic 1D RNA array properties on bpy.data.test."""
    # test that assignment works by: assign -> test value
    # - rvalue = list of float
    # - rvalue = list of numbers
    # test.object
    # bpy.data.test.farr[3], iarr[3], barr[...], fmarr, imarr, bmarr

    def setUp(self):
        # known starting values for the fixed-size 1D arrays
        test.farr = (1.0, 2.0, 3.0)
        test.iarr = (7, 8, 9)
        test.barr = (False, True, False)

    # test access
    # test slice access, negative indices
    def test_access(self):
        rvals = ([1.0, 2.0, 3.0], [7, 8, 9], [False, True, False])

        for arr, rval in zip((test.farr, test.iarr, test.barr), rvals):
            self.assertEqual(prop_to_list(arr), rval)
            self.assertEqual(arr[0:3], rval)
            self.assertEqual(arr[1:2], rval[1:2])
            self.assertEqual(arr[-1], arr[2])
            self.assertEqual(arr[-2], arr[1])
            self.assertEqual(arr[-3], arr[0])

    # fail when index out of bounds
    def test_access_fail(self):
        for arr in (test.farr, test.iarr, test.barr):
            self.assertRaises(IndexError, lambda: arr[4])

    # test assignment of a whole array
    def test_assign_array(self):
        # should accept int as float
        test.farr = (1, 2, 3)

    # fail when: unexpected no. of items, invalid item type
    def test_assign_array_fail(self):
        def assign_empty_list(arr):
            setattr(test, arr, ())

        for arr in ("farr", "iarr", "barr"):
            self.assertRaises(ValueError, assign_empty_list, arr)

        def assign_invalid_float():
            test.farr = (1.0, 2.0, "3.0")

        def assign_invalid_int():
            test.iarr = ("1", 2, 3)

        def assign_invalid_bool():
            test.barr = (True, 0.123, False)

        for func in [assign_invalid_float, assign_invalid_int, assign_invalid_bool]:
            self.assertRaises(TypeError, func)

        # shouldn't accept float as int
        def assign_float_as_int():
            test.iarr = (1, 2, 3.0)
        self.assertRaises(TypeError, assign_float_as_int)

        # non-dynamic arrays cannot change size
        def assign_different_size(arr, val):
            setattr(test, arr, val)
        for arr, val in zip(("iarr", "farr", "barr"), ((1, 2), (1.0, 2.0), (True, False))):
            self.assertRaises(ValueError, assign_different_size, arr, val)

    # test assignment of specific items
    def test_assign_item(self):
        for arr, rand_func in zip((test.farr, test.iarr, test.barr), (rand_float, rand_int, rand_bool)):
            for i in range(len(arr)):
                val = rand_func()
                arr[i] = val
                self.assertEqual(arr[i], val)

        # float prop should accept also int
        for i in range(len(test.farr)):
            val = rand_int()
            test.farr[i] = val
            self.assertEqual(test.farr[i], float(val))

    def test_assign_item_fail(self):
        def assign_bad_index(arr):
            arr[4] = 1.0

        def assign_bad_type(arr):
            arr[1] = "123"

        for arr in [test.farr, test.iarr, test.barr]:
            self.assertRaises(IndexError, assign_bad_index, arr)

        # not testing bool because bool allows not only (True|False)
        for arr in [test.farr, test.iarr]:
            self.assertRaises(TypeError, assign_bad_type, arr)

    def test_dynamic_assign_array(self):
        # test various lengths here
        for arr, rand_func in zip(("fdarr", "idarr", "bdarr"), (rand_float, rand_int, rand_bool)):
            for length in range(1, 64):
                rval = make_random_array(length, rand_func)
                setattr(test, arr, rval)
                self.assertEqual(prop_to_list(getattr(test, arr)), rval)

    def test_dynamic_assign_array_fail(self):
        # could also test too big length here
        def assign_empty_list(arr):
            setattr(test, arr, ())

        for arr in ("fdarr", "idarr", "bdarr"):
            self.assertRaises(ValueError, assign_empty_list, arr)
class TestMArray(unittest.TestCase):
    """Exercise fixed-size and dynamic multi-dimensional RNA array properties."""
    def setUp(self):
        # reset dynamic array sizes
        for arr, func in zip(("fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool)):
            setattr(test, arr, make_random_3d_array((3, 4, 5), func))

    # test assignment
    def test_assign_array(self):
        for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
            # assignment of [3][4][5]
            rval= make_random_3d_array((3, 4, 5), func)
            setattr(test, arr, rval)
            self.assertEqual(prop_to_list(getattr(test, arr)), rval)

    # test assignment of [2][4][5], [1][4][5] should work on dynamic arrays
    def test_assign_array_fail(self):
        def assign_empty_array():
            test.fmarr= ()
        self.assertRaises(ValueError, assign_empty_array)

        def assign_invalid_size(arr, rval):
            setattr(test, arr, rval)

        # assignment of 3,4,4 or 3,3,5 should raise ex
        for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
            rval= make_random_3d_array((3, 4, 4), func)
            self.assertRaises(ValueError, assign_invalid_size, arr, rval)

            rval= make_random_3d_array((3, 3, 5), func)
            self.assertRaises(ValueError, assign_invalid_size, arr, rval)

            rval= make_random_3d_array((3, 3, 3), func)
            self.assertRaises(ValueError, assign_invalid_size, arr, rval)

    def test_assign_item(self):
        # arr[i] = x
        for arr, func in zip(("fmarr", "imarr", "bmarr", "fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool) * 2):
            rval= make_random_2d_array((4, 5), func)
            for i in range(3):
                getattr(test, arr)[i] = rval
                self.assertEqual(prop_to_list(getattr(test, arr)[i]), rval)

        # arr[i][j] = x
        for arr, func in zip(("fmarr", "imarr", "bmarr", "fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool) * 2):
            arr= getattr(test, arr)
            rval= make_random_array(5, func)
            for i in range(3):
                for j in range(4):
                    arr[i][j] = rval
                    self.assertEqual(prop_to_list(arr[i][j]), rval)

    def test_assign_item_fail(self):
        def assign_wrong_size(arr, i, rval):
            getattr(test, arr)[i] = rval

        # assign wrong size at level 2
        for arr, func in zip(("fmarr", "imarr", "bmarr"), (rand_float, rand_int, rand_bool)):
            rval1= make_random_2d_array((3, 5), func)
            rval2= make_random_2d_array((4, 3), func)
            for i in range(3):
                self.assertRaises(ValueError, assign_wrong_size, arr, i, rval1)
                self.assertRaises(ValueError, assign_wrong_size, arr, i, rval2)

    def test_dynamic_assign_array(self):
        for arr, func in zip(("fdmarr", "idmarr", "bdmarr"), (rand_float, rand_int, rand_bool)):
            # assignment of [3][4][5]
            rval= make_random_3d_array((3, 4, 5), func)
            setattr(test, arr, rval)
            self.assertEqual(prop_to_list(getattr(test, arr)), rval)

            # [2][4][5]
            rval= make_random_3d_array((2, 4, 5), func)
            setattr(test, arr, rval)
            self.assertEqual(prop_to_list(getattr(test, arr)), rval)

            # [1][4][5]
            rval= make_random_3d_array((1, 4, 5), func)
            setattr(test, arr, rval)
            self.assertEqual(prop_to_list(getattr(test, arr)), rval)

    # test access
    def test_access(self):
        pass

    # test slice access, negative indices
    def test_access_fail(self):
        pass
# no-argument seed: use system entropy/time so each run gets fresh values
random.seed()
def rand_int():
    """Return a random integer in the closed range [-1000, 1000]."""
    low, high = -1000, 1000
    return random.randint(low, high)
def rand_float():
    """Return a random float with an integral value (float-cast rand_int())."""
    value = rand_int()
    return float(value)
def rand_bool():
    """Return True or False with equal probability."""
    return random.randint(0, 1) != 0
def make_random_array(length, rand_func):
    """Return a list of *length* values, each produced by calling rand_func().

    Note: the parameter was renamed from 'len', which shadowed the builtin;
    all callers in this file pass it positionally.
    """
    return [rand_func() for _ in range(length)]
def make_random_2d_array(dimsize, rand_func):
    """Return a dimsize[0] x dimsize[1] nested list filled by rand_func().

    Values are generated in row-major order.
    """
    rows, cols = dimsize[0], dimsize[1]
    return [[rand_func() for _ in range(cols)] for _ in range(rows)]
def make_random_3d_array(dimsize, rand_func):
    """Return a dimsize[0] x dimsize[1] x dimsize[2] nested list via rand_func().

    Values are generated in row-major (last index fastest) order.
    """
    d0, d1, d2 = dimsize[0], dimsize[1], dimsize[2]
    return [[[rand_func() for _ in range(d2)]
             for _ in range(d1)]
            for _ in range(d0)]
def prop_to_list(prop):
    """Recursively convert an array-like property into nested plain lists.

    Scalar leaves (bool/int/float, exact type match) are kept as-is; any
    other element is treated as a nested sequence and converted recursively.
    """
    return [x if type(x) in (bool, int, float) else prop_to_list(x)
            for x in prop]
def suite():
    """Build the combined test suite for both array test cases."""
    loader = unittest.TestLoader()
    cases = (TestArray, TestMArray)
    return unittest.TestSuite([loader.loadTestsFromTestCase(c) for c in cases])
if __name__ == "__main__":
    # run both test cases with per-test (verbose) output when executed directly
    unittest.TextTestRunner(verbosity=2).run(suite())

View File

@@ -0,0 +1,131 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Used for generating API diffs between releases
# ./blender.bin --background -noaudio --python tests/python/rna_info_dump.py
import bpy
def api_dump(use_properties=True, use_functions=True):
    """Dump a sorted text listing of the RNA API for diffing between releases.

    In background mode the listing is written to stderr; otherwise it is
    placed in a new text datablock named "api.py".

    :arg use_properties: include RNA properties in the dump.
    :arg use_functions: include RNA functions in the dump.
    """
    def prop_type(prop):
        # pointer properties are reported as the type they point to
        if prop.type == "pointer":
            return prop.fixed_type.identifier
        else:
            return prop.type

    def func_to_str(struct_id_str, func_id, func):
        # format one function as "Struct.func(args) --> return(s)"
        args = []
        for prop in func.args:
            data_str = "%s %s" % (prop_type(prop), prop.identifier)
            if prop.array_length:
                data_str += "[%d]" % prop.array_length
            if not prop.is_required:
                data_str += "=%s" % prop.default_str
            args.append(data_str)
        data_str = "%s.%s(%s)" % (struct_id_str, func_id, ", ".join(args))
        if func.return_values:
            return_args = ", ".join(prop_type(arg) for arg in func.return_values)
            # multiple return values are shown as a parenthesized tuple
            if len(func.return_values) > 1:
                data_str += " --> (%s)" % return_args
            else:
                data_str += " --> %s" % return_args
        return data_str

    def prop_to_str(struct_id_str, prop_id, prop):
        # format one property as "Struct.prop  <-- type[len]"
        prop_str = " <-- %s" % prop_type(prop)
        if prop.array_length:
            prop_str += "[%d]" % prop.array_length
        data_str = "%s.%s %s" % (struct_id_str, prop_id, prop_str)
        return data_str

    def struct_full_id(v):
        # build a "Base|Sub|Struct" identifier chain including all bases
        struct_id_str = v.identifier  # "".join(sid for sid in struct_id if struct_id)
        for base in v.get_bases():
            struct_id_str = base.identifier + "|" + struct_id_str
        return struct_id_str

    def dump_funcs():
        data = []
        for struct_id, v in sorted(struct.items()):
            struct_id_str = struct_full_id(v)
            funcs = [(func.identifier, func) for func in v.functions]
            for func_id, func in funcs:
                data.append(func_to_str(struct_id_str, func_id, func))
            # also dump the functions of each collection property's type
            for prop in v.properties:
                if prop.collection_type:
                    funcs = [(prop.identifier + "." + func.identifier, func) for func in prop.collection_type.functions]
                    for func_id, func in funcs:
                        data.append(func_to_str(struct_id_str, func_id, func))
        data.sort()
        # header is appended after the sort so it ends up last in this section
        data.append("# * functions *")
        return data

    def dump_props():
        data = []
        for struct_id, v in sorted(struct.items()):
            struct_id_str = struct_full_id(v)
            props = [(prop.identifier, prop) for prop in v.properties]
            for prop_id, prop in props:
                data.append(prop_to_str(struct_id_str, prop_id, prop))
            # also dump the properties of each collection property's type
            for prop in v.properties:
                if prop.collection_type:
                    props = [(prop.identifier + "." + prop_sub.identifier, prop_sub) for prop_sub in prop.collection_type.properties]
                    for prop_sub_id, prop_sub in props:
                        data.append(prop_to_str(struct_id_str, prop_sub_id, prop_sub))
        data.sort()
        data.insert(0, "# * properties *")
        return data

    import rna_info
    struct = rna_info.BuildRNAInfo()[0]

    data = []
    if use_functions:
        data.extend(dump_funcs())
    if use_properties:
        data.extend(dump_props())

    if bpy.app.background:
        import sys
        sys.stderr.write("\n".join(data))
        sys.stderr.write("\n\nEOF\n")
    else:
        text = bpy.data.texts.new(name="api.py")
        # NOTE(review): from_string is passed the list itself, not a joined
        # string -- verify bpy accepts a sequence here, else join with "\n".
        text.from_string(data)
    print("END")
if __name__ == "__main__":
    # dump both properties and functions (the parameter defaults)
    api_dump()

View File

@@ -0,0 +1,90 @@
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# Module with function to extract a doctree from an reStructuredText file.
# Named 'Mini' because we only parse the minimum data needed to check
# Python classes, methods and attributes match up to those in existing modules.
# (To test for documentation completeness)
# note: literalinclude's are not followed.
# could be nice to add but not really needed either right now.
import collections
# One parsed ".. <type>:: <value>" directive; nested directives are collected
# into 'members'.
Directive = collections.namedtuple('Directive',
                                   ("type",
                                    "value",
                                    "value_strip",
                                    "line",
                                    "indent",
                                    "members"))


def parse_rst_py(filepath):
    """Parse *filepath* (reStructuredText) and return its top-level directives.

    Each item is a :class:`Directive`; directives indented under another are
    appended to that directive's ``members`` list (literalinclude's are not
    followed).

    :arg filepath: path of the rst file to read (decoded as UTF-8).
    :return: list of top-level Directive items.
    """
    import re

    # Get the prefix assuming the line is lstrip()'d
    # ..foo:: bar
    # -->
    # ("foo", "bar")
    # fix: the character class was "[a-zA-Z09\-]", which matched only the
    # digits 0 and 9 -- clearly a typo for the full "0-9" range.
    re_prefix = re.compile(r"^\.\.\s([a-zA-Z0-9\-]+)::\s*(.*)\s*$")

    tree = collections.defaultdict(list)
    indent_map = {}
    indent_prev = 0

    # 'with' ensures the file is closed even if parsing raises
    with open(filepath, encoding="utf-8") as f:
        for i, line in enumerate(f):
            line_strip = line.lstrip()
            march = re_prefix.match(line_strip)

            if march:
                directive, value = march.group(1, 2)
                indent = len(line) - len(line_strip)
                # first word of the value with parens split off:
                # "Foo(bar)" -> "Foo"
                value_strip = value.replace("(", " ").split()
                value_strip = value_strip[0] if value_strip else ""

                item = Directive(type=directive,
                                 value=value,
                                 value_strip=value_strip,
                                 line=i,
                                 indent=indent,
                                 members=[])

                tree[indent].append(item)
                if indent_prev < indent:
                    indent_map[indent] = indent_prev
                if indent > 0:
                    # attach to the most recent directive one level up
                    tree[indent_map[indent]][-1].members.append(item)
                indent_prev = indent

    return tree[0]
if __name__ == "__main__":
    # not intended use, but may as well print rst files passed as a test.
    import sys
    rst_paths = [a for a in sys.argv if a.lower().endswith((".txt", ".rst"))]
    for filepath in rst_paths:
        for directive in parse_rst_py(filepath):
            print(directive)