Patch set by Folkert Vries (flokkievids) with respect to T37565-patch-v1.zip (F32402).
Applied with minor modifications.
@@ -16,6 +16,10 @@
#
# ##### END GPL LICENSE BLOCK #####

"""
Top-level module containing all Freestyle stylization constructs
"""

# module members
from _freestyle import Operators
from . import chainingiterators, functions, predicates, shaders, types, utils

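The package now exposes everything through named submodules. A minimal sketch of what a style module's import block looks like after this change (the class names are ones defined elsewhere in this patch; the combination is illustrative, not taken from the commit):

from freestyle import Operators
from freestyle.chainingiterators import pyChainSilhouetteIterator
from freestyle.predicates import QuantitativeInvisibilityUP1D
from freestyle.shaders import pyConstantColorShader
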
@@ -16,13 +16,20 @@
#
# ##### END GPL LICENSE BLOCK #####

"""
Chaining iterators used for the chaining operation to construct long
strokes by concatenating feature edges according to selected chaining
rules. Also intended to be a collection of examples for defining
chaining iterators in Python
"""

# module members
from _freestyle import (
    ChainPredicateIterator,
    ChainSilhouetteIterator,
    )

# modules for implementing chaining iterators
# constructs for predicate definition in Python
from freestyle.types import (
    AdjacencyIterator,
    ChainingIterator,
@@ -33,14 +40,18 @@ from freestyle.predicates import (
    ExternalContourUP1D,
    )
from freestyle.utils import ContextFunctions as CF

import bpy

## the natural chaining iterator
## It follows the edges of same nature following the topology of
## objects with preseance on silhouettes, then borders,
## then suggestive contours, then everything else. It doesn't chain the same ViewEdge twice
## You can specify whether to stay in the selection or not.

class pyChainSilhouetteIterator(ChainingIterator):
    """Natural chaining iterator

    Follows the edges of the same nature following the topology of
    objects, with decreasing priority for silhouettes, then borders,
    then suggestive contours, then all other edge types. A ViewEdge
    is only chained once.
    """
    def __init__(self, stayInSelection=True):
        ChainingIterator.__init__(self, stayInSelection, True, None, True)
    def init(self):
@@ -83,14 +94,20 @@ class pyChainSilhouetteIterator(ChainingIterator):
                    break
        return winner

## the natural chaining iterator
## It follows the edges of same nature on the same
## objects with preseance on silhouettes, then borders,
## then suggestive contours, then everything else. It doesn't chain the same ViewEdge twice
## You can specify whether to stay in the selection or not.
## You can specify whether to chain iterate over edges that were
## already visited or not.

class pyChainSilhouetteGenericIterator(ChainingIterator):
    """Natural chaining iterator

    Follows the edges of the same nature following the topology of
    objects, with decreasing priority for silhouettes, then borders,
    then suggestive contours, then all other edge types.

    :arg stayInSelection: True if it is allowed to go out of the selection
    :type stayInSelection: bool
    :arg stayInUnvisited: May the same ViewEdge be chained twice
    :type stayInUnvisited: bool
    """

    def __init__(self, stayInSelection=True, stayInUnvisited=True):
        ChainingIterator.__init__(self, stayInSelection, stayInUnvisited, None, True)
    def init(self):
@@ -137,7 +154,10 @@ class pyChainSilhouetteGenericIterator(ChainingIterator):
                    break
        return winner

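For reference, the protocol these classes follow: the ChainingIterator base constructor takes (restrict to selection, restrict to unvisited edges, begin edge, forward orientation), init() is called once per chain, and traverse() returns the next ViewEdge or None to stop chaining. A hypothetical minimal subclass, for illustration only:

class pyFirstNeighbourChainingIterator(ChainingIterator):
    """Hypothetical: follows the first adjacent ViewEdge, unconditionally."""
    def __init__(self, stayInSelection=True):
        ChainingIterator.__init__(self, stayInSelection, True, None, True)
    def init(self):
        pass  # no per-chain state to reset
    def traverse(self, iter):
        it = AdjacencyIterator(iter)
        return None if it.is_end else it.object
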
class pyExternalContourChainingIterator(ChainingIterator):
    """Chains by external contour"""

    def __init__(self):
        ChainingIterator.__init__(self, False, True, None, True)
        self._isExternalContour = ExternalContourUP1D()
@@ -153,10 +173,11 @@ class pyExternalContourChainingIterator(ChainingIterator):
            while not it.is_end:
                ave = it.object
                if self._isExternalContour(ave):
                    return 1
                    return True
                it.increment()
            print("pyExternlContourChainingIterator : didn't find next edge")
            return 0
            if bpy.app.debug_freestyle:
                print("pyExternalContourChainingIterator : didn't find next edge")
            return False
    def traverse(self, iter):
        winner = None
        it = AdjacencyIterator(iter)
@@ -181,9 +202,18 @@ class pyExternalContourChainingIterator(ChainingIterator):
                it.increment()
        return winner

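Besides fixing the misspelled class name in the message, the hunk above gates the diagnostic print on Blender's Freestyle debug flag, so the console stays quiet unless Blender was started with --debug-freestyle:

import bpy

# only emitted in debug runs (blender --debug-freestyle)
if bpy.app.debug_freestyle:
    print("pyExternalContourChainingIterator : didn't find next edge")
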
## the natural chaining iterator
## with a sketchy multiple touch

class pySketchyChainSilhouetteIterator(ChainingIterator):
    """Natural chaining iterator with a sketchy multiple touch

    Chains the same ViewEdge multiple times to achieve a sketchy effect.

    :arg rounds: Number of times every ViewEdge is chained.
    :type rounds: int
    :arg stayInSelection: if False, edges outside of the selection can be chained.
    :type stayInSelection: bool
    """

    def __init__(self, nRounds=3, stayInSelection=True):
        ChainingIterator.__init__(self, stayInSelection, False, None, True)
        self._timeStamp = CF.get_time_stamp()+nRounds
@@ -237,10 +267,12 @@ class pySketchyChainSilhouetteIterator(ChainingIterator):
        return winner


# Chaining iterator designed for sketchy style.
# can chain several times the same ViewEdge
# in order to produce multiple strokes per ViewEdge.
class pySketchyChainingIterator(ChainingIterator):
    """Chaining iterator designed for sketchy style

    It chains the same ViewEdge several times in order to produce
    multiple strokes per ViewEdge.
    """
    def __init__(self, nRounds=3, stayInSelection=True):
        ChainingIterator.__init__(self, stayInSelection, False, None, True)
        self._timeStamp = CF.get_time_stamp()+nRounds
@@ -272,23 +304,27 @@ class pySketchyChainingIterator(ChainingIterator):
        return winner


## Chaining iterator that fills small occlusions
## percent
## The max length of the occluded part
## expressed in % of the total chain length
class pyFillOcclusionsRelativeChainingIterator(ChainingIterator):
    """Chaining iterator that fills small occlusions

    :arg percent: The maximal length of the occluded part, expressed
        in a percentage of the total chain length.
    :type percent: float
    """

    def __init__(self, percent):
        ChainingIterator.__init__(self, False, True, None, True)
        self._length = 0
        self._percent = float(percent)
    def init(self):
        # each time we're evaluating a chain length
        # we try to do it once. Thus we reinit
        # the chain length here:
        # A chain's length should preferably be evaluated only once.
        # Therefore, the chain length is reset here.
        self._length = 0
    def traverse(self, iter):
        winner = None

        winnerOrientation = False
        winnerOrientation = 0
        #print(self.current_edge.id.first, self.current_edge.id.second)
        it = AdjacencyIterator(iter)
        tvertex = self.next_vertex
@@ -298,10 +334,7 @@ class pyFillOcclusionsRelativeChainingIterator(ChainingIterator):
                ve = it.object
                if ve.id == mateVE.id:
                    winner = ve
                    if not it.is_incoming:
                        winnerOrientation = True
                    else:
                        winnerOrientation = False
                    winnerOrientation = not it.is_incoming
                    break
                it.increment()
        else:
@@ -315,10 +348,7 @@ class pyFillOcclusionsRelativeChainingIterator(ChainingIterator):
                        if (ve.nature & nat) != 0:
                            count = count+1
                            winner = ve
                            if not it.is_incoming:
                                winnerOrientation = True
                            else:
                                winnerOrientation = False
                            winnerOrientation = not it.is_incoming
                        it.increment()
                    if count != 1:
                        winner = None
@@ -359,6 +389,7 @@ class pyFillOcclusionsRelativeChainingIterator(ChainingIterator):
                        # let's do the comparison:
                        # nw let's compute the length of this connex non selected part:
                        connexl = 0

                        _cit = pyChainSilhouetteGenericIterator(False, False)
                        _cit.begin = winner
                        _cit.current_edge = winner
@@ -373,11 +404,13 @@ class pyFillOcclusionsRelativeChainingIterator(ChainingIterator):
                            winner = None
        return winner

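The recurring four-line if/else on it.is_incoming collapses to a single assignment throughout this file. A quick standalone check that both spellings agree for either branch value:

for incoming in (True, False):
    if not incoming:
        old = True
    else:
        old = False
    assert old == (not incoming)
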
## Chaining iterator that fills small occlusions
## size
## The max length of the occluded part
## expressed in pixels

class pyFillOcclusionsAbsoluteChainingIterator(ChainingIterator):
    """Chaining iterator that fills small occlusions

    :arg size: The maximum length of the occluded part in pixels.
    :type size: int
    """
    def __init__(self, length):
        ChainingIterator.__init__(self, False, True, None, True)
        self._length = float(length)
@@ -395,10 +428,7 @@ class pyFillOcclusionsAbsoluteChainingIterator(ChainingIterator):
                ve = it.object
                if ve.id == mateVE.id:
                    winner = ve
                    if not it.is_incoming:
                        winnerOrientation = True
                    else:
                        winnerOrientation = False
                    winnerOrientation = not it.is_incoming
                    break
                it.increment()
        else:
@@ -412,10 +442,7 @@ class pyFillOcclusionsAbsoluteChainingIterator(ChainingIterator):
                        if (ve.nature & nat) != 0:
                            count = count+1
                            winner = ve
                            if not it.is_incoming:
                                winnerOrientation = True
                            else:
                                winnerOrientation = False
                            winnerOrientation = not it.is_incoming
                        it.increment()
                    if count != 1:
                        winner = None
@@ -441,11 +468,14 @@ class pyFillOcclusionsAbsoluteChainingIterator(ChainingIterator):
        return winner


## Chaining iterator that fills small occlusions
## percent
## The max length of the occluded part
## expressed in % of the total chain length
class pyFillOcclusionsAbsoluteAndRelativeChainingIterator(ChainingIterator):
    """Chaining iterator that fills small occlusions regardless of the
    selection

    :arg percent: The maximal length of the occluded part as a
        percentage of the total chain length.
    :type percent: float
    """
    def __init__(self, percent, l):
        ChainingIterator.__init__(self, False, True, None, True)
        self._length = 0
@@ -468,10 +498,7 @@ class pyFillOcclusionsAbsoluteAndRelativeChainingIterator(ChainingIterator):
                ve = it.object
                if ve.id == mateVE.id:
                    winner = ve
                    if not it.is_incoming:
                        winnerOrientation = True
                    else:
                        winnerOrientation = False
                    winnerOrientation = not it.is_incoming
                    break
                it.increment()
        else:
@@ -485,10 +512,7 @@ class pyFillOcclusionsAbsoluteAndRelativeChainingIterator(ChainingIterator):
                        if (ve.nature & nat) != 0:
                            count = count+1
                            winner = ve
                            if not it.is_incoming:
                                winnerOrientation = True
                            else:
                                winnerOrientation = False
                            winnerOrientation = not it.is_incoming
                        it.increment()
                    if count != 1:
                        winner = None
@@ -543,25 +567,28 @@ class pyFillOcclusionsAbsoluteAndRelativeChainingIterator(ChainingIterator):
                            winner = None
        return winner

## Chaining iterator that fills small occlusions without caring about the
## actual selection
## percent
## The max length of the occluded part
## expressed in % of the total chain length

class pyFillQi0AbsoluteAndRelativeChainingIterator(ChainingIterator):
    """Chaining iterator that fills small occlusions regardless of the
    selection

    :arg percent: The maximal length of the occluded part as a
        percentage of the total chain length.
    :type percent: float
    """
    def __init__(self, percent, l):
        ChainingIterator.__init__(self, False, True, None, True)
        self._length = 0
        self._absLength = l
        self._percent = float(percent)
    def init(self):
        # each time we're evaluating a chain length
        # we try to do it once. Thus we reinit
        # the chain length here:
        # A chain's length should preferably be evaluated only once.
        # Therefore, the chain length is reset here.
        self._length = 0
    def traverse(self, iter):
        winner = None
        winnerOrientation = False

        #print(self.current_edge.id.first, self.current_edge.id.second)
        it = AdjacencyIterator(iter)
        tvertex = self.next_vertex
@@ -571,10 +598,7 @@ class pyFillQi0AbsoluteAndRelativeChainingIterator(ChainingIterator):
                ve = it.object
                if ve.id == mateVE.id:
                    winner = ve
                    if not it.is_incoming:
                        winnerOrientation = True
                    else:
                        winnerOrientation = False
                    winnerOrientation = not it.is_incoming
                    break
                it.increment()
        else:
@@ -588,10 +612,7 @@ class pyFillQi0AbsoluteAndRelativeChainingIterator(ChainingIterator):
                        if (ve.nature & nat) != 0:
                            count = count+1
                            winner = ve
                            if not it.is_incoming:
                                winnerOrientation = True
                            else:
                                winnerOrientation = False
                            winnerOrientation = not it.is_incoming
                        it.increment()
                    if count != 1:
                        winner = None
@@ -647,12 +668,18 @@ class pyFillQi0AbsoluteAndRelativeChainingIterator(ChainingIterator):
        return winner


## the natural chaining iterator
## It follows the edges of same nature on the same
## objects with preseance on silhouettes, then borders,
## then suggestive contours, then everything else. It doesn't chain the same ViewEdge twice
## You can specify whether to stay in the selection or not.
class pyNoIdChainSilhouetteIterator(ChainingIterator):
    """Natural chaining iterator

    Follows the edges of the same nature following the topology of
    objects, with decreasing priority for silhouettes, then borders,
    then suggestive contours, then all other edge types. It won't
    chain the same ViewEdge twice.

    :arg stayInSelection: True if it is allowed to go out of the selection
    :type stayInSelection: bool
    """

    def __init__(self, stayInSelection=True):
        ChainingIterator.__init__(self, stayInSelection, True, None, True)
    def init(self):

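In a style module, the iterators above are driven by the Operators machinery. A hedged sketch using the stock invisibility predicates (the call pattern follows the bundled style modules; the parameters are illustrative):

from freestyle import Operators
from freestyle.chainingiterators import pyChainSilhouetteIterator
from freestyle.predicates import NotUP1D, QuantitativeInvisibilityUP1D

Operators.select(QuantitativeInvisibilityUP1D(0))
Operators.bidirectional_chain(pyChainSilhouetteIterator(),
                              NotUP1D(QuantitativeInvisibilityUP1D(0)))
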
@@ -16,6 +16,12 @@
#
# ##### END GPL LICENSE BLOCK #####

"""
Functions operating on vertices (0D elements) and polylines (1D
elements). Also intended to be a collection of examples for function
definition in Python
"""

# module members
from _freestyle import (
    ChainingTimeStampF1D,
@@ -71,7 +77,7 @@ from _freestyle import (
    ZDiscontinuityF1D,
    )

# modules for implementing functions
# constructs for function definition in Python
from freestyle.types import (
    CurvePoint,
    IntegrationType,
@@ -82,15 +88,17 @@ from freestyle.types import (
    )
from freestyle.utils import ContextFunctions as CF
from freestyle.utils import integrate
import math
import mathutils

from mathutils import Vector

## Functions for 0D elements (vertices)
#######################################

class CurveMaterialF0D(UnaryFunction0DMaterial):
    # A replacement of the built-in MaterialF0D for stroke creation.
    # MaterialF0D does not work with Curves and Strokes.
    """
    A replacement of the built-in MaterialF0D for stroke creation.
    MaterialF0D does not work with Curves and Strokes.
    """
    def __call__(self, inter):
        cp = inter.object
        assert(isinstance(cp, CurvePoint))
@@ -110,8 +118,8 @@ class pyCurvilinearLengthF0D(UnaryFunction0DDouble):
        assert(isinstance(cp, CurvePoint))
        return cp.t2d

## estimate anisotropy of density
class pyDensityAnisotropyF0D(UnaryFunction0DDouble):
    """Estimates the anisotropy of density"""
    def __init__(self, level):
        UnaryFunction0DDouble.__init__(self)
        self.IsoDensity = ReadCompleteViewMapPixelF0D(level)
@@ -133,34 +141,36 @@ class pyDensityAnisotropyF0D(UnaryFunction0DDouble):
        v = (cMax-cMin)/c_iso
        return v

## Returns the gradient vector for a pixel
## l
## the level at which one wants to compute the gradient
class pyViewMapGradientVectorF0D(UnaryFunction0DVec2f):
    def __init__(self, l):
    """Returns the gradient vector for a pixel

    :arg level: the level at which to compute the gradient
    :type level: int
    """
    def __init__(self, level):
        UnaryFunction0DVec2f.__init__(self)
        self._l = l
        self._step = math.pow(2,self._l)
        self._l = level
        self._step = pow(2, self._l)
    def __call__(self, iter):
        p = iter.object.point_2d
        gx = CF.read_complete_view_map_pixel(self._l, int(p.x+self._step), int(p.y)) - \
            CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y))
        gy = CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y+self._step)) - \
            CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y))
        return mathutils.Vector([gx, gy])
        return Vector((gx, gy))

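pyViewMapGradientVectorF0D is a forward difference on the view-map density pyramid, with the step doubling per level. The same arithmetic on a plain callable (here `density` is a stand-in for CF.read_complete_view_map_pixel at a fixed level; the mock is an assumption for illustration):

def gradient(density, x, y, step=1):
    # forward difference along x and y
    gx = density(x + step, y) - density(x, y)
    gy = density(x, y + step) - density(x, y)
    return (gx, gy)

# On a linear ramp the gradient is constant:
ramp = lambda x, y: 2 * x + 3 * y
assert gradient(ramp, 10, 20) == (2, 3)
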
class pyViewMapGradientNormF0D(UnaryFunction0DDouble):
    def __init__(self, l):
        UnaryFunction0DDouble.__init__(self)
        self._l = l
        self._step = math.pow(2,self._l)
        self._step = pow(2,self._l)
    def __call__(self, iter):
        p = iter.object.point_2d
        gx = CF.read_complete_view_map_pixel(self._l, int(p.x+self._step), int(p.y)) - \
            CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y))
        gy = CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y+self._step)) - \
            CF.read_complete_view_map_pixel(self._l, int(p.x), int(p.y))
        grad = mathutils.Vector([gx, gy])
        grad = Vector((gx, gy))
        return grad.length

## Functions for 1D elements (curves)

@@ -16,6 +16,12 @@
#
# ##### END GPL LICENSE BLOCK #####

"""
Predicates operating on vertices (0D elements) and polylines (1D
elements). Also intended to be a collection of examples for predicate
definition in Python
"""

# module members
from _freestyle import (
    ContourUP1D,
@@ -37,7 +43,7 @@ from _freestyle import (
    WithinImageBoundaryUP1D,
    )

# modules for implementing predicates
# constructs for predicate definition in Python
from freestyle.types import (
    BinaryPredicate1D,
    IntegrationType,
@@ -66,6 +72,7 @@ from freestyle.functions import (
    )
import random


## Unary predicates for 0D elements (vertices)
##############################################

@@ -105,23 +112,20 @@ class pyBackTVertexUP0D(UnaryPredicate0D):
        self._getQI = QuantitativeInvisibilityF0D()
    def __call__(self, iter):
        if (iter.object.nature & Nature.T_VERTEX) == 0:
            return 0
            return False
        if iter.is_end:
            return 0
            return False
        if self._getQI(iter) != 0:
            return 1
        return 0
            return True
        return False

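With predicates now returning real booleans instead of 0/1, a custom predicate reduces to a single comparison. A hypothetical example in the new style (length_2d is the standard Interface1D length attribute):

class pyLongerThanUP1D(UnaryPredicate1D):
    """Hypothetical: True for chains longer than the given 2D length."""
    def __init__(self, length):
        UnaryPredicate1D.__init__(self)
        self._length = length
    def __call__(self, inter):
        return inter.length_2d > self._length
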
class pyParameterUP0DGoodOne(UnaryPredicate0D):
    def __init__(self,pmin,pmax):
        UnaryPredicate0D.__init__(self)
        self._m = pmin
        self._M = pmax
        #self.getCurvilinearAbscissa = GetCurvilinearAbscissaF0D()
    def __call__(self, inter):
        #s = self.getCurvilinearAbscissa(inter)
        u = inter.u
        #print(u)
        return ((u>=self._m) and (u<=self._M))

class pyParameterUP0D(UnaryPredicate0D):
@@ -129,14 +133,11 @@ class pyParameterUP0D(UnaryPredicate0D):
        UnaryPredicate0D.__init__(self)
        self._m = pmin
        self._M = pmax
        #self.getCurvilinearAbscissa = GetCurvilinearAbscissaF0D()
    def __call__(self, inter):
        func = Curvature2DAngleF0D()
        c = func(inter)
        b1 = (c>0.1)
        #s = self.getCurvilinearAbscissa(inter)
        u = inter.u
        #print(u)
        b = ((u>=self._m) and (u<=self._M))
        return b and b1

@@ -174,8 +175,8 @@ class pyNFirstUP1D(UnaryPredicate1D):
    def __call__(self, inter):
        self.__count = self.__count + 1
        if self.__count <= self.__n:
            return 1
        return 0
            return True
        return False

class pyHigherLengthUP1D(UnaryPredicate1D):
    def __init__(self,l):
@@ -191,8 +192,8 @@ class pyNatureUP1D(UnaryPredicate1D):
        self._getNature = CurveNatureF1D()
    def __call__(self, inter):
        if(self._getNature(inter) & self._nature):
            return 1
        return 0
            return True
        return False

class pyHigherNumberOfTurnsUP1D(UnaryPredicate1D):
    def __init__(self,n,a):
@@ -207,9 +208,9 @@ class pyHigherNumberOfTurnsUP1D(UnaryPredicate1D):
            if func(it) > self._a:
                count = count+1
                if count > self._n:
                    return 1
                    return True
            it.increment()
        return 0
        return False

class pyDensityUP1D(UnaryPredicate1D):
    def __init__(self,wsize,threshold, integration = IntegrationType.MEAN, sampling=2.0):
@@ -219,9 +220,7 @@ class pyDensityUP1D(UnaryPredicate1D):
        self._integration = integration
        self._func = DensityF1D(self._wsize, self._integration, sampling)
    def __call__(self, inter):
        if self._func(inter) < self._threshold:
            return 1
        return 0
        return (self._func(inter) < self._threshold)

class pyLowSteerableViewMapDensityUP1D(UnaryPredicate1D):
    def __init__(self,threshold, level,integration = IntegrationType.MEAN):
@@ -231,11 +230,7 @@ class pyLowSteerableViewMapDensityUP1D(UnaryPredicate1D):
        self._integration = integration
    def __call__(self, inter):
        func = GetSteerableViewMapDensityF1D(self._level, self._integration)
        v = func(inter)
        #print(v)
        if v < self._threshold:
            return 1
        return 0
        return (func(inter) < self._threshold)

class pyLowDirectionalViewMapDensityUP1D(UnaryPredicate1D):
    def __init__(self,threshold, orientation, level,integration = IntegrationType.MEAN):
@@ -246,11 +241,7 @@ class pyLowDirectionalViewMapDensityUP1D(UnaryPredicate1D):
        self._integration = integration
    def __call__(self, inter):
        func = GetDirectionalViewMapDensityF1D(self._orientation, self._level, self._integration)
        v = func(inter)
        #print(v)
        if v < self._threshold:
            return 1
        return 0
        return (func(inter) < self._threshold)

class pyHighSteerableViewMapDensityUP1D(UnaryPredicate1D):
    def __init__(self,threshold, level,integration = IntegrationType.MEAN):
@@ -260,10 +251,7 @@ class pyHighSteerableViewMapDensityUP1D(UnaryPredicate1D):
        self._integration = integration
        self._func = GetSteerableViewMapDensityF1D(self._level, self._integration)
    def __call__(self, inter):
        v = self._func(inter)
        if v > self._threshold:
            return 1
        return 0
        return (self._func(inter) > self._threshold)

class pyHighDirectionalViewMapDensityUP1D(UnaryPredicate1D):
    def __init__(self,threshold, orientation, level,integration = IntegrationType.MEAN, sampling=2.0):
@@ -275,10 +263,7 @@ class pyHighDirectionalViewMapDensityUP1D(UnaryPredicate1D):
        self._sampling = sampling
    def __call__(self, inter):
        func = GetDirectionalViewMapDensityF1D(self._orientation, self._level, self._integration, self._sampling)
        v = func(inter)
        if v > self._threshold:
            return 1
        return 0
        return (func(inter) > self._threshold)

class pyHighViewMapDensityUP1D(UnaryPredicate1D):
    def __init__(self,threshold, level,integration = IntegrationType.MEAN, sampling=2.0):
@@ -289,13 +274,7 @@ class pyHighViewMapDensityUP1D(UnaryPredicate1D):
        self._sampling = sampling
        self._func = GetCompleteViewMapDensityF1D(self._level, self._integration, self._sampling) # 2.0 is the smpling
    def __call__(self, inter):
        #print("toto")
        #print(func.name)
        #print(inter.name)
        v= self._func(inter)
        if v > self._threshold:
            return 1
        return 0
        return (self._func(inter) > self._threshold)

class pyDensityFunctorUP1D(UnaryPredicate1D):
    def __init__(self,wsize,threshold, functor, funcmin=0.0, funcmax=1.0, integration = IntegrationType.MEAN):
@@ -310,9 +289,8 @@ class pyDensityFunctorUP1D(UnaryPredicate1D):
        func = DensityF1D(self._wsize, self._integration)
        res = self._functor(inter)
        k = (res-self._funcmin)/(self._funcmax-self._funcmin)
        if func(inter) < self._threshold*k:
            return 1
        return 0
        return (func(inter) < (self._threshold * k))


class pyZSmallerUP1D(UnaryPredicate1D):
    def __init__(self,z, integration=IntegrationType.MEAN):
@@ -321,9 +299,7 @@ class pyZSmallerUP1D(UnaryPredicate1D):
        self._integration = integration
    def __call__(self, inter):
        func = GetProjectedZF1D(self._integration)
        if func(inter) < self._z:
            return 1
        return 0
        return (func(inter) < self._z)

class pyIsOccludedByUP1D(UnaryPredicate1D):
    def __init__(self,id):
@@ -334,7 +310,7 @@ class pyIsOccludedByUP1D(UnaryPredicate1D):
        shapes = func(inter)
        for s in shapes:
            if(s.id == self._id):
                return 0
                return False
        it = inter.vertices_begin()
        itlast = inter.vertices_end()
        itlast.decrement()
@@ -347,7 +323,7 @@ class pyIsOccludedByUP1D(UnaryPredicate1D):
            while not eit.is_end:
                ve, incoming = eit.object
                if ve.id == self._id:
                    return 1
                    return True
                #print("-------", ve.id.first, "-", ve.id.second)
                eit.increment()
        tvertex = vlast.viewvertex
@@ -357,10 +333,10 @@ class pyIsOccludedByUP1D(UnaryPredicate1D):
            while not eit.is_end:
                ve, incoming = eit.object
                if ve.id == self._id:
                    return 1
                    return True
                #print("-------", ve.id.first, "-", ve.id.second)
                eit.increment()
        return 0
        return False

class pyIsInOccludersListUP1D(UnaryPredicate1D):
    def __init__(self,id):
@@ -371,8 +347,8 @@ class pyIsInOccludersListUP1D(UnaryPredicate1D):
        occluders = func(inter)
        for a in occluders:
            if a.id == self._id:
                return 1
        return 0
                return True
        return False

class pyIsOccludedByItselfUP1D(UnaryPredicate1D):
    def __init__(self):
@@ -385,8 +361,8 @@ class pyIsOccludedByItselfUP1D(UnaryPredicate1D):
        for vs1 in lst1:
            for vs2 in lst2:
                if vs1.id == vs2.id:
                    return 1
        return 0
                    return True
        return False

class pyIsOccludedByIdListUP1D(UnaryPredicate1D):
    def __init__(self, idlist):
@@ -398,8 +374,8 @@ class pyIsOccludedByIdListUP1D(UnaryPredicate1D):
        for vs1 in lst1:
            for _id in self._idlist:
                if vs1.id == _id:
                    return 1
        return 0
                    return True
        return False

class pyShapeIdListUP1D(UnaryPredicate1D):
    def __init__(self,idlist):
@@ -409,10 +385,10 @@ class pyShapeIdListUP1D(UnaryPredicate1D):
        for _id in idlist :
            self._funcs.append(ShapeUP1D(_id.first, _id.second))
    def __call__(self, inter):
        for func in self._funcs :
        for func in self._funcs:
            if func(inter) == 1:
                return 1
        return 0
                return True
        return False

## deprecated
class pyShapeIdUP1D(UnaryPredicate1D):
@@ -424,8 +400,8 @@ class pyShapeIdUP1D(UnaryPredicate1D):
        shapes = func(inter)
        for a in shapes:
            if a.id == self._id:
                return 1
        return 0
                return True
        return False

class pyHighDensityAnisotropyUP1D(UnaryPredicate1D):
    def __init__(self,threshold, level, sampling=2.0):
@@ -460,13 +436,9 @@ class pyDensityVariableSigmaUP1D(UnaryPredicate1D):
    def __call__(self, inter):
        sigma = (self._sigmaMax-self._sigmaMin)/(self._lmax-self._lmin)*(self._functor(inter)-self._lmin) + self._sigmaMin
        t = (self._tmax-self._tmin)/(self._lmax-self._lmin)*(self._functor(inter)-self._lmin) + self._tmin
        if sigma < self._sigmaMin:
            sigma = self._sigmaMin
        sigma = max(sigma, self._sigmaMin)
        self._func = DensityF1D(sigma, self._integration, self._sampling)
        d = self._func(inter)
        if d < t:
            return 1
        return 0
        return (self._func(inter) < t)

class pyClosedCurveUP1D(UnaryPredicate1D):
    def __call__(self, inter):
@@ -478,8 +450,8 @@ class pyClosedCurveUP1D(UnaryPredicate1D):
        #print(v.id.first, v.id.second)
        #print(vlast.id.first, vlast.id.second)
        if v.id == vlast.id:
            return 1
        return 0
            return True
        return False

## Binary predicates for 1D elements (curves)
#############################################

@@ -504,7 +476,7 @@ class pySilhouetteFirstBP1D(BinaryPredicate1D):
    def __call__(self, inter1, inter2):
        bpred = SameShapeIdBP1D()
        if (bpred(inter1, inter2) != 1):
            return 0
            return False
        if (inter1.nature & Nature.SILHOUETTE):
            return (inter2.nature & Nature.SILHOUETTE) != 0
        return (inter1.nature == inter2.nature)

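These predicates plug straight into the selection step of a style module; for instance, keeping only silhouette chains (an illustrative combination, not from the commit):

from freestyle import Operators
from freestyle.predicates import pyNatureUP1D
from freestyle.types import Nature

Operators.select(pyNatureUP1D(Nature.SILHOUETTE))
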
@@ -21,6 +21,11 @@
# Date : 11/08/2005
# Purpose : Stroke shaders to be used for creation of stylized strokes

"""
Stroke shaders used for creation of stylized strokes. Also intended
to be a collection of examples for shader definition in Python
"""

# module members
from _freestyle import (
    BackboneStretcherShader,
@@ -47,7 +52,7 @@ from _freestyle import (
    streamShader,
    )

# modules for implementing shaders
# constructs for shader definition in Python
from freestyle.types import (
    Interface0DIterator,
    Nature,
@@ -69,14 +74,20 @@ from freestyle.predicates import (
    pyVertexNatureUP0D,
    )
from freestyle.utils import ContextFunctions as CF
import math
import mathutils
import random

from math import atan, cos, pi, pow, sin, sinh, sqrt
from mathutils import Vector
from random import randint

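Switching from whole-module imports to from-imports here is not purely cosmetic: local and global name lookups are cheaper than attribute lookups inside the per-vertex loops below. A quick standalone comparison (timings will vary by machine):

from timeit import timeit

print(timeit("math.sin(0.5)", setup="import math"))
print(timeit("sin(0.5)", setup="from math import sin"))
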
## thickness modifiers
######################

class pyDepthDiscontinuityThicknessShader(StrokeShader):
    """
    Assigns a thickness to the stroke based on the stroke's distance
    to the camera (Z-value)
    """
    def __init__(self, min, max):
        StrokeShader.__init__(self)
        self.__min = float(min)
@@ -95,6 +106,9 @@ class pyDepthDiscontinuityThicknessShader(StrokeShader):
            it.increment()

class pyConstantThicknessShader(StrokeShader):
    """
    Assigns a constant thickness along the stroke
    """
    def __init__(self, thickness):
        StrokeShader.__init__(self)
        self._thickness = thickness
@@ -106,6 +120,9 @@ class pyConstantThicknessShader(StrokeShader):
            it.increment()

class pyFXSVaryingThicknessWithDensityShader(StrokeShader):
    """
    Assigns thickness to a stroke based on the density of the diffuse map
    """
    def __init__(self, wsize, threshold_min, threshold_max, thicknessMin, thicknessMax):
        StrokeShader.__init__(self)
        self.wsize= wsize
@@ -131,6 +148,9 @@ class pyFXSVaryingThicknessWithDensityShader(StrokeShader):
            it.increment()

class pyIncreasingThicknessShader(StrokeShader):
    """
    Increasingly thickens the stroke
    """
    def __init__(self, thicknessMin, thicknessMax):
        StrokeShader.__init__(self)
        self._thicknessMin = thicknessMin
@@ -150,6 +170,10 @@ class pyIncreasingThicknessShader(StrokeShader):
            it.increment()

class pyConstrainedIncreasingThicknessShader(StrokeShader):
    """
    Increasingly thickens the stroke, constrained by a ratio of the
    stroke's length
    """
    def __init__(self, thicknessMin, thicknessMax, ratio):
        StrokeShader.__init__(self)
        self._thicknessMin = thicknessMin
@@ -180,6 +204,9 @@ class pyConstrainedIncreasingThicknessShader(StrokeShader):
            it.increment()

class pyDecreasingThicknessShader(StrokeShader):
    """
    Inverse of pyIncreasingThicknessShader, decreasingly thickens the stroke
    """
    def __init__(self, thicknessMin, thicknessMax):
        StrokeShader.__init__(self)
        self._thicknessMin = thicknessMin
@@ -203,6 +230,9 @@ class pyDecreasingThicknessShader(StrokeShader):
            it.increment()

class pyNonLinearVaryingThicknessShader(StrokeShader):
    """
    Assigns thickness to a stroke based on an exponential function
    """
    def __init__(self, thicknessExtremity, thicknessMiddle, exponent):
        StrokeShader.__init__(self)
        self._thicknessMin = thicknessMiddle
@@ -223,10 +253,12 @@ class pyNonLinearVaryingThicknessShader(StrokeShader):
            i = i+1
            it.increment()
    def smoothC(self, a, exp):
        return math.pow(float(a), exp) * math.pow(2.0, exp)
        return pow(float(a), exp) * pow(2.0, exp)

## Spherical linear interpolation (cos)
class pySLERPThicknessShader(StrokeShader):
    """
    Assigns thickness to a stroke based on spherical linear interpolation
    """
    def __init__(self, thicknessMin, thicknessMax, omega=1.2):
        StrokeShader.__init__(self)
        self._thicknessMin = thicknessMin
@@ -244,14 +276,17 @@ class pySLERPThicknessShader(StrokeShader):
        while not it.is_end:
            c = float(i)/float(n)
            if i < float(n)/2.0:
                t = math.sin((1-c)*self._omega)/math.sinh(self._omega)*self._thicknessMin + math.sin(c*self._omega)/math.sinh(self._omega) * maxT
                t = sin((1-c)*self._omega)/sinh(self._omega)*self._thicknessMin + sin(c*self._omega)/sinh(self._omega) * maxT
            else:
                t = math.sin((1-c)*self._omega)/math.sinh(self._omega)*maxT + math.sin(c*self._omega)/math.sinh(self._omega) * self._thicknessMin
                t = sin((1-c)*self._omega)/sinh(self._omega)*maxT + sin(c*self._omega)/sinh(self._omega) * self._thicknessMin
            it.object.attribute.thickness = (t/2.0, t/2.0)
            i = i+1
            it.increment()

class pyTVertexThickenerShader(StrokeShader): ## FIXME
    """
    Thickens TVertices (visual intersections between two edges)
    """
    def __init__(self, a=1.5, n=3):
        StrokeShader.__init__(self)
        self._a = a
@@ -299,6 +334,11 @@ class pyTVertexThickenerShader(StrokeShader): ## FIXME
            it.increment()

class pyImportance2DThicknessShader(StrokeShader):
    """
    Assigns thickness based on distance to a given point in 2D space.
    The thickness is inverted, so the vertices closest to the
    specified point have the lowest thickness
    """
    def __init__(self, x, y, w, kmin, kmax):
        StrokeShader.__init__(self)
        self._x = x
@@ -307,12 +347,11 @@ class pyImportance2DThicknessShader(StrokeShader):
        self._kmin = float(kmin)
        self._kmax = float(kmax)
    def shade(self, stroke):
        origin = mathutils.Vector([self._x, self._y])
        origin = Vector((self._x, self._y))
        it = stroke.stroke_vertices_begin()
        while not it.is_end:
            v = it.object
            p = mathutils.Vector([v.projected_x, v.projected_y])
            d = (p-origin).length
            d = (v.point_2d - origin).length
            if d > self._w:
                k = self._kmin
            else:
@@ -323,6 +362,9 @@ class pyImportance2DThicknessShader(StrokeShader):
            it.increment()

class pyImportance3DThicknessShader(StrokeShader):
    """
    Assigns thickness based on distance to a given point in 3D space
    """
    def __init__(self, x, y, z, w, kmin, kmax):
        StrokeShader.__init__(self)
        self._x = x
@@ -332,7 +374,7 @@ class pyImportance3DThicknessShader(StrokeShader):
        self._kmin = float(kmin)
        self._kmax = float(kmax)
    def shade(self, stroke):
        origin = mathutils.Vector([self._x, self._y, self._z])
        origin = Vector((self._x, self._y, self._z))
        it = stroke.stroke_vertices_begin()
        while not it.is_end:
            v = it.object
@@ -348,6 +390,10 @@ class pyImportance3DThicknessShader(StrokeShader):
            it.increment()

class pyZDependingThicknessShader(StrokeShader):
    """
    Assigns thickness based on an object's local Z depth (point
    closest to camera is 1, point furthest from camera is zero)
    """
    def __init__(self, min, max):
        StrokeShader.__init__(self)
        self.__min = min
@@ -377,6 +423,9 @@ class pyZDependingThicknessShader(StrokeShader):
##################

class pyConstantColorShader(StrokeShader):
    """
    Assigns a constant color to the stroke
    """
    def __init__(self,r,g,b, a = 1):
        StrokeShader.__init__(self)
        self._r = r
@@ -391,8 +440,11 @@ class pyConstantColorShader(StrokeShader):
            att.alpha = self._a
            it.increment()

#c1->c2

class pyIncreasingColorShader(StrokeShader):
    """
    Fades from one color to another along the stroke
    """
    def __init__(self,r1,g1,b1,a1, r2,g2,b2,a2):
        StrokeShader.__init__(self)
        self._c1 = [r1,g1,b1,a1]
@@ -412,8 +464,11 @@ class pyIncreasingColorShader(StrokeShader):
            inc = inc+1
            it.increment()

# c1->c2->c1

class pyInterpolateColorShader(StrokeShader):
    """
    Fades from one color to another and back
    """
    def __init__(self,r1,g1,b1,a1, r2,g2,b2,a2):
        StrokeShader.__init__(self)
        self._c1 = [r1,g1,b1,a1]
@@ -425,7 +480,7 @@ class pyInterpolateColorShader(StrokeShader):
        while not it.is_end:
            att = it.object.attribute
            u = float(inc)/float(n)
            c = 1-2*(math.fabs(u-0.5))
            c = 1 - 2 * abs(u - 0.5)
            att.color = ((1-c)*self._c1[0] + c*self._c2[0],
                         (1-c)*self._c1[1] + c*self._c2[1],
                         (1-c)*self._c1[2] + c*self._c2[2])
@@ -434,6 +489,9 @@ class pyInterpolateColorShader(StrokeShader):
            it.increment()

class pyMaterialColorShader(StrokeShader):
    """
    Assigns the color of the underlying material to the stroke
    """
    def __init__(self, threshold=50):
        StrokeShader.__init__(self)
        self._threshold = threshold
@@ -443,8 +501,8 @@ class pyMaterialColorShader(StrokeShader):
        xn = 0.312713
        yn = 0.329016
        Yn = 1.0
        un = 4.* xn/ ( -2.*xn + 12.*yn + 3. )
        vn= 9.* yn/ ( -2.*xn + 12.*yn +3. )
        un = 4.* xn / (-2.*xn + 12.*yn + 3.)
        vn= 9.* yn / (-2.*xn + 12.*yn +3.)
        while not it.is_end:
            mat = func(Interface0DIterator(it))

@@ -456,14 +514,14 @@ class pyMaterialColorShader(StrokeShader):
            Y = 0.212671*r + 0.71516 *g + 0.072169*b
            Z = 0.019334*r + 0.119193*g + 0.950227*b

            if X == 0 and Y == 0 and Z == 0:
            if (X, Y, Z) == (0, 0, 0):
                X = 0.01
                Y = 0.01
                Z = 0.01
            u = 4.*X / (X + 15.*Y + 3.*Z)
            v = 9.*Y / (X + 15.*Y + 3.*Z)

            L= 116. * math.pow((Y/Yn),(1./3.)) -16
            L= 116. * pow((Y/Yn),(1./3.)) -16
            U = 13. * L * (u - un)
            V = 13. * L * (v - vn)

@@ -477,7 +535,7 @@ class pyMaterialColorShader(StrokeShader):
            u = U / (13. * L) + un
            v = V / (13. * L) + vn

            Y = Yn * math.pow( ((L+16.)/116.), 3.)
            Y = Yn * pow(((L+16.)/116.), 3.)
            X = -9. * Y * u / ((u - 4.)* v - u * v)
            Z = (9. * Y - 15*v*Y - v*X) /( 3. * v)

@@ -493,6 +551,9 @@ class pyMaterialColorShader(StrokeShader):
            it.increment()

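pyMaterialColorShader works in CIE Luv so that the threshold applies to perceptual lightness. The forward conversion, pulled out of the loop above into a standalone sketch (the X row of the RGB-to-XYZ matrix does not appear in the hunk; the standard sRGB/D65 coefficients are assumed here):

def rgb_to_luv(r, g, b):
    # linear RGB -> CIE XYZ (D65)
    X = 0.412453*r + 0.35758*g + 0.180423*b  # assumed row
    Y = 0.212671*r + 0.71516*g + 0.072169*b
    Z = 0.019334*r + 0.119193*g + 0.950227*b
    if (X, Y, Z) == (0, 0, 0):
        X = Y = Z = 0.01
    # white point, as in the shader
    xn, yn, Yn = 0.312713, 0.329016, 1.0
    un = 4.*xn / (-2.*xn + 12.*yn + 3.)
    vn = 9.*yn / (-2.*xn + 12.*yn + 3.)
    u = 4.*X / (X + 15.*Y + 3.*Z)
    v = 9.*Y / (X + 15.*Y + 3.*Z)
    L = 116. * pow(Y/Yn, 1./3.) - 16
    return L, 13.*L*(u - un), 13.*L*(v - vn)
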
class pyRandomColorShader(StrokeShader):
    """
    Assigns a color to the stroke based on given seed
    """
    def __init__(self, s=1):
        StrokeShader.__init__(self)
        random.seed(s)
@@ -508,6 +569,10 @@ class pyRandomColorShader(StrokeShader):
            it.increment()

class py2DCurvatureColorShader(StrokeShader):
    """
    Assigns a color (greyscale) to the stroke based on the curvature.
    A higher curvature will yield a brighter color
    """
    def shade(self, stroke):
        it = stroke.stroke_vertices_begin()
        func = Curvature2DAngleF0D()
@@ -520,21 +585,25 @@ class py2DCurvatureColorShader(StrokeShader):
            it.increment()

class pyTimeColorShader(StrokeShader):
    """
    Assigns a greyscale value that increases for every vertex.
    The brightness will increase along the stroke
    """
    def __init__(self, step=0.01):
        StrokeShader.__init__(self)
        self._t = 0
        self._step = step
    def shade(self, stroke):
        c = self._t*1.0
        it = stroke.stroke_vertices_begin()
        while not it.is_end:
            it.object.attribute.color = (c,c,c)
            it.increment()
        self._t = self._t+self._step
        for i, svert in enumerate(iter(stroke)):
            c = i * self._step
            svert.attribute.color = (c,c,c)

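The pyTimeColorShader rewrite also shows the newer iteration idiom: strokes are iterable, so shaders can drop the explicit iterator/increment dance. The same pattern in a minimal hypothetical shader:

class pyGreyShader(StrokeShader):
    """Hypothetical: paints every stroke vertex mid-grey."""
    def shade(self, stroke):
        for svert in stroke:
            svert.attribute.color = (0.5, 0.5, 0.5)
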
## geometry modifiers

class pySamplingShader(StrokeShader):
    """
    Resamples the stroke, which gives the stroke the amount of
    vertices specified
    """
    def __init__(self, sampling):
        StrokeShader.__init__(self)
        self._sampling = sampling
@@ -543,6 +612,9 @@ class pySamplingShader(StrokeShader):
        stroke.update_length()

class pyBackboneStretcherShader(StrokeShader):
    """
    Stretches the stroke's backbone by a given length (in pixels)
    """
    def __init__(self, l):
        StrokeShader.__init__(self)
        self._l = l
@@ -558,14 +630,12 @@ class pyBackboneStretcherShader(StrokeShader):
        v1 = it1.object
        vn_1 = itn_1.object
        vn = itn.object
        p0 = mathutils.Vector([v0.projected_x, v0.projected_y])
        pn = mathutils.Vector([vn.projected_x, vn.projected_y])
        p1 = mathutils.Vector([v1.projected_x, v1.projected_y])
        pn_1 = mathutils.Vector([vn_1.projected_x, vn_1.projected_y])
        d1 = p0-p1
        d1.normalize()
        dn = pn-pn_1
        dn.normalize()
        p0 = v0.point_2d
        pn = vn.point_2d
        p1 = v1.point_2d
        pn_1 = vn_1.point_2d
        d1 = (p0 - p1).normalized()
        dn = (pn - pn_1).normalized()
        newFirst = p0+d1*float(self._l)
        newLast = pn+dn*float(self._l)
        v0.point = newFirst
@@ -573,6 +643,9 @@ class pyBackboneStretcherShader(StrokeShader):
        stroke.update_length()

class pyLengthDependingBackboneStretcherShader(StrokeShader):
    """
    Stretches the stroke's backbone proportional to the stroke's length
    """
    def __init__(self, l):
        StrokeShader.__init__(self)
        self._l = l
@@ -590,14 +663,12 @@ class pyLengthDependingBackboneStretcherShader(StrokeShader):
        v1 = it1.object
        vn_1 = itn_1.object
        vn = itn.object
        p0 = mathutils.Vector([v0.projected_x, v0.projected_y])
        pn = mathutils.Vector([vn.projected_x, vn.projected_y])
        p1 = mathutils.Vector([v1.projected_x, v1.projected_y])
        pn_1 = mathutils.Vector([vn_1.projected_x, vn_1.projected_y])
        d1 = p0-p1
        d1.normalize()
        dn = pn-pn_1
        dn.normalize()
        p0 = v0.point_2d
        pn = vn.point_2d
        p1 = v1.point_2d
        pn_1 = vn_1.point_2d
        d1 = (p0 - p1).normalized()
        dn = (pn - pn_1).normalized()
        newFirst = p0+d1*float(stretch)
        newLast = pn+dn*float(stretch)
        v0.point = newFirst
@@ -605,8 +676,11 @@ class pyLengthDependingBackboneStretcherShader(StrokeShader):
        stroke.update_length()


## Shader to replace a stroke by its corresponding tangent

class pyGuidingLineShader(StrokeShader):
    """
    Replaces the stroke by its corresponding tangent
    """
    def shade(self, stroke):
        it = stroke.stroke_vertices_begin()  ## get the first vertex
        itlast = stroke.stroke_vertices_end()  ##
@@ -630,6 +704,9 @@


class pyBackboneStretcherNoCuspShader(StrokeShader):
    """
    Stretches the stroke's backbone, excluding cusp vertices (end junctions)
    """
    def __init__(self, l):
        StrokeShader.__init__(self)
        self._l = l
@@ -644,28 +721,24 @@ class pyBackboneStretcherNoCuspShader(StrokeShader):
        v0 = it0.object
        v1 = it1.object
        if (v0.nature & Nature.CUSP) == 0 and (v1.nature & Nature.CUSP) == 0:
            p0 = v0.point
            p1 = v1.point
            d1 = p0-p1
            d1.normalize()
            d1 = (v0.point - v1.point).normalized()
            newFirst = p0+d1*float(self._l)
            v0.point = newFirst
        vn_1 = itn_1.object
        vn = itn.object
        if (vn.nature & Nature.CUSP) == 0 and (vn_1.nature & Nature.CUSP) == 0:
            pn = vn.point
            pn_1 = vn_1.point
            dn = pn-pn_1
            dn.normalize()
            newLast = pn+dn*float(self._l)
            dn = (vn.point - vn_1.point).normalized()
            newLast = vn.point + dn * float(self._l)
            vn.point = newLast
        stroke.update_length()

class pyDiffusion2Shader(StrokeShader):
    """This shader iteratively adds an offset to the position of each
    stroke vertex in the direction perpendicular to the stroke direction
    at the point. The offset is scaled by the 2D curvature (i.e., how
    quickly the stroke curve is) at the point."""
    """
    Iteratively adds an offset to the position of each stroke vertex
    in the direction perpendicular to the stroke direction at the
    point. The offset is scaled by the 2D curvature (i.e. how quickly
    the stroke curve is) at the point.
    """
    def __init__(self, lambda1, nbIter):
        StrokeShader.__init__(self)
        self._lambda = lambda1
@@ -684,6 +757,9 @@ class pyDiffusion2Shader(StrokeShader):
        stroke.update_length()

class pyTipRemoverShader(StrokeShader):
    """
    Removes the tips of the stroke
    """
    def __init__(self, l):
        StrokeShader.__init__(self)
        self._l = l
@@ -717,6 +793,9 @@ class pyTipRemoverShader(StrokeShader):
        stroke.update_length()

class pyTVertexRemoverShader(StrokeShader):
    """
    Removes t-vertices from the stroke
    """
    def shade(self, stroke):
        if stroke.stroke_vertices_size() <= 3:
            return
@@ -733,14 +812,18 @@ class pyTVertexRemoverShader(StrokeShader):
#class pyExtremitiesOrientationShader(StrokeShader):
#    def __init__(self, x1,y1,x2=0,y2=0):
#        StrokeShader.__init__(self)
#        self._v1 = mathutils.Vector([x1,y1])
#        self._v2 = mathutils.Vector([x2,y2])
#        self._v1 = Vector((x1,y1))
#        self._v2 = Vector((x2,y2))
#    def shade(self, stroke):
#        #print(self._v1.x,self._v1.y)
#        stroke.setBeginningOrientation(self._v1.x,self._v1.y)
#        stroke.setEndingOrientation(self._v2.x,self._v2.y)

class pyHLRShader(StrokeShader):
    """
    Controls visibility based upon the quantitative invisibility (QI)
    based on hidden line removal (HLR)
    """
    def shade(self, stroke):
        originalSize = stroke.stroke_vertices_size()
        if originalSize < 4:
@@ -770,6 +853,7 @@ class pyHLRShader(StrokeShader):
    def get_fedge(self, it1, it2):
        return it1.get_fedge(it2)

# broken and a mess
class pyTVertexOrientationShader(StrokeShader):
    def __init__(self):
        StrokeShader.__init__(self)
@@ -846,6 +930,9 @@ class pyTVertexOrientationShader(StrokeShader):
        return it1.get_fedge(it2)

class pySinusDisplacementShader(StrokeShader):
    """
    Displaces the stroke in the shape of a sine wave
    """
    def __init__(self, f, a):
        StrokeShader.__init__(self)
        self._f = f
@@ -859,15 +946,20 @@ class pySinusDisplacementShader(StrokeShader):
            n = self._getNormal(Interface0DIterator(it))
            p = v.point
            u = v.u
            a = self._a*(1-2*(math.fabs(u-0.5)))
            n = n*a*math.cos(self._f*u*6.28)
            a = self._a*(1-2*(abs(u-0.5)))
            n = n*a*cos(self._f*u*6.28)
            #print(n.x, n.y)
            v.point = p+n
            #v.point = v.point+n*a*math.cos(f*v.u)
            #v.point = v.point+n*a*cos(f*v.u)
            it.increment()
        stroke.update_length()

class pyPerlinNoise1DShader(StrokeShader):
    """
    Displaces the stroke using the curvilinear abscissa. This means
    that lines with the same length and sampling interval will be
    identically distorted
    """
    def __init__(self, freq = 10, amp = 10, oct = 4, seed = -1):
        StrokeShader.__init__(self)
        self.__noise = Noise(seed)
@@ -885,6 +977,13 @@ class pyPerlinNoise1DShader(StrokeShader):
        stroke.update_length()

class pyPerlinNoise2DShader(StrokeShader):
    """
    Displaces the stroke using the stroke's coordinates. This means
    that in a scene no strokes will be distorted identically

    More information on the noise shaders can be found at
    freestyleintegration.wordpress.com/2011/09/25/development-updates-on-september-25/
    """
    def __init__(self, freq = 10, amp = 10, oct = 4, seed = -1):
        StrokeShader.__init__(self)
        self.__noise = Noise(seed)
@@ -895,13 +994,15 @@ class pyPerlinNoise2DShader(StrokeShader):
        it = stroke.stroke_vertices_begin()
        while not it.is_end:
            v = it.object
            vec = mathutils.Vector([v.projected_x, v.projected_y])
            nres = self.__noise.turbulence2(vec, self.__freq, self.__amp, self.__oct)
            nres = self.__noise.turbulence2(v.point_2d, self.__freq, self.__amp, self.__oct)
            v.point = (v.projected_x + nres, v.projected_y + nres)
            it.increment()
        stroke.update_length()

class pyBluePrintCirclesShader(StrokeShader):
    """
    Draws the silhouette of the object as a circle
    """
    def __init__(self, turns = 1, random_radius = 3, random_center = 5):
        StrokeShader.__init__(self)
        self.__turns = turns
@@ -934,7 +1035,7 @@ class pyBluePrintCirclesShader(StrokeShader):
        sv_nb = sv_nb // self.__turns
        center = (p_min + p_max) / 2
        radius = (center.x - p_min.x + center.y - p_min.y) / 2
        p_new = mathutils.Vector([0, 0])
        p_new = Vector((0,0))
        #######################################################
        R = self.__random_radius
        C = self.__random_center
@@ -943,14 +1044,14 @@ class pyBluePrintCirclesShader(StrokeShader):
        for j in range(self.__turns):
            prev_radius = radius
            prev_center = center
            radius = radius + random.randint(-R, R)
            center = center + mathutils.Vector([random.randint(-C, C), random.randint(-C, C)])
            radius = radius + randint(-R, R)
            center = center + Vector((randint(-C, C), randint(-C, C)))
            while i < sv_nb and not it.is_end:
                t = float(i) / float(sv_nb - 1)
                r = prev_radius + (radius - prev_radius) * t
                c = prev_center + (center - prev_center) * t
                p_new.x = c.x + r * math.cos(2 * math.pi * t)
                p_new.y = c.y + r * math.sin(2 * math.pi * t)
                p_new.x = c.x + r * cos(2 * pi * t)
                p_new.y = c.y + r * sin(2 * pi * t)
                it.object.point = p_new
                i = i + 1
                it.increment()
@@ -964,6 +1065,9 @@ class pyBluePrintCirclesShader(StrokeShader):
        stroke.update_length()

class pyBluePrintEllipsesShader(StrokeShader):
    """
    Draws the silhouette of the object as an ellipse
    """
    def __init__(self, turns = 1, random_radius = 3, random_center = 5):
        StrokeShader.__init__(self)
        self.__turns = turns
@@ -991,7 +1095,7 @@ class pyBluePrintEllipsesShader(StrokeShader):
        sv_nb = sv_nb // self.__turns
        center = (p_min + p_max) / 2
        radius = center - p_min
        p_new = mathutils.Vector([0, 0])
        p_new = Vector((0,0))
        #######################################################
        R = self.__random_radius
        C = self.__random_center
@@ -1000,14 +1104,14 @@ class pyBluePrintEllipsesShader(StrokeShader):
        for j in range(self.__turns):
            prev_radius = radius
            prev_center = center
            radius = radius + mathutils.Vector([random.randint(-R, R), random.randint(-R, R)])
            center = center + mathutils.Vector([random.randint(-C, C), random.randint(-C, C)])
            radius = radius + Vector((randint(-R, R), randint(-R, R)))
            center = center + Vector((randint(-C, C), randint(-C, C)))
            while i < sv_nb and not it.is_end:
                t = float(i) / float(sv_nb - 1)
                r = prev_radius + (radius - prev_radius) * t
                c = prev_center + (center - prev_center) * t
                p_new.x = c.x + r.x * math.cos(2 * math.pi * t)
                p_new.y = c.y + r.y * math.sin(2 * math.pi * t)
                p_new.x = c.x + r.x * cos(2 * pi * t)
                p_new.y = c.y + r.y * sin(2 * pi * t)
                it.object.point = p_new
                i = i + 1
                it.increment()
@@ -1022,6 +1126,9 @@ class pyBluePrintEllipsesShader(StrokeShader):


class pyBluePrintSquaresShader(StrokeShader):
    """
    Draws the silhouette of the object as a square
    """
    def __init__(self, turns = 1, bb_len = 10, bb_rand = 0):
        StrokeShader.__init__(self)
        self.__turns = turns
@@ -1052,28 +1159,28 @@ class pyBluePrintSquaresShader(StrokeShader):
        second = 2 * first
        third = 3 * first
        fourth = sv_nb
        p_first = mathutils.Vector([p_min.x - self.__bb_len, p_min.y])
        p_first_end = mathutils.Vector([p_max.x + self.__bb_len, p_min.y])
        p_second = mathutils.Vector([p_max.x, p_min.y - self.__bb_len])
        p_second_end = mathutils.Vector([p_max.x, p_max.y + self.__bb_len])
        p_third = mathutils.Vector([p_max.x + self.__bb_len, p_max.y])
        p_third_end = mathutils.Vector([p_min.x - self.__bb_len, p_max.y])
        p_fourth = mathutils.Vector([p_min.x, p_max.y + self.__bb_len])
        p_fourth_end = mathutils.Vector([p_min.x, p_min.y - self.__bb_len])
        p_first = Vector((p_min.x - self.__bb_len, p_min.y))
        p_first_end = Vector((p_max.x + self.__bb_len, p_min.y))
        p_second = Vector((p_max.x, p_min.y - self.__bb_len))
        p_second_end = Vector((p_max.x, p_max.y + self.__bb_len))
        p_third = Vector((p_max.x + self.__bb_len, p_max.y))
        p_third_end = Vector((p_min.x - self.__bb_len, p_max.y))
        p_fourth = Vector((p_min.x, p_max.y + self.__bb_len))
        p_fourth_end = Vector((p_min.x, p_min.y - self.__bb_len))
        #######################################################
        R = self.__bb_rand
        r = self.__bb_rand // 2
        it = stroke.stroke_vertices_begin()
        visible = True
        for j in range(self.__turns):
            p_first = p_first + mathutils.Vector([random.randint(-R, R), random.randint(-r, r)])
            p_first_end = p_first_end + mathutils.Vector([random.randint(-R, R), random.randint(-r, r)])
            p_second = p_second + mathutils.Vector([random.randint(-r, r), random.randint(-R, R)])
            p_second_end = p_second_end + mathutils.Vector([random.randint(-r, r), random.randint(-R, R)])
            p_third = p_third + mathutils.Vector([random.randint(-R, R), random.randint(-r, r)])
            p_third_end = p_third_end + mathutils.Vector([random.randint(-R, R), random.randint(-r, r)])
            p_fourth = p_fourth + mathutils.Vector([random.randint(-r, r), random.randint(-R, R)])
            p_fourth_end = p_fourth_end + mathutils.Vector([random.randint(-r, r), random.randint(-R, R)])
            p_first = p_first + Vector((randint(-R, R), randint(-r, r)))
            p_first_end = p_first_end + Vector((randint(-R, R), randint(-r, r)))
            p_second = p_second + Vector((randint(-r, r), randint(-R, R)))
            p_second_end = p_second_end + Vector((randint(-r, r), randint(-R, R)))
            p_third = p_third + Vector((randint(-R, R), randint(-r, r)))
            p_third_end = p_third_end + Vector((randint(-R, R), randint(-r, r)))
            p_fourth = p_fourth + Vector((randint(-r, r), randint(-R, R)))
            p_fourth_end = p_fourth_end + Vector((randint(-r, r), randint(-R, R)))
            vec_first = p_first_end - p_first
            vec_second = p_second_end - p_second
            vec_third = p_third_end - p_third
@@ -1116,7 +1223,7 @@ class pyBluePrintSquaresShader(StrokeShader):
            stroke.remove_vertex(sv)
        stroke.update_length()


# needs a docstring
class pyBluePrintDirectedSquaresShader(StrokeShader):
    def __init__(self, turns = 1, bb_len = 10, mult = 1):
        StrokeShader.__init__(self)
@@ -1125,7 +1232,7 @@ class pyBluePrintDirectedSquaresShader(StrokeShader):
        self.__bb_len = 1 + float(bb_len) / 100
    def shade(self, stroke):
        stroke.resample(32 * self.__turns)
        p_mean = mathutils.Vector([0, 0])
        p_mean = Vector((0, 0))
        it = stroke.stroke_vertices_begin()
        while not it.is_end:
            p = it.object.point
@@ -1139,8 +1246,8 @@ class pyBluePrintDirectedSquaresShader(StrokeShader):
        it = stroke.stroke_vertices_begin()
        while not it.is_end:
            p = it.object.point
            p_var_xx = p_var_xx + math.pow(p.x - p_mean.x, 2)
            p_var_yy = p_var_yy + math.pow(p.y - p_mean.y, 2)
            p_var_xx = p_var_xx + pow(p.x - p_mean.x, 2)
            p_var_yy = p_var_yy + pow(p.y - p_mean.y, 2)
            p_var_xy = p_var_xy + (p.x - p_mean.x) * (p.y - p_mean.y)
            it.increment()
        p_var_xx = p_var_xx / sv_nb
@@ -1149,18 +1256,18 @@ class pyBluePrintDirectedSquaresShader(StrokeShader):
        ## print(p_var_xx, p_var_yy, p_var_xy)
        trace = p_var_xx + p_var_yy
        det = p_var_xx * p_var_yy - p_var_xy * p_var_xy
        sqrt_coeff = math.sqrt(trace * trace - 4 * det)
        sqrt_coeff = sqrt(trace * trace - 4 * det)
        lambda1 = (trace + sqrt_coeff) / 2
        lambda2 = (trace - sqrt_coeff) / 2
        ## print(lambda1, lambda2)
        theta = math.atan(2 * p_var_xy / (p_var_xx - p_var_yy)) / 2
        theta = atan(2 * p_var_xy / (p_var_xx - p_var_yy)) / 2
        ## print(theta)
        if p_var_yy > p_var_xx:
            e1 = mathutils.Vector([math.cos(theta + math.pi / 2), math.sin(theta + math.pi / 2)]) * math.sqrt(lambda1) * self.__mult
            e2 = mathutils.Vector([math.cos(theta + math.pi), math.sin(theta + math.pi)]) * math.sqrt(lambda2) * self.__mult
            e1 = Vector((cos(theta + pi / 2), sin(theta + pi / 2))) * sqrt(lambda1) * self.__mult
            e2 = Vector((cos(theta + pi), sin(theta + pi))) * sqrt(lambda2) * self.__mult
        else:
            e1 = mathutils.Vector([math.cos(theta), math.sin(theta)]) * math.sqrt(lambda1) * self.__mult
            e2 = mathutils.Vector([math.cos(theta + math.pi / 2), math.sin(theta + math.pi / 2)]) * math.sqrt(lambda2) * self.__mult
            e1 = Vector((cos(theta), sin(theta))) * sqrt(lambda1) * self.__mult
            e2 = Vector((cos(theta + pi / 2), sin(theta + pi / 2))) * sqrt(lambda2) * self.__mult
        #######################################################
        sv_nb = sv_nb // self.__turns
        first = sv_nb // 4
@@ -1168,7 +1275,7 @@ class pyBluePrintDirectedSquaresShader(StrokeShader):
        third = 3 * first
        fourth = sv_nb
        bb_len1 = self.__bb_len
        bb_len2 = 1 + (bb_len1 - 1) * math.sqrt(lambda1 / lambda2)
        bb_len2 = 1 + (bb_len1 - 1) * sqrt(lambda1 / lambda2)
        p_first = p_mean - e1 - e2 * bb_len2
        p_second = p_mean - e1 * bb_len1 + e2
        p_third = p_mean + e1 + e2 * bb_len2
@@ -1214,6 +1321,9 @@ class pyBluePrintDirectedSquaresShader(StrokeShader):
        stroke.update_length()

class pyModulateAlphaShader(StrokeShader):
    """
    Limits the stroke's alpha between a min and max value
    """
    def __init__(self, min = 0, max = 1):
        StrokeShader.__init__(self)
        self.__min = min

@@ -16,6 +16,10 @@
#
# ##### END GPL LICENSE BLOCK #####

"""
Submodule containing all Freestyle types
"""

# module members
from _freestyle import (
    AdjacencyIterator,

@@ -16,6 +16,10 @@
#
# ##### END GPL LICENSE BLOCK #####

"""
Helper functions used for Freestyle style module writing
"""

# module members
from _freestyle import (
    ContextFunctions,

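The utils module re-exports ContextFunctions, which the iterators and shaders above rely on. Typical calls inside a style module, with the signatures as used throughout this patch:

from freestyle.utils import ContextFunctions as CF

ts = CF.get_time_stamp()  # current chaining time stamp
# complete view-map density at pyramid level 0, pixel (10, 20):
d = CF.read_complete_view_map_pixel(0, 10, 20)
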