/*
Bullet Continuous Collision Detection and Physics Library
Copyright (c) 2003-2006 Erwin Coumans  http://continuousphysics.com/Bullet/

This software is provided 'as-is', without any express or implied warranty.
In no event will the authors be held liable for any damages arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it freely,
subject to the following restrictions:

1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
# include "btQuantizedBvh.h"
# include "LinearMath/btAabbUtil2.h"
# include "LinearMath/btIDebugDraw.h"
# include "LinearMath/btSerializer.h"
# define RAYAABB2
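//Added note: RAYAABB2 selects the ray/AABB slab test that uses a precomputed inverse ray
//direction and per-axis sign flags (btRayAabb2) instead of the plain btRayAabb test;
//a sketch of that formulation appears after walkStacklessTreeAgainstRay below.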
btQuantizedBvh::btQuantizedBvh() : m_bulletVersion(BT_BULLET_VERSION),
								   m_useQuantization(false),
								   //m_traversalMode(TRAVERSAL_STACKLESS_CACHE_FRIENDLY)
								   m_traversalMode(TRAVERSAL_STACKLESS)
								   //m_traversalMode(TRAVERSAL_RECURSIVE)
								   ,
								   m_subtreeHeaderCount(0)  //PCK: add this line
{
	m_bvhAabbMin.setValue(-SIMD_INFINITY, -SIMD_INFINITY, -SIMD_INFINITY);
	m_bvhAabbMax.setValue(SIMD_INFINITY, SIMD_INFINITY, SIMD_INFINITY);
}
void btQuantizedBvh::buildInternal()
{
	///assumes that the caller filled in the m_quantizedLeafNodes
	m_useQuantization = true;
	int numLeafNodes = 0;

	if (m_useQuantization)
	{
		//now we have an array of leafnodes in m_leafNodes
		numLeafNodes = m_quantizedLeafNodes.size();

		m_quantizedContiguousNodes.resize(2 * numLeafNodes);
	}

	m_curNodeIndex = 0;

	buildTree(0, numLeafNodes);

	///if the entire tree is smaller than the subtree size, we need to create a header info for the tree
	if (m_useQuantization && !m_SubtreeHeaders.size())
	{
		btBvhSubtreeInfo& subtree = m_SubtreeHeaders.expand();
		subtree.setAabbFromQuantizeNode(m_quantizedContiguousNodes[0]);
		subtree.m_rootNodeIndex = 0;
		subtree.m_subtreeSize = m_quantizedContiguousNodes[0].isLeafNode() ? 1 : m_quantizedContiguousNodes[0].getEscapeIndex();
	}

	//PCK: update the copy of the size
	m_subtreeHeaderCount = m_SubtreeHeaders.size();

	//PCK: clear m_quantizedLeafNodes and m_leafNodes, they are temporary
	m_quantizedLeafNodes.clear();
	m_leafNodes.clear();
}
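
//Layout sketch (added explanation, not part of the original source): the finished tree
//lives depth-first in one contiguous array, and an internal node's escape index is the
//node count of its whole subtree. A traversal that rejects a node can therefore skip
//the entire subtree with a single addition:
#if 0
static void exampleEscapeIndexWalk(const btAlignedObjectArray<btQuantizedBvhNode>& nodes, int numNodes)
{
	int curIndex = 0;
	while (curIndex < numNodes)
	{
		const btQuantizedBvhNode* node = &nodes[curIndex];
		//'overlapsQuery' is a hypothetical predicate standing in for the quantized AABB test
		bool descend = overlapsQuery(node) || node->isLeafNode();
		curIndex += descend ? 1 : node->getEscapeIndex();
	}
}
#endif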
///just for debugging, to visualize the individual patches/subtrees
#ifdef DEBUG_PATCH_COLORS
btVector3 color[4] =
	{
		btVector3(1, 0, 0),
		btVector3(0, 1, 0),
		btVector3(0, 0, 1),
		btVector3(0, 1, 1)};
#endif  //DEBUG_PATCH_COLORS
void btQuantizedBvh::setQuantizationValues(const btVector3& bvhAabbMin, const btVector3& bvhAabbMax, btScalar quantizationMargin)
{
	//enlarge the AABB to avoid division by zero when initializing the quantization values
	btVector3 clampValue(quantizationMargin, quantizationMargin, quantizationMargin);

	m_bvhAabbMin = bvhAabbMin - clampValue;
	m_bvhAabbMax = bvhAabbMax + clampValue;

	btVector3 aabbSize = m_bvhAabbMax - m_bvhAabbMin;
	m_bvhQuantization = btVector3(btScalar(65533.0), btScalar(65533.0), btScalar(65533.0)) / aabbSize;

	m_useQuantization = true;

	{
		unsigned short vecIn[3];
		btVector3 v;
		{
			quantize(vecIn, m_bvhAabbMin, false);
			v = unQuantize(vecIn);
			m_bvhAabbMin.setMin(v - clampValue);
		}
		aabbSize = m_bvhAabbMax - m_bvhAabbMin;
		m_bvhQuantization = btVector3(btScalar(65533.0), btScalar(65533.0), btScalar(65533.0)) / aabbSize;
		{
			quantize(vecIn, m_bvhAabbMax, true);
			v = unQuantize(vecIn);
			m_bvhAabbMax.setMax(v + clampValue);
		}
		aabbSize = m_bvhAabbMax - m_bvhAabbMin;
		m_bvhQuantization = btVector3(btScalar(65533.0), btScalar(65533.0), btScalar(65533.0)) / aabbSize;
	}
}
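
//Quantization sketch (an illustration under assumed formulas; the real quantize/unQuantize
//implementations live in btQuantizedBvh.h): each axis is mapped onto a 16-bit grid of
//65533 steps spread across the enlarged AABB, and the inverse mapping recovers an
//approximate real-valued position.
#if 0
static unsigned short quantizeAxisSketch(btScalar v, btScalar aabbMin, btScalar quantization)
{
	//grid coordinate relative to the BVH bounds
	return (unsigned short)((v - aabbMin) * quantization);
}
static btScalar unQuantizeAxisSketch(unsigned short q, btScalar aabbMin, btScalar quantization)
{
	//approximate inverse of quantizeAxisSketch
	return (btScalar)q / quantization + aabbMin;
}
#endif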
btQuantizedBvh::~btQuantizedBvh()
{
}

#ifdef DEBUG_TREE_BUILDING
int gStackDepth = 0;
int gMaxStackDepth = 0;
#endif  //DEBUG_TREE_BUILDING
void btQuantizedBvh::buildTree(int startIndex, int endIndex)
{
#ifdef DEBUG_TREE_BUILDING
	gStackDepth++;
	if (gStackDepth > gMaxStackDepth)
		gMaxStackDepth = gStackDepth;
#endif  //DEBUG_TREE_BUILDING

	int splitAxis, splitIndex, i;
	int numIndices = endIndex - startIndex;
	int curIndex = m_curNodeIndex;

	btAssert(numIndices > 0);

	if (numIndices == 1)
	{
#ifdef DEBUG_TREE_BUILDING
		gStackDepth--;
#endif  //DEBUG_TREE_BUILDING

		assignInternalNodeFromLeafNode(m_curNodeIndex, startIndex);

		m_curNodeIndex++;
		return;
	}
	//calculate Best Splitting Axis and where to split it. Sort the incoming 'leafNodes' array within range 'startIndex/endIndex'.

	splitAxis = calcSplittingAxis(startIndex, endIndex);

	splitIndex = sortAndCalcSplittingIndex(startIndex, endIndex, splitAxis);

	int internalNodeIndex = m_curNodeIndex;

	//set the min aabb to 'inf' or a max value, and set the max aabb to a -inf/minimum value.
	//the aabb will be expanded during buildTree/mergeInternalNodeAabb with actual node values
	setInternalNodeAabbMin(m_curNodeIndex, m_bvhAabbMax);  //can't use btVector3(SIMD_INFINITY,SIMD_INFINITY,SIMD_INFINITY)) because of quantization
	setInternalNodeAabbMax(m_curNodeIndex, m_bvhAabbMin);  //can't use btVector3(-SIMD_INFINITY,-SIMD_INFINITY,-SIMD_INFINITY)) because of quantization

	for (i = startIndex; i < endIndex; i++)
	{
		mergeInternalNodeAabb(m_curNodeIndex, getAabbMin(i), getAabbMax(i));
	}

	m_curNodeIndex++;

	//internalNode->m_escapeIndex;

	int leftChildNodexIndex = m_curNodeIndex;

	//build left child tree
	buildTree(startIndex, splitIndex);

	int rightChildNodexIndex = m_curNodeIndex;
	//build right child tree
	buildTree(splitIndex, endIndex);

#ifdef DEBUG_TREE_BUILDING
	gStackDepth--;
#endif  //DEBUG_TREE_BUILDING

	int escapeIndex = m_curNodeIndex - curIndex;

	if (m_useQuantization)
	{
		//escapeIndex is the number of nodes of this subtree
		const int sizeQuantizedNode = sizeof(btQuantizedBvhNode);
		const int treeSizeInBytes = escapeIndex * sizeQuantizedNode;
		if (treeSizeInBytes > MAX_SUBTREE_SIZE_IN_BYTES)
		{
			updateSubtreeHeaders(leftChildNodexIndex, rightChildNodexIndex);
		}
	}
	else
	{
	}

	setInternalNodeEscapeIndex(internalNodeIndex, escapeIndex);
}
void btQuantizedBvh::updateSubtreeHeaders(int leftChildNodexIndex, int rightChildNodexIndex)
{
	btAssert(m_useQuantization);

	btQuantizedBvhNode& leftChildNode = m_quantizedContiguousNodes[leftChildNodexIndex];
	int leftSubTreeSize = leftChildNode.isLeafNode() ? 1 : leftChildNode.getEscapeIndex();
	int leftSubTreeSizeInBytes = leftSubTreeSize * static_cast<int>(sizeof(btQuantizedBvhNode));

	btQuantizedBvhNode& rightChildNode = m_quantizedContiguousNodes[rightChildNodexIndex];
	int rightSubTreeSize = rightChildNode.isLeafNode() ? 1 : rightChildNode.getEscapeIndex();
	int rightSubTreeSizeInBytes = rightSubTreeSize * static_cast<int>(sizeof(btQuantizedBvhNode));

	if (leftSubTreeSizeInBytes <= MAX_SUBTREE_SIZE_IN_BYTES)
	{
		btBvhSubtreeInfo& subtree = m_SubtreeHeaders.expand();
		subtree.setAabbFromQuantizeNode(leftChildNode);
		subtree.m_rootNodeIndex = leftChildNodexIndex;
		subtree.m_subtreeSize = leftSubTreeSize;
	}

	if (rightSubTreeSizeInBytes <= MAX_SUBTREE_SIZE_IN_BYTES)
	{
		btBvhSubtreeInfo& subtree = m_SubtreeHeaders.expand();
		subtree.setAabbFromQuantizeNode(rightChildNode);
		subtree.m_rootNodeIndex = rightChildNodexIndex;
		subtree.m_subtreeSize = rightSubTreeSize;
	}

	//PCK: update the copy of the size
	m_subtreeHeaderCount = m_SubtreeHeaders.size();
}
int btQuantizedBvh::sortAndCalcSplittingIndex(int startIndex, int endIndex, int splitAxis)
{
	int i;
	int splitIndex = startIndex;
	int numIndices = endIndex - startIndex;
	btScalar splitValue;

	btVector3 means(btScalar(0.), btScalar(0.), btScalar(0.));
	for (i = startIndex; i < endIndex; i++)
	{
		btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
		means += center;
	}
	means *= (btScalar(1.) / (btScalar)numIndices);

	splitValue = means[splitAxis];

	//sort leafNodes so all values larger than splitValue come first, and smaller values start from 'splitIndex'.
	for (i = startIndex; i < endIndex; i++)
	{
		btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
		if (center[splitAxis] > splitValue)
		{
			//swap
			swapLeafNodes(i, splitIndex);
			splitIndex++;
		}
	}

	//if the splitIndex causes unbalanced trees, fix this by using the center in between startIndex and endIndex
	//otherwise the tree-building might fail due to stack-overflows in certain cases.
	//unbalanced1 is unsafe: it can cause stack overflows
	//bool unbalanced1 = ((splitIndex==startIndex) || (splitIndex == (endIndex-1)));

	//unbalanced2 should work too: always use the center (perfectly balanced trees)
	//bool unbalanced2 = true;

	//this should be safe too:
	int rangeBalancedIndices = numIndices / 3;
	bool unbalanced = ((splitIndex <= (startIndex + rangeBalancedIndices)) || (splitIndex >= (endIndex - 1 - rangeBalancedIndices)));

	if (unbalanced)
	{
		splitIndex = startIndex + (numIndices >> 1);
	}

	bool unbal = (splitIndex == startIndex) || (splitIndex == (endIndex));
	(void)unbal;
	btAssert(!unbal);

	return splitIndex;
}
int btQuantizedBvh::calcSplittingAxis(int startIndex, int endIndex)
{
	int i;

	btVector3 means(btScalar(0.), btScalar(0.), btScalar(0.));
	btVector3 variance(btScalar(0.), btScalar(0.), btScalar(0.));
	int numIndices = endIndex - startIndex;

	for (i = startIndex; i < endIndex; i++)
	{
		btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
		means += center;
	}
	means *= (btScalar(1.) / (btScalar)numIndices);

	for (i = startIndex; i < endIndex; i++)
	{
		btVector3 center = btScalar(0.5) * (getAabbMax(i) + getAabbMin(i));
		btVector3 diff2 = center - means;
		diff2 = diff2 * diff2;
		variance += diff2;
	}
	variance *= (btScalar(1.) / ((btScalar)numIndices - 1));

	return variance.maxAxis();
}
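
//Worked example (added illustration): for leaf centers {(0,0,0), (4,0,1), (8,0,2)} the
//per-axis sample variances are (16, 0, 1), so calcSplittingAxis returns axis 0 (x),
//the axis along which the leaf centers are most spread out.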
void btQuantizedBvh::reportAabbOverlappingNodex(btNodeOverlapCallback* nodeCallback, const btVector3& aabbMin, const btVector3& aabbMax) const
{
	//either choose recursive traversal (walkTree) or stackless (walkStacklessTree)

	if (m_useQuantization)
	{
		///quantize query AABB
		unsigned short int quantizedQueryAabbMin[3];
		unsigned short int quantizedQueryAabbMax[3];
		quantizeWithClamp(quantizedQueryAabbMin, aabbMin, 0);
		quantizeWithClamp(quantizedQueryAabbMax, aabbMax, 1);

		switch (m_traversalMode)
		{
			case TRAVERSAL_STACKLESS:
				walkStacklessQuantizedTree(nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax, 0, m_curNodeIndex);
				break;
			case TRAVERSAL_STACKLESS_CACHE_FRIENDLY:
				walkStacklessQuantizedTreeCacheFriendly(nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);
				break;
			case TRAVERSAL_RECURSIVE:
			{
				const btQuantizedBvhNode* rootNode = &m_quantizedContiguousNodes[0];
				walkRecursiveQuantizedTreeAgainstQueryAabb(rootNode, nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);
			}
			break;
			default:
				//unsupported
				btAssert(0);
		}
	}
	else
	{
		walkStacklessTree(nodeCallback, aabbMin, aabbMax);
	}
}
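
//Usage sketch (hypothetical callback, not part of this file): collecting the triangle
//indices whose leaf AABBs overlap a query AABB via reportAabbOverlappingNodex.
#if 0
struct ExampleCollector : public btNodeOverlapCallback
{
	btAlignedObjectArray<int> m_hits;
	virtual void processNode(int subPart, int triangleIndex)
	{
		(void)subPart;
		m_hits.push_back(triangleIndex);
	}
};
//ExampleCollector collector;
//bvh->reportAabbOverlappingNodex(&collector, queryAabbMin, queryAabbMax);
#endif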
int maxIterations = 0;

void btQuantizedBvh::walkStacklessTree(btNodeOverlapCallback* nodeCallback, const btVector3& aabbMin, const btVector3& aabbMax) const
{
	btAssert(!m_useQuantization);

	const btOptimizedBvhNode* rootNode = &m_contiguousNodes[0];
	int escapeIndex, curIndex = 0;
	int walkIterations = 0;
	bool isLeafNode;
	//PCK: unsigned instead of bool
	unsigned aabbOverlap;

	while (curIndex < m_curNodeIndex)
	{
		//catch bugs in tree data
		btAssert(walkIterations < m_curNodeIndex);

		walkIterations++;
		aabbOverlap = TestAabbAgainstAabb2(aabbMin, aabbMax, rootNode->m_aabbMinOrg, rootNode->m_aabbMaxOrg);
		isLeafNode = rootNode->m_escapeIndex == -1;

		//PCK: unsigned instead of bool
		if (isLeafNode && (aabbOverlap != 0))
		{
			nodeCallback->processNode(rootNode->m_subPart, rootNode->m_triangleIndex);
		}

		//PCK: unsigned instead of bool
		if ((aabbOverlap != 0) || isLeafNode)
		{
			rootNode++;
			curIndex++;
		}
		else
		{
			escapeIndex = rootNode->m_escapeIndex;
			rootNode += escapeIndex;
			curIndex += escapeIndex;
		}
	}
	if (maxIterations < walkIterations)
		maxIterations = walkIterations;
}
/*
///this was the original recursive traversal, before we optimized towards stackless traversal
void btQuantizedBvh::walkTree(btOptimizedBvhNode* rootNode, btNodeOverlapCallback* nodeCallback, const btVector3& aabbMin, const btVector3& aabbMax) const
{
	bool isLeafNode, aabbOverlap = TestAabbAgainstAabb2(aabbMin, aabbMax, rootNode->m_aabbMin, rootNode->m_aabbMax);
	if (aabbOverlap)
	{
		isLeafNode = (!rootNode->m_leftChild && !rootNode->m_rightChild);
		if (isLeafNode)
		{
			nodeCallback->processNode(rootNode);
		} else
		{
			walkTree(rootNode->m_leftChild, nodeCallback, aabbMin, aabbMax);
			walkTree(rootNode->m_rightChild, nodeCallback, aabbMin, aabbMax);
		}
	}
}
*/
void btQuantizedBvh::walkRecursiveQuantizedTreeAgainstQueryAabb(const btQuantizedBvhNode* currentNode, btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax) const
{
	btAssert(m_useQuantization);

	bool isLeafNode;
	//PCK: unsigned instead of bool
	unsigned aabbOverlap;

	//PCK: unsigned instead of bool
	aabbOverlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, currentNode->m_quantizedAabbMin, currentNode->m_quantizedAabbMax);
	isLeafNode = currentNode->isLeafNode();

	//PCK: unsigned instead of bool
	if (aabbOverlap != 0)
	{
		if (isLeafNode)
		{
			nodeCallback->processNode(currentNode->getPartId(), currentNode->getTriangleIndex());
		}
		else
		{
			//process left and right children
			const btQuantizedBvhNode* leftChildNode = currentNode + 1;
			walkRecursiveQuantizedTreeAgainstQueryAabb(leftChildNode, nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);

			const btQuantizedBvhNode* rightChildNode = leftChildNode->isLeafNode() ? leftChildNode + 1 : leftChildNode + leftChildNode->getEscapeIndex();
			walkRecursiveQuantizedTreeAgainstQueryAabb(rightChildNode, nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax);
		}
	}
}
void btQuantizedBvh::walkStacklessTreeAgainstRay(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget, const btVector3& aabbMin, const btVector3& aabbMax, int startNodeIndex, int endNodeIndex) const
{
	btAssert(!m_useQuantization);

	const btOptimizedBvhNode* rootNode = &m_contiguousNodes[0];
	int escapeIndex, curIndex = 0;
	int walkIterations = 0;
	bool isLeafNode;
	//PCK: unsigned instead of bool
	unsigned aabbOverlap = 0;
	unsigned rayBoxOverlap = 0;
	btScalar lambda_max = 1.0;

	/* Quick pruning by quantized box */
	btVector3 rayAabbMin = raySource;
	btVector3 rayAabbMax = raySource;
	rayAabbMin.setMin(rayTarget);
	rayAabbMax.setMax(rayTarget);

	/* Add box cast extents to bounding box */
	rayAabbMin += aabbMin;
	rayAabbMax += aabbMax;

#ifdef RAYAABB2
	btVector3 rayDir = (rayTarget - raySource);
	rayDir.safeNormalize();  // stephengold changed normalize to safeNormalize 2020-02-17
	lambda_max = rayDir.dot(rayTarget - raySource);
	///what about division by zero? --> just set rayDirection[i] to 1.0
	btVector3 rayDirectionInverse;
	rayDirectionInverse[0] = rayDir[0] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[0];
	rayDirectionInverse[1] = rayDir[1] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[1];
	rayDirectionInverse[2] = rayDir[2] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDir[2];
	unsigned int sign[3] = {rayDirectionInverse[0] < 0.0, rayDirectionInverse[1] < 0.0, rayDirectionInverse[2] < 0.0};
#endif

	btVector3 bounds[2];

	while (curIndex < m_curNodeIndex)
	{
		btScalar param = 1.0;
		//catch bugs in tree data
		btAssert(walkIterations < m_curNodeIndex);

		walkIterations++;

		bounds[0] = rootNode->m_aabbMinOrg;
		bounds[1] = rootNode->m_aabbMaxOrg;
		/* Add box cast extents */
		bounds[0] -= aabbMax;
		bounds[1] -= aabbMin;

		aabbOverlap = TestAabbAgainstAabb2(rayAabbMin, rayAabbMax, rootNode->m_aabbMinOrg, rootNode->m_aabbMaxOrg);
		//perhaps profile if it is worth doing the aabbOverlap test first

#ifdef RAYAABB2
		///careful with this check: need to check division by zero (above) and fix the unQuantize method
		///thanks Joerg/hiker for the reproduction case!
		///http://www.bulletphysics.com/Bullet/phpBB3/viewtopic.php?f=9&t=1858
		rayBoxOverlap = aabbOverlap ? btRayAabb2(raySource, rayDirectionInverse, sign, bounds, param, 0.0f, lambda_max) : false;
#else
		btVector3 normal;
		rayBoxOverlap = btRayAabb(raySource, rayTarget, bounds[0], bounds[1], param, normal);
#endif

		isLeafNode = rootNode->m_escapeIndex == -1;

		//PCK: unsigned instead of bool
		if (isLeafNode && (rayBoxOverlap != 0))
		{
			nodeCallback->processNode(rootNode->m_subPart, rootNode->m_triangleIndex);
		}

		//PCK: unsigned instead of bool
		if ((rayBoxOverlap != 0) || isLeafNode)
		{
			rootNode++;
			curIndex++;
		}
		else
		{
			escapeIndex = rootNode->m_escapeIndex;
			rootNode += escapeIndex;
			curIndex += escapeIndex;
		}
	}
	if (maxIterations < walkIterations)
		maxIterations = walkIterations;
}
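
//Sketch of the slab test (the standard Williams et al. formulation, assumed to match
//what btRayAabb2 computes; not the library implementation): the precomputed inverse
//direction and per-axis sign flags let each axis clip the ray parameter interval
//[tmin, tmax] with two multiplies and no branches on the direction sign.
#if 0
static bool raySlabSketch(const btVector3& origin, const btVector3& invDir,
						  const unsigned int sign[3], const btVector3 bounds[2],
						  btScalar lambdaMin, btScalar lambdaMax)
{
	btScalar tmin = (bounds[sign[0]].x() - origin.x()) * invDir.x();
	btScalar tmax = (bounds[1 - sign[0]].x() - origin.x()) * invDir.x();
	btScalar tymin = (bounds[sign[1]].y() - origin.y()) * invDir.y();
	btScalar tymax = (bounds[1 - sign[1]].y() - origin.y()) * invDir.y();
	if ((tmin > tymax) || (tymin > tmax)) return false;
	if (tymin > tmin) tmin = tymin;
	if (tymax < tmax) tmax = tymax;
	btScalar tzmin = (bounds[sign[2]].z() - origin.z()) * invDir.z();
	btScalar tzmax = (bounds[1 - sign[2]].z() - origin.z()) * invDir.z();
	if ((tmin > tzmax) || (tzmin > tmax)) return false;
	if (tzmin > tmin) tmin = tzmin;
	if (tzmax < tmax) tmax = tzmax;
	return (tmin < lambdaMax) && (tmax > lambdaMin);
}
#endif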
void btQuantizedBvh::walkStacklessQuantizedTreeAgainstRay(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget, const btVector3& aabbMin, const btVector3& aabbMax, int startNodeIndex, int endNodeIndex) const
{
	btAssert(m_useQuantization);

	int curIndex = startNodeIndex;
	int walkIterations = 0;
	int subTreeSize = endNodeIndex - startNodeIndex;
	(void)subTreeSize;

	const btQuantizedBvhNode* rootNode = &m_quantizedContiguousNodes[startNodeIndex];
	int escapeIndex;

	bool isLeafNode;
	//PCK: unsigned instead of bool
	unsigned boxBoxOverlap = 0;
	unsigned rayBoxOverlap = 0;

	btScalar lambda_max = 1.0;

#ifdef RAYAABB2
	btVector3 rayDirection = (rayTarget - raySource);
	rayDirection.safeNormalize();  // stephengold changed normalize to safeNormalize 2020-02-17
	lambda_max = rayDirection.dot(rayTarget - raySource);
	///what about division by zero? --> just set rayDirection[i] to 1.0
	rayDirection[0] = rayDirection[0] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDirection[0];
	rayDirection[1] = rayDirection[1] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDirection[1];
	rayDirection[2] = rayDirection[2] == btScalar(0.0) ? btScalar(BT_LARGE_FLOAT) : btScalar(1.0) / rayDirection[2];
	unsigned int sign[3] = {rayDirection[0] < 0.0, rayDirection[1] < 0.0, rayDirection[2] < 0.0};
#endif

	/* Quick pruning by quantized box */
	btVector3 rayAabbMin = raySource;
	btVector3 rayAabbMax = raySource;
	rayAabbMin.setMin(rayTarget);
	rayAabbMax.setMax(rayTarget);

	/* Add box cast extents to bounding box */
	rayAabbMin += aabbMin;
	rayAabbMax += aabbMax;

	unsigned short int quantizedQueryAabbMin[3];
	unsigned short int quantizedQueryAabbMax[3];
	quantizeWithClamp(quantizedQueryAabbMin, rayAabbMin, 0);
	quantizeWithClamp(quantizedQueryAabbMax, rayAabbMax, 1);

	while (curIndex < endNodeIndex)
	{
//#define VISUALLY_ANALYZE_BVH 1
#ifdef VISUALLY_ANALYZE_BVH
		//some code snippet to debugDraw aabb, to visually analyze bvh structure
		static int drawPatch = 0;
		//need some global access to a debugDrawer
		extern btIDebugDraw* debugDrawerPtr;
		if (curIndex == drawPatch)
		{
			btVector3 aabbMin, aabbMax;
			aabbMin = unQuantize(rootNode->m_quantizedAabbMin);
			aabbMax = unQuantize(rootNode->m_quantizedAabbMax);
			btVector3 color(1, 0, 0);
			debugDrawerPtr->drawAabb(aabbMin, aabbMax, color);
		}
#endif  //VISUALLY_ANALYZE_BVH

		//catch bugs in tree data
		btAssert(walkIterations < subTreeSize);

		walkIterations++;
		//PCK: unsigned instead of bool
		// only interested if this is closer than any previous hit
		btScalar param = 1.0;
		rayBoxOverlap = 0;
		boxBoxOverlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, rootNode->m_quantizedAabbMin, rootNode->m_quantizedAabbMax);
		isLeafNode = rootNode->isLeafNode();
		if (boxBoxOverlap)
		{
			btVector3 bounds[2];
			bounds[0] = unQuantize(rootNode->m_quantizedAabbMin);
			bounds[1] = unQuantize(rootNode->m_quantizedAabbMax);
			/* Add box cast extents */
			bounds[0] -= aabbMax;
			bounds[1] -= aabbMin;
			btVector3 normal;
#if 0
			bool ra2 = btRayAabb2(raySource, rayDirection, sign, bounds, param, 0.0, lambda_max);
			bool ra = btRayAabb(raySource, rayTarget, bounds[0], bounds[1], param, normal);
			if (ra2 != ra)
			{
				printf("functions don't match\n");
			}
#endif
#ifdef RAYAABB2
			///careful with this check: need to check division by zero (above) and fix the unQuantize method
			///thanks Joerg/hiker for the reproduction case!
			///http://www.bulletphysics.com/Bullet/phpBB3/viewtopic.php?f=9&t=1858

			//BT_PROFILE("btRayAabb2");
			rayBoxOverlap = btRayAabb2(raySource, rayDirection, sign, bounds, param, 0.0f, lambda_max);

#else
			rayBoxOverlap = true;  //btRayAabb(raySource, rayTarget, bounds[0], bounds[1], param, normal);
#endif
		}

		if (isLeafNode && rayBoxOverlap)
		{
			nodeCallback->processNode(rootNode->getPartId(), rootNode->getTriangleIndex());
		}

		//PCK: unsigned instead of bool
		if ((rayBoxOverlap != 0) || isLeafNode)
		{
			rootNode++;
			curIndex++;
		}
		else
		{
			escapeIndex = rootNode->getEscapeIndex();
			rootNode += escapeIndex;
			curIndex += escapeIndex;
		}
	}
	if (maxIterations < walkIterations)
		maxIterations = walkIterations;
}
void btQuantizedBvh::walkStacklessQuantizedTree(btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax, int startNodeIndex, int endNodeIndex) const
{
	btAssert(m_useQuantization);

	int curIndex = startNodeIndex;
	int walkIterations = 0;
	int subTreeSize = endNodeIndex - startNodeIndex;
	(void)subTreeSize;

	const btQuantizedBvhNode* rootNode = &m_quantizedContiguousNodes[startNodeIndex];
	int escapeIndex;

	bool isLeafNode;
	//PCK: unsigned instead of bool
	unsigned aabbOverlap;

	while (curIndex < endNodeIndex)
	{
//#define VISUALLY_ANALYZE_BVH 1
#ifdef VISUALLY_ANALYZE_BVH
		//some code snippet to debugDraw aabb, to visually analyze bvh structure
		static int drawPatch = 0;
		//need some global access to a debugDrawer
		extern btIDebugDraw* debugDrawerPtr;
		if (curIndex == drawPatch)
		{
			btVector3 aabbMin, aabbMax;
			aabbMin = unQuantize(rootNode->m_quantizedAabbMin);
			aabbMax = unQuantize(rootNode->m_quantizedAabbMax);
			btVector3 color(1, 0, 0);
			debugDrawerPtr->drawAabb(aabbMin, aabbMax, color);
		}
#endif  //VISUALLY_ANALYZE_BVH

		//catch bugs in tree data
		btAssert(walkIterations < subTreeSize);

		walkIterations++;
		//PCK: unsigned instead of bool
		aabbOverlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, rootNode->m_quantizedAabbMin, rootNode->m_quantizedAabbMax);
		isLeafNode = rootNode->isLeafNode();

		if (isLeafNode && aabbOverlap)
		{
			nodeCallback->processNode(rootNode->getPartId(), rootNode->getTriangleIndex());
		}

		//PCK: unsigned instead of bool
		if ((aabbOverlap != 0) || isLeafNode)
		{
			rootNode++;
			curIndex++;
		}
		else
		{
			escapeIndex = rootNode->getEscapeIndex();
			rootNode += escapeIndex;
			curIndex += escapeIndex;
		}
	}
	if (maxIterations < walkIterations)
		maxIterations = walkIterations;
}
//This traversal can be called from Playstation 3 SPU
void btQuantizedBvh::walkStacklessQuantizedTreeCacheFriendly(btNodeOverlapCallback* nodeCallback, unsigned short int* quantizedQueryAabbMin, unsigned short int* quantizedQueryAabbMax) const
{
	btAssert(m_useQuantization);

	int i;

	for (i = 0; i < this->m_SubtreeHeaders.size(); i++)
	{
		const btBvhSubtreeInfo& subtree = m_SubtreeHeaders[i];

		//PCK: unsigned instead of bool
		unsigned overlap = testQuantizedAabbAgainstQuantizedAabb(quantizedQueryAabbMin, quantizedQueryAabbMax, subtree.m_quantizedAabbMin, subtree.m_quantizedAabbMax);
		if (overlap != 0)
		{
			walkStacklessQuantizedTree(nodeCallback, quantizedQueryAabbMin, quantizedQueryAabbMax,
									   subtree.m_rootNodeIndex,
									   subtree.m_rootNodeIndex + subtree.m_subtreeSize);
		}
	}
}
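
//Added note: the cache-friendly walk above is two-level. It first tests each subtree
//header's quantized AABB; only overlapping subtrees, each capped at
//MAX_SUBTREE_SIZE_IN_BYTES by updateSubtreeHeaders, are then walked node-by-node, so a
//whole subtree can be streamed into a small local memory (such as SPU local store).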
void btQuantizedBvh::reportRayOverlappingNodex(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget) const
{
	reportBoxCastOverlappingNodex(nodeCallback, raySource, rayTarget, btVector3(0, 0, 0), btVector3(0, 0, 0));
}

void btQuantizedBvh::reportBoxCastOverlappingNodex(btNodeOverlapCallback* nodeCallback, const btVector3& raySource, const btVector3& rayTarget, const btVector3& aabbMin, const btVector3& aabbMax) const
{
	//always use stackless

	if (m_useQuantization)
	{
		walkStacklessQuantizedTreeAgainstRay(nodeCallback, raySource, rayTarget, aabbMin, aabbMax, 0, m_curNodeIndex);
	}
	else
	{
		walkStacklessTreeAgainstRay(nodeCallback, raySource, rayTarget, aabbMin, aabbMax, 0, m_curNodeIndex);
	}
	/*
	{
		//recursive traversal
		btVector3 qaabbMin = raySource;
		btVector3 qaabbMax = raySource;
		qaabbMin.setMin(rayTarget);
		qaabbMax.setMax(rayTarget);
		qaabbMin += aabbMin;
		qaabbMax += aabbMax;
		reportAabbOverlappingNodex(nodeCallback, qaabbMin, qaabbMax);
	}
	*/
}
void btQuantizedBvh::swapLeafNodes(int i, int splitIndex)
{
	if (m_useQuantization)
	{
		btQuantizedBvhNode tmp = m_quantizedLeafNodes[i];
		m_quantizedLeafNodes[i] = m_quantizedLeafNodes[splitIndex];
		m_quantizedLeafNodes[splitIndex] = tmp;
	}
	else
	{
		btOptimizedBvhNode tmp = m_leafNodes[i];
		m_leafNodes[i] = m_leafNodes[splitIndex];
		m_leafNodes[splitIndex] = tmp;
	}
}

void btQuantizedBvh::assignInternalNodeFromLeafNode(int internalNode, int leafNodeIndex)
{
	if (m_useQuantization)
	{
		m_quantizedContiguousNodes[internalNode] = m_quantizedLeafNodes[leafNodeIndex];
	}
	else
	{
		m_contiguousNodes[internalNode] = m_leafNodes[leafNodeIndex];
	}
}
//PCK: include
#include <new>

#if 0
//PCK: consts
static const unsigned BVH_ALIGNMENT = 16;
static const unsigned BVH_ALIGNMENT_MASK = BVH_ALIGNMENT - 1;

static const unsigned BVH_ALIGNMENT_BLOCKS = 2;
#endif

unsigned int btQuantizedBvh::getAlignmentSerializationPadding()
{
	// I changed this to 0 since the extra padding is not needed or used.
	return 0;  //BVH_ALIGNMENT_BLOCKS * BVH_ALIGNMENT;
}

unsigned btQuantizedBvh::calculateSerializeBufferSize() const
{
	unsigned baseSize = sizeof(btQuantizedBvh) + getAlignmentSerializationPadding();
	baseSize += sizeof(btBvhSubtreeInfo) * m_subtreeHeaderCount;
	if (m_useQuantization)
	{
		return baseSize + m_curNodeIndex * sizeof(btQuantizedBvhNode);
	}
	return baseSize + m_curNodeIndex * sizeof(btOptimizedBvhNode);
}
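
//Usage sketch (illustrative; 'bvh', 'buffer' and 'ok' are hypothetical locals):
//serializing a BVH into a caller-owned, 16-byte-aligned buffer sized by
//calculateSerializeBufferSize.
#if 0
	unsigned bufferSize = bvh->calculateSerializeBufferSize();
	unsigned char* buffer = (unsigned char*)btAlignedAlloc(bufferSize, 16);
	bool ok = bvh->serialize(buffer, bufferSize, false /*i_swapEndian*/);
	//...write 'buffer' to disk; later map it back with btQuantizedBvh::deSerializeInPlace()
#endif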
bool btQuantizedBvh::serialize(void* o_alignedDataBuffer, unsigned /*i_dataBufferSize */, bool i_swapEndian) const
{
	btAssert(m_subtreeHeaderCount == m_SubtreeHeaders.size());
	m_subtreeHeaderCount = m_SubtreeHeaders.size();

	/*	if (i_dataBufferSize < calculateSerializeBufferSize() || o_alignedDataBuffer == NULL || (((unsigned)o_alignedDataBuffer & BVH_ALIGNMENT_MASK) != 0))
	{
		///check alignment for buffer?
		btAssert(0);
		return false;
	}
*/

	btQuantizedBvh* targetBvh = (btQuantizedBvh*)o_alignedDataBuffer;

	// construct the class so the virtual function table, etc will be set up
	// Also, m_leafNodes and m_quantizedLeafNodes will be initialized to default values by the constructor
	new (targetBvh) btQuantizedBvh;

	if (i_swapEndian)
	{
		targetBvh->m_curNodeIndex = static_cast<int>(btSwapEndian(m_curNodeIndex));

		btSwapVector3Endian(m_bvhAabbMin, targetBvh->m_bvhAabbMin);
		btSwapVector3Endian(m_bvhAabbMax, targetBvh->m_bvhAabbMax);
		btSwapVector3Endian(m_bvhQuantization, targetBvh->m_bvhQuantization);

		targetBvh->m_traversalMode = (btTraversalMode)btSwapEndian(m_traversalMode);
		targetBvh->m_subtreeHeaderCount = static_cast<int>(btSwapEndian(m_subtreeHeaderCount));
	}
	else
	{
		targetBvh->m_curNodeIndex = m_curNodeIndex;
		targetBvh->m_bvhAabbMin = m_bvhAabbMin;
		targetBvh->m_bvhAabbMax = m_bvhAabbMax;
		targetBvh->m_bvhQuantization = m_bvhQuantization;
		targetBvh->m_traversalMode = m_traversalMode;
		targetBvh->m_subtreeHeaderCount = m_subtreeHeaderCount;
	}

	targetBvh->m_useQuantization = m_useQuantization;

	unsigned char* nodeData = (unsigned char*)targetBvh;
	nodeData += sizeof(btQuantizedBvh);

	unsigned sizeToAdd = 0;  //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
	nodeData += sizeToAdd;

	int nodeCount = m_curNodeIndex;

	if (m_useQuantization)
	{
		targetBvh->m_quantizedContiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);

		if (i_swapEndian)
		{
			for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
			{
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0]);
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1]);
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2]);

				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0]);
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1]);
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2] = btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2]);

				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex = static_cast<int>(btSwapEndian(m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex));
			}
		}
		else
		{
			for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
			{
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0];
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1];
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2];

				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0];
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1];
				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2] = m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2];

				targetBvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex = m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex;
			}
		}
		nodeData += sizeof(btQuantizedBvhNode) * nodeCount;

		// this clears the pointer in the member variable; it doesn't really do anything to the data.
		// It does call the destructor on the contained objects, but they are all classes with no destructor defined,
		// so the memory (which is not freed) is left alone
		targetBvh->m_quantizedContiguousNodes.initializeFromBuffer(NULL, 0, 0);
	}
	else
	{
		targetBvh->m_contiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);

		if (i_swapEndian)
		{
			for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
			{
				btSwapVector3Endian(m_contiguousNodes[nodeIndex].m_aabbMinOrg, targetBvh->m_contiguousNodes[nodeIndex].m_aabbMinOrg);
				btSwapVector3Endian(m_contiguousNodes[nodeIndex].m_aabbMaxOrg, targetBvh->m_contiguousNodes[nodeIndex].m_aabbMaxOrg);

				targetBvh->m_contiguousNodes[nodeIndex].m_escapeIndex = static_cast<int>(btSwapEndian(m_contiguousNodes[nodeIndex].m_escapeIndex));
				targetBvh->m_contiguousNodes[nodeIndex].m_subPart = static_cast<int>(btSwapEndian(m_contiguousNodes[nodeIndex].m_subPart));
				targetBvh->m_contiguousNodes[nodeIndex].m_triangleIndex = static_cast<int>(btSwapEndian(m_contiguousNodes[nodeIndex].m_triangleIndex));
			}
		}
		else
		{
			for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
			{
				targetBvh->m_contiguousNodes[nodeIndex].m_aabbMinOrg = m_contiguousNodes[nodeIndex].m_aabbMinOrg;
				targetBvh->m_contiguousNodes[nodeIndex].m_aabbMaxOrg = m_contiguousNodes[nodeIndex].m_aabbMaxOrg;

				targetBvh->m_contiguousNodes[nodeIndex].m_escapeIndex = m_contiguousNodes[nodeIndex].m_escapeIndex;
				targetBvh->m_contiguousNodes[nodeIndex].m_subPart = m_contiguousNodes[nodeIndex].m_subPart;
				targetBvh->m_contiguousNodes[nodeIndex].m_triangleIndex = m_contiguousNodes[nodeIndex].m_triangleIndex;
			}
		}
		nodeData += sizeof(btOptimizedBvhNode) * nodeCount;

		// this clears the pointer in the member variable; it doesn't really do anything to the data.
		// It does call the destructor on the contained objects, but they are all classes with no destructor defined,
		// so the memory (which is not freed) is left alone
		targetBvh->m_contiguousNodes.initializeFromBuffer(NULL, 0, 0);
	}

	sizeToAdd = 0;  //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
	nodeData += sizeToAdd;

	// Now serialize the subtree headers
	targetBvh->m_SubtreeHeaders.initializeFromBuffer(nodeData, m_subtreeHeaderCount, m_subtreeHeaderCount);
	if (i_swapEndian)
	{
		for (int i = 0; i < m_subtreeHeaderCount; i++)
		{
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMin[0]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMin[1]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMin[2]);

			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMax[0]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMax[1]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2] = btSwapEndian(m_SubtreeHeaders[i].m_quantizedAabbMax[2]);

			targetBvh->m_SubtreeHeaders[i].m_rootNodeIndex = static_cast<int>(btSwapEndian(m_SubtreeHeaders[i].m_rootNodeIndex));
			targetBvh->m_SubtreeHeaders[i].m_subtreeSize = static_cast<int>(btSwapEndian(m_SubtreeHeaders[i].m_subtreeSize));
		}
	}
	else
	{
		for (int i = 0; i < m_subtreeHeaderCount; i++)
		{
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0] = (m_SubtreeHeaders[i].m_quantizedAabbMin[0]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1] = (m_SubtreeHeaders[i].m_quantizedAabbMin[1]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2] = (m_SubtreeHeaders[i].m_quantizedAabbMin[2]);

			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0] = (m_SubtreeHeaders[i].m_quantizedAabbMax[0]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1] = (m_SubtreeHeaders[i].m_quantizedAabbMax[1]);
			targetBvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2] = (m_SubtreeHeaders[i].m_quantizedAabbMax[2]);

			targetBvh->m_SubtreeHeaders[i].m_rootNodeIndex = (m_SubtreeHeaders[i].m_rootNodeIndex);
			targetBvh->m_SubtreeHeaders[i].m_subtreeSize = (m_SubtreeHeaders[i].m_subtreeSize);

			// need to clear padding in destination buffer
			targetBvh->m_SubtreeHeaders[i].m_padding[0] = 0;
			targetBvh->m_SubtreeHeaders[i].m_padding[1] = 0;
			targetBvh->m_SubtreeHeaders[i].m_padding[2] = 0;
		}
	}
	nodeData += sizeof(btBvhSubtreeInfo) * m_subtreeHeaderCount;

	// this clears the pointer in the member variable; it doesn't really do anything to the data.
	// It does call the destructor on the contained objects, but they are all classes with no destructor defined,
	// so the memory (which is not freed) is left alone
	targetBvh->m_SubtreeHeaders.initializeFromBuffer(NULL, 0, 0);

	// this wipes the virtual function table pointer at the start of the buffer for the class
	*((void**)o_alignedDataBuffer) = NULL;

	return true;
}
btQuantizedBvh* btQuantizedBvh::deSerializeInPlace(void* i_alignedDataBuffer, unsigned int i_dataBufferSize, bool i_swapEndian)
{
	if (i_alignedDataBuffer == NULL)  // || (((unsigned)i_alignedDataBuffer & BVH_ALIGNMENT_MASK) != 0))
	{
		return NULL;
	}
	btQuantizedBvh* bvh = (btQuantizedBvh*)i_alignedDataBuffer;

	if (i_swapEndian)
	{
		bvh->m_curNodeIndex = static_cast<int>(btSwapEndian(bvh->m_curNodeIndex));

		btUnSwapVector3Endian(bvh->m_bvhAabbMin);
		btUnSwapVector3Endian(bvh->m_bvhAabbMax);
		btUnSwapVector3Endian(bvh->m_bvhQuantization);

		bvh->m_traversalMode = (btTraversalMode)btSwapEndian(bvh->m_traversalMode);
		bvh->m_subtreeHeaderCount = static_cast<int>(btSwapEndian(bvh->m_subtreeHeaderCount));
	}

	unsigned int calculatedBufSize = bvh->calculateSerializeBufferSize();
	btAssert(calculatedBufSize <= i_dataBufferSize);

	if (calculatedBufSize > i_dataBufferSize)
	{
		return NULL;
	}

	unsigned char* nodeData = (unsigned char*)bvh;
	nodeData += sizeof(btQuantizedBvh);

	unsigned sizeToAdd = 0;  //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
	nodeData += sizeToAdd;

	int nodeCount = bvh->m_curNodeIndex;

	// Must call placement new to fill in virtual function table, etc, but we don't want to overwrite most data, so call a special version of the constructor
	// Also, m_leafNodes and m_quantizedLeafNodes will be initialized to default values by the constructor
	new (bvh) btQuantizedBvh(*bvh, false);

	if (bvh->m_useQuantization)
	{
		bvh->m_quantizedContiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);

		if (i_swapEndian)
		{
			for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
			{
				bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[0]);
				bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[1]);
				bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMin[2]);

				bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[0]);
				bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[1]);
				bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2] = btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_quantizedAabbMax[2]);

				bvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex = static_cast<int>(btSwapEndian(bvh->m_quantizedContiguousNodes[nodeIndex].m_escapeIndexOrTriangleIndex));
			}
		}
		nodeData += sizeof(btQuantizedBvhNode) * nodeCount;
	}
	else
	{
		bvh->m_contiguousNodes.initializeFromBuffer(nodeData, nodeCount, nodeCount);

		if (i_swapEndian)
		{
			for (int nodeIndex = 0; nodeIndex < nodeCount; nodeIndex++)
			{
				btUnSwapVector3Endian(bvh->m_contiguousNodes[nodeIndex].m_aabbMinOrg);
				btUnSwapVector3Endian(bvh->m_contiguousNodes[nodeIndex].m_aabbMaxOrg);

				bvh->m_contiguousNodes[nodeIndex].m_escapeIndex = static_cast<int>(btSwapEndian(bvh->m_contiguousNodes[nodeIndex].m_escapeIndex));
				bvh->m_contiguousNodes[nodeIndex].m_subPart = static_cast<int>(btSwapEndian(bvh->m_contiguousNodes[nodeIndex].m_subPart));
				bvh->m_contiguousNodes[nodeIndex].m_triangleIndex = static_cast<int>(btSwapEndian(bvh->m_contiguousNodes[nodeIndex].m_triangleIndex));
			}
		}
		nodeData += sizeof(btOptimizedBvhNode) * nodeCount;
	}

	sizeToAdd = 0;  //(BVH_ALIGNMENT-((unsigned)nodeData & BVH_ALIGNMENT_MASK))&BVH_ALIGNMENT_MASK;
	nodeData += sizeToAdd;

	// Now deserialize the subtree headers
	bvh->m_SubtreeHeaders.initializeFromBuffer(nodeData, bvh->m_subtreeHeaderCount, bvh->m_subtreeHeaderCount);
	if (i_swapEndian)
	{
		for (int i = 0; i < bvh->m_subtreeHeaderCount; i++)
		{
			bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[0]);
			bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[1]);
			bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMin[2]);

			bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[0]);
			bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[1]);
			bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2] = btSwapEndian(bvh->m_SubtreeHeaders[i].m_quantizedAabbMax[2]);

			bvh->m_SubtreeHeaders[i].m_rootNodeIndex = static_cast<int>(btSwapEndian(bvh->m_SubtreeHeaders[i].m_rootNodeIndex));
			bvh->m_SubtreeHeaders[i].m_subtreeSize = static_cast<int>(btSwapEndian(bvh->m_SubtreeHeaders[i].m_subtreeSize));
		}
	}

	return bvh;
}
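
//Usage sketch (illustrative; 'buffer' and 'bufferSize' are hypothetical, produced by a
//prior serialize() call as sketched above): mapping a serialized image back to a usable
//BVH in place. The returned pointer aliases the buffer, so the buffer must not be freed
//while the BVH is in use.
#if 0
	btQuantizedBvh* mappedBvh = btQuantizedBvh::deSerializeInPlace(buffer, bufferSize, false /*i_swapEndian*/);
	if (mappedBvh)
	{
		//mappedBvh is ready for queries such as reportAabbOverlappingNodex
	}
#endif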
// Constructor that prevents btVector3's default constructor from being called
btQuantizedBvh::btQuantizedBvh(btQuantizedBvh& self, bool /* ownsMemory */) : m_bvhAabbMin(self.m_bvhAabbMin),
																			  m_bvhAabbMax(self.m_bvhAabbMax),
																			  m_bvhQuantization(self.m_bvhQuantization),
																			  m_bulletVersion(BT_BULLET_VERSION)
{
}
void btQuantizedBvh::deSerializeFloat(struct btQuantizedBvhFloatData& quantizedBvhFloatData)
{
	m_bvhAabbMax.deSerializeFloat(quantizedBvhFloatData.m_bvhAabbMax);
	m_bvhAabbMin.deSerializeFloat(quantizedBvhFloatData.m_bvhAabbMin);
	m_bvhQuantization.deSerializeFloat(quantizedBvhFloatData.m_bvhQuantization);

	m_curNodeIndex = quantizedBvhFloatData.m_curNodeIndex;
	m_useQuantization = quantizedBvhFloatData.m_useQuantization != 0;

	{
		int numElem = quantizedBvhFloatData.m_numContiguousLeafNodes;
		m_contiguousNodes.resize(numElem);

		if (numElem)
		{
			btOptimizedBvhNodeFloatData* memPtr = quantizedBvhFloatData.m_contiguousNodesPtr;

			for (int i = 0; i < numElem; i++, memPtr++)
			{
				m_contiguousNodes[i].m_aabbMaxOrg.deSerializeFloat(memPtr->m_aabbMaxOrg);
				m_contiguousNodes[i].m_aabbMinOrg.deSerializeFloat(memPtr->m_aabbMinOrg);
				m_contiguousNodes[i].m_escapeIndex = memPtr->m_escapeIndex;
				m_contiguousNodes[i].m_subPart = memPtr->m_subPart;
				m_contiguousNodes[i].m_triangleIndex = memPtr->m_triangleIndex;
			}
		}
	}

	{
		int numElem = quantizedBvhFloatData.m_numQuantizedContiguousNodes;
		m_quantizedContiguousNodes.resize(numElem);

		if (numElem)
		{
			btQuantizedBvhNodeData* memPtr = quantizedBvhFloatData.m_quantizedContiguousNodesPtr;
			for (int i = 0; i < numElem; i++, memPtr++)
			{
				m_quantizedContiguousNodes[i].m_escapeIndexOrTriangleIndex = memPtr->m_escapeIndexOrTriangleIndex;
				m_quantizedContiguousNodes[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
				m_quantizedContiguousNodes[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
				m_quantizedContiguousNodes[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
				m_quantizedContiguousNodes[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
				m_quantizedContiguousNodes[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
				m_quantizedContiguousNodes[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
			}
		}
	}

	m_traversalMode = btTraversalMode(quantizedBvhFloatData.m_traversalMode);

	{
		int numElem = quantizedBvhFloatData.m_numSubtreeHeaders;
		m_SubtreeHeaders.resize(numElem);
		if (numElem)
		{
			btBvhSubtreeInfoData* memPtr = quantizedBvhFloatData.m_subTreeInfoPtr;
			for (int i = 0; i < numElem; i++, memPtr++)
			{
				m_SubtreeHeaders[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
				m_SubtreeHeaders[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
				m_SubtreeHeaders[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
				m_SubtreeHeaders[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
				m_SubtreeHeaders[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
				m_SubtreeHeaders[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
				m_SubtreeHeaders[i].m_rootNodeIndex = memPtr->m_rootNodeIndex;
				m_SubtreeHeaders[i].m_subtreeSize = memPtr->m_subtreeSize;
			}
		}
	}
}
void btQuantizedBvh::deSerializeDouble(struct btQuantizedBvhDoubleData& quantizedBvhDoubleData)
{
	m_bvhAabbMax.deSerializeDouble(quantizedBvhDoubleData.m_bvhAabbMax);
	m_bvhAabbMin.deSerializeDouble(quantizedBvhDoubleData.m_bvhAabbMin);
	m_bvhQuantization.deSerializeDouble(quantizedBvhDoubleData.m_bvhQuantization);
	m_curNodeIndex = quantizedBvhDoubleData.m_curNodeIndex;

	m_useQuantization = quantizedBvhDoubleData.m_useQuantization != 0;

	{
		int numElem = quantizedBvhDoubleData.m_numContiguousLeafNodes;
		m_contiguousNodes.resize(numElem);
		if (numElem)
		{
			btOptimizedBvhNodeDoubleData* memPtr = quantizedBvhDoubleData.m_contiguousNodesPtr;

			for (int i = 0; i < numElem; i++, memPtr++)
			{
				m_contiguousNodes[i].m_aabbMaxOrg.deSerializeDouble(memPtr->m_aabbMaxOrg);
				m_contiguousNodes[i].m_aabbMinOrg.deSerializeDouble(memPtr->m_aabbMinOrg);
				m_contiguousNodes[i].m_escapeIndex = memPtr->m_escapeIndex;
				m_contiguousNodes[i].m_subPart = memPtr->m_subPart;
				m_contiguousNodes[i].m_triangleIndex = memPtr->m_triangleIndex;
			}
		}
	}
	{
		int numElem = quantizedBvhDoubleData.m_numQuantizedContiguousNodes;
		m_quantizedContiguousNodes.resize(numElem);

		if (numElem)
		{
			btQuantizedBvhNodeData* memPtr = quantizedBvhDoubleData.m_quantizedContiguousNodesPtr;
			for (int i = 0; i < numElem; i++, memPtr++)
			{
				m_quantizedContiguousNodes[i].m_escapeIndexOrTriangleIndex = memPtr->m_escapeIndexOrTriangleIndex;
				m_quantizedContiguousNodes[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
				m_quantizedContiguousNodes[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
				m_quantizedContiguousNodes[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
				m_quantizedContiguousNodes[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
				m_quantizedContiguousNodes[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
				m_quantizedContiguousNodes[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
			}
		}
	}
	m_traversalMode = btTraversalMode(quantizedBvhDoubleData.m_traversalMode);

	{
		int numElem = quantizedBvhDoubleData.m_numSubtreeHeaders;
		m_SubtreeHeaders.resize(numElem);
		if (numElem)
		{
			btBvhSubtreeInfoData* memPtr = quantizedBvhDoubleData.m_subTreeInfoPtr;

			for (int i = 0; i < numElem; i++, memPtr++)
			{
				m_SubtreeHeaders[i].m_quantizedAabbMax[0] = memPtr->m_quantizedAabbMax[0];
				m_SubtreeHeaders[i].m_quantizedAabbMax[1] = memPtr->m_quantizedAabbMax[1];
				m_SubtreeHeaders[i].m_quantizedAabbMax[2] = memPtr->m_quantizedAabbMax[2];
				m_SubtreeHeaders[i].m_quantizedAabbMin[0] = memPtr->m_quantizedAabbMin[0];
				m_SubtreeHeaders[i].m_quantizedAabbMin[1] = memPtr->m_quantizedAabbMin[1];
				m_SubtreeHeaders[i].m_quantizedAabbMin[2] = memPtr->m_quantizedAabbMin[2];
				m_SubtreeHeaders[i].m_rootNodeIndex = memPtr->m_rootNodeIndex;
				m_SubtreeHeaders[i].m_subtreeSize = memPtr->m_subtreeSize;
			}
		}
	}
}
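
/*
	Note (an assumption based on btQuantizedBvh.h): btQuantizedBvhData,
	btOptimizedBvhNodeData and btQuantizedBvhDataName are #defined to either the
	FloatData or DoubleData variants depending on BT_USE_DOUBLE_PRECISION, so
	serialize() below writes whichever precision the library was built with.
*/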
///fills the dataBuffer and returns the struct name (and 0 on failure)
const char* btQuantizedBvh::serialize(void* dataBuffer, btSerializer* serializer) const
{
	btQuantizedBvhData* quantizedData = (btQuantizedBvhData*)dataBuffer;

	m_bvhAabbMax.serialize(quantizedData->m_bvhAabbMax);
	m_bvhAabbMin.serialize(quantizedData->m_bvhAabbMin);
	m_bvhQuantization.serialize(quantizedData->m_bvhQuantization);
	quantizedData->m_curNodeIndex = m_curNodeIndex;
	quantizedData->m_useQuantization = m_useQuantization;
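
	//Each of the three node/header arrays below follows the same btSerializer chunk
	//pattern: getUniquePointer() records the array's address for pointer fixup in the
	//file image, allocate() reserves a chunk for numElem elements, the loop copies each
	//element into the chunk, and finalizeChunk() emits it under its struct name.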
	quantizedData->m_numContiguousLeafNodes = m_contiguousNodes.size();
	quantizedData->m_contiguousNodesPtr = (btOptimizedBvhNodeData*)(m_contiguousNodes.size() ? serializer->getUniquePointer((void*)&m_contiguousNodes[0]) : 0);
	if (quantizedData->m_contiguousNodesPtr)
	{
		int sz = sizeof(btOptimizedBvhNodeData);
		int numElem = m_contiguousNodes.size();
		btChunk* chunk = serializer->allocate(sz, numElem);
		btOptimizedBvhNodeData* memPtr = (btOptimizedBvhNodeData*)chunk->m_oldPtr;
		for (int i = 0; i < numElem; i++, memPtr++)
		{
			m_contiguousNodes[i].m_aabbMaxOrg.serialize(memPtr->m_aabbMaxOrg);
			m_contiguousNodes[i].m_aabbMinOrg.serialize(memPtr->m_aabbMinOrg);
			memPtr->m_escapeIndex = m_contiguousNodes[i].m_escapeIndex;
			memPtr->m_subPart = m_contiguousNodes[i].m_subPart;
			memPtr->m_triangleIndex = m_contiguousNodes[i].m_triangleIndex;
			// Fill padding with zeros to appease msan.
			memset(memPtr->m_pad, 0, sizeof(memPtr->m_pad));
		}
		serializer->finalizeChunk(chunk, "btOptimizedBvhNodeData", BT_ARRAY_CODE, (void*)&m_contiguousNodes[0]);
	}
	quantizedData->m_numQuantizedContiguousNodes = m_quantizedContiguousNodes.size();
	//	printf("quantizedData->m_numQuantizedContiguousNodes=%d\n",quantizedData->m_numQuantizedContiguousNodes);
	quantizedData->m_quantizedContiguousNodesPtr = (btQuantizedBvhNodeData*)(m_quantizedContiguousNodes.size() ? serializer->getUniquePointer((void*)&m_quantizedContiguousNodes[0]) : 0);
	if (quantizedData->m_quantizedContiguousNodesPtr)
	{
		int sz = sizeof(btQuantizedBvhNodeData);
		int numElem = m_quantizedContiguousNodes.size();
		btChunk* chunk = serializer->allocate(sz, numElem);
		btQuantizedBvhNodeData* memPtr = (btQuantizedBvhNodeData*)chunk->m_oldPtr;
		for (int i = 0; i < numElem; i++, memPtr++)
		{
			memPtr->m_escapeIndexOrTriangleIndex = m_quantizedContiguousNodes[i].m_escapeIndexOrTriangleIndex;
			memPtr->m_quantizedAabbMax[0] = m_quantizedContiguousNodes[i].m_quantizedAabbMax[0];
			memPtr->m_quantizedAabbMax[1] = m_quantizedContiguousNodes[i].m_quantizedAabbMax[1];
			memPtr->m_quantizedAabbMax[2] = m_quantizedContiguousNodes[i].m_quantizedAabbMax[2];
			memPtr->m_quantizedAabbMin[0] = m_quantizedContiguousNodes[i].m_quantizedAabbMin[0];
			memPtr->m_quantizedAabbMin[1] = m_quantizedContiguousNodes[i].m_quantizedAabbMin[1];
			memPtr->m_quantizedAabbMin[2] = m_quantizedContiguousNodes[i].m_quantizedAabbMin[2];
		}
		serializer->finalizeChunk(chunk, "btQuantizedBvhNodeData", BT_ARRAY_CODE, (void*)&m_quantizedContiguousNodes[0]);
	}
	quantizedData->m_traversalMode = int(m_traversalMode);
	quantizedData->m_numSubtreeHeaders = m_SubtreeHeaders.size();

	quantizedData->m_subTreeInfoPtr = (btBvhSubtreeInfoData*)(m_SubtreeHeaders.size() ? serializer->getUniquePointer((void*)&m_SubtreeHeaders[0]) : 0);
	if (quantizedData->m_subTreeInfoPtr)
	{
		int sz = sizeof(btBvhSubtreeInfoData);
		int numElem = m_SubtreeHeaders.size();
		btChunk* chunk = serializer->allocate(sz, numElem);
		btBvhSubtreeInfoData* memPtr = (btBvhSubtreeInfoData*)chunk->m_oldPtr;
		for (int i = 0; i < numElem; i++, memPtr++)
		{
			memPtr->m_quantizedAabbMax[0] = m_SubtreeHeaders[i].m_quantizedAabbMax[0];
			memPtr->m_quantizedAabbMax[1] = m_SubtreeHeaders[i].m_quantizedAabbMax[1];
			memPtr->m_quantizedAabbMax[2] = m_SubtreeHeaders[i].m_quantizedAabbMax[2];
			memPtr->m_quantizedAabbMin[0] = m_SubtreeHeaders[i].m_quantizedAabbMin[0];
			memPtr->m_quantizedAabbMin[1] = m_SubtreeHeaders[i].m_quantizedAabbMin[1];
			memPtr->m_quantizedAabbMin[2] = m_SubtreeHeaders[i].m_quantizedAabbMin[2];
			memPtr->m_rootNodeIndex = m_SubtreeHeaders[i].m_rootNodeIndex;
			memPtr->m_subtreeSize = m_SubtreeHeaders[i].m_subtreeSize;
		}
		serializer->finalizeChunk(chunk, "btBvhSubtreeInfoData", BT_ARRAY_CODE, (void*)&m_SubtreeHeaders[0]);
	}
	return btQuantizedBvhDataName;
}
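
/*
	A minimal usage sketch for serialize(), a hedged example following the pattern a
	shape can use to write out its BVH (assumes a valid btQuantizedBvh* bvh and an
	already-started btSerializer* serializer):

		int len = bvh->calculateSerializeBufferSizeNew();
		btChunk* chunk = serializer->allocate(len, 1);
		const char* structType = bvh->serialize(chunk->m_oldPtr, serializer);
		serializer->finalizeChunk(chunk, structType, BT_QUANTIZED_BVH_CODE, (void*)bvh);
*/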