Merge pull request #61135 from akien-mga/recast-20220320
commit e2d0aa3779
@@ -551,7 +551,7 @@ Files extracted from upstream source:
 ## recastnavigation

 - Upstream: https://github.com/recastnavigation/recastnavigation
-- Version: git (57610fa6ef31b39020231906f8c5d40eaa8294ae, 2019)
+- Version: git (5a870d427e47abd4a8e4ce58a95582ec049434d5, 2022)
 - License: zlib

 Files extracted from upstream source:
@@ -22,7 +22,7 @@
 #include <stddef.h>
 #include <stdint.h>

-#include <RecastAssert.h>
+#include "RecastAssert.h"

 /// Provides hint values to the memory allocator on how long the
 /// memory is expected to be used.
@@ -106,6 +106,8 @@ class rcVectorBase {
	// Creates an array of the given size, copies all of this vector's data into it, and returns it.
	T* allocate_and_copy(rcSizeType size);
	void resize_impl(rcSizeType size, const T* value);
+	// Requires: min_capacity > m_cap.
+	rcSizeType get_new_capacity(rcSizeType min_capacity);
 public:
	typedef rcSizeType size_type;
	typedef T value_type;
@@ -196,8 +198,7 @@ void rcVectorBase<T, H>::push_back(const T& value) {
		return;
	}

-	rcAssert(RC_SIZE_MAX / 2 >= m_size);
-	rcSizeType new_cap = m_size ? 2*m_size : 1;
+	const rcSizeType new_cap = get_new_capacity(m_cap + 1);
	T* data = allocate_and_copy(new_cap);
	// construct between allocate and destroy+free in case value is
	// in this vector.
@@ -208,25 +209,44 @@ void rcVectorBase<T, H>::push_back(const T& value) {
	rcFree(m_data);
	m_data = data;
 }

+template <typename T, rcAllocHint H>
+rcSizeType rcVectorBase<T, H>::get_new_capacity(rcSizeType min_capacity) {
+	rcAssert(min_capacity <= RC_SIZE_MAX);
+	if (rcUnlikely(m_cap >= RC_SIZE_MAX / 2))
+		return RC_SIZE_MAX;
+	return 2 * m_cap > min_capacity ? 2 * m_cap : min_capacity;
+}
+
 template <typename T, rcAllocHint H>
 void rcVectorBase<T, H>::resize_impl(rcSizeType size, const T* value) {
	if (size < m_size) {
		destroy_range(size, m_size);
		m_size = size;
	} else if (size > m_size) {
-		T* new_data = allocate_and_copy(size);
-		// We defer deconstructing/freeing old data until after constructing
-		// new elements in case "value" is there.
-		if (value) {
-			construct_range(new_data + m_size, new_data + size, *value);
-		} else {
-			construct_range(new_data + m_size, new_data + size);
-		}
-		destroy_range(0, m_size);
-		rcFree(m_data);
-		m_data = new_data;
-		m_cap = size;
-		m_size = size;
+		if (size <= m_cap) {
+			if (value) {
+				construct_range(m_data + m_size, m_data + size, *value);
+			} else {
+				construct_range(m_data + m_size, m_data + size);
+			}
+			m_size = size;
+		} else {
+			const rcSizeType new_cap = get_new_capacity(size);
+			T* new_data = allocate_and_copy(new_cap);
+			// We defer deconstructing/freeing old data until after constructing
+			// new elements in case "value" is there.
+			if (value) {
+				construct_range(new_data + m_size, new_data + size, *value);
+			} else {
+				construct_range(new_data + m_size, new_data + size);
+			}
+			destroy_range(0, m_size);
+			rcFree(m_data);
+			m_data = new_data;
+			m_cap = new_cap;
+			m_size = size;
+		}
	}
 }

 template <typename T, rcAllocHint H>
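For context, the growth policy introduced above doubles the current capacity and falls back to the requested minimum, clamping near RC_SIZE_MAX, so repeated push_back calls stay amortized O(1) even after a large resize. Below is a minimal editorial sketch of that policy, not the upstream code: plain int stands in for rcSizeType, INT_SIZE_MAX for RC_SIZE_MAX, and the loop in main is a hypothetical trace of successive push_back requests.

```cpp
#include <cassert>
#include <cstdio>

// Sketch of the doubling policy, with simplified names (assumption: this
// mirrors get_new_capacity above, it is not the vendored implementation).
static const int INT_SIZE_MAX = 0x7fffffff;

int get_new_capacity(int current_cap, int min_capacity)
{
	assert(min_capacity <= INT_SIZE_MAX);
	if (current_cap >= INT_SIZE_MAX / 2)
		return INT_SIZE_MAX; // avoid overflow when doubling
	return 2 * current_cap > min_capacity ? 2 * current_cap : min_capacity;
}

int main()
{
	// Capacity trace for successive push_back calls starting from empty:
	// each call asks for cap + 1, and doubling keeps reallocations rare.
	int cap = 0;
	for (int size = 0; size < 10; ++size) {
		if (size + 1 > cap)
			cap = get_new_capacity(cap, cap + 1); // 1, 2, 4, 8, 16, ...
		std::printf("size=%d cap=%d\n", size + 1, cap);
	}
	return 0;
}
```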
@@ -303,6 +323,7 @@ public:
	rcIntArray(int n) : m_impl(n, 0) {}
	void push(int item) { m_impl.push_back(item); }
	void resize(int size) { m_impl.resize(size); }
+	void clear() { m_impl.clear(); }
	int pop()
	{
		int v = m_impl.back();
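This new clear() wrapper is what the .cpp hunks below switch to in place of resize(0): both drop all elements, and the rename mainly states that intent directly. A small usage sketch follows, using a hypothetical IntArray backed by std::vector rather than rcIntArray itself; whether capacity is retained after clear() is an rcVectorBase implementation detail that this diff does not show and is only assumed here.

```cpp
#include <cstdio>
#include <vector>

// Hypothetical stand-in for rcIntArray, only to illustrate the call-site
// change from resize(0) to clear(). (Capacity behavior is an assumption.)
struct IntArray {
	std::vector<int> impl;
	void push(int v) { impl.push_back(v); }
	void resize(int n) { impl.resize(n, 0); }
	void clear() { impl.clear(); }
	int size() const { return (int)impl.size(); }
};

int main()
{
	IntArray stack;
	for (int i = 0; i < 4; ++i)
		stack.push(i);
	stack.resize(0); // old call sites: shrink to zero elements
	std::printf("after resize(0): size=%d\n", stack.size());

	for (int i = 0; i < 4; ++i)
		stack.push(i);
	stack.clear();   // new call sites: same effect, clearer intent
	std::printf("after clear():   size=%d\n", stack.size());
	return 0;
}
```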
@@ -921,8 +921,8 @@ bool rcBuildContours(rcContext* ctx, rcCompactHeightfield& chf,
 continue;
 const unsigned char area = chf.areas[i];

-verts.resize(0);
-simplified.resize(0);
+verts.clear();
+simplified.clear();

 ctx->startTimer(RC_TIMER_BUILD_CONTOURS_TRACE);
 walkContour(x, y, i, chf, flags, verts);
@@ -653,8 +653,8 @@ static bool buildPolyDetail(rcContext* ctx, const float* in, const int nin,
 for (int i = 0; i < nin; ++i)
 rcVcopy(&verts[i*3], &in[i*3]);

-edges.resize(0);
-tris.resize(0);
+edges.clear();
+tris.clear();

 const float cs = chf.cs;
 const float ics = 1.0f/cs;
@@ -803,7 +803,7 @@ static bool buildPolyDetail(rcContext* ctx, const float* in, const int nin,
 int x1 = (int)ceilf(bmax[0]/sampleDist);
 int z0 = (int)floorf(bmin[2]/sampleDist);
 int z1 = (int)ceilf(bmax[2]/sampleDist);
-samples.resize(0);
+samples.clear();
 for (int z = z0; z < z1; ++z)
 {
 for (int x = x0; x < x1; ++x)
@@ -864,8 +864,8 @@ static bool buildPolyDetail(rcContext* ctx, const float* in, const int nin,

 // Create new triangulation.
 // TODO: Incremental add instead of full rebuild.
-edges.resize(0);
-tris.resize(0);
+edges.clear();
+tris.clear();
 delaunayHull(ctx, nverts, verts, nhull, hull, tris, edges);
 }
 }
@@ -935,7 +935,7 @@ static void seedArrayWithPolyCenter(rcContext* ctx, const rcCompactHeightfield&
 pcy /= npoly;

 // Use seeds array as a stack for DFS
-array.resize(0);
+array.clear();
 array.push(startCellX);
 array.push(startCellY);
 array.push(startSpanIndex);
@@ -1001,7 +1001,7 @@ static void seedArrayWithPolyCenter(rcContext* ctx, const rcCompactHeightfield&
 rcSwap(dirs[directDir], dirs[3]);
 }

-array.resize(0);
+array.clear();
 // getHeightData seeds are given in coordinates with borders
 array.push(cx+bs);
 array.push(cy+bs);
@@ -1030,7 +1030,7 @@ static void getHeightData(rcContext* ctx, const rcCompactHeightfield& chf,
 // Note: Reads to the compact heightfield are offset by border size (bs)
 // since border size offset is already removed from the polymesh vertices.

-queue.resize(0);
+queue.clear();
 // Set all heights to RC_UNSET_HEIGHT.
 memset(hp.data, 0xff, sizeof(unsigned short)*hp.width*hp.height);
@@ -650,7 +650,7 @@ static bool mergeRegions(rcRegion& rega, rcRegion& regb)
 return false;

 // Merge neighbours.
-rega.connections.resize(0);
+rega.connections.clear();
 for (int i = 0, ni = acon.size(); i < ni-1; ++i)
 rega.connections.push(acon[(insa+1+i) % ni]);
@@ -876,8 +876,8 @@ static bool mergeAndFilterRegions(rcContext* ctx, int minRegionArea, int mergeRe
 // Also keep track of the regions connects to a tile border.
 bool connectsToBorder = false;
 int spanCount = 0;
-stack.resize(0);
-trace.resize(0);
+stack.clear();
+trace.clear();

 reg.visited = true;
 stack.push(i);
@@ -1068,7 +1068,7 @@ static bool mergeAndFilterLayerRegions(rcContext* ctx, int minRegionArea,
 {
 const rcCompactCell& c = chf.cells[x+y*w];

-lregs.resize(0);
+lregs.clear();

 for (int i = (int)c.index, ni = (int)(c.index+c.count); i < ni; ++i)
 {
@@ -1139,7 +1139,7 @@ static bool mergeAndFilterLayerRegions(rcContext* ctx, int minRegionArea,
 // Start search.
 root.id = layerId;

-stack.resize(0);
+stack.clear();
 stack.push(i);

 while (stack.size() > 0)