Curve Fitting: Add alternate 'refit' method

This adds an alternative curve-fitting method which incrementally simplifies the curve, then re-fits it.

It generally gives better results and also improves corner detection.
Campbell Barton 2016-07-25 14:12:17 +10:00
parent f23fecf306
commit 2418daede5
10 changed files with 2043 additions and 13 deletions

@@ -26,10 +26,14 @@ set(INC_SYS
set(SRC
intern/curve_fit_cubic.c
intern/curve_fit_cubic_refit.c
intern/curve_fit_corners_detect.c
intern/curve_fit_inline.h
curve_fit_nd.h
intern/curve_fit_inline.h
intern/generic_alloc_impl.h
intern/generic_heap.c
intern/generic_heap.h
)
blender_add_lib(extern_curve_fit_nd "${SRC}" "${INC}" "${INC_SYS}")

@@ -86,6 +86,7 @@ int curve_fit_cubic_to_points_fl(
*
* \param points, points_len: The array of points to calculate cubics from.
* \param dims: The number of dimensions for each element in \a points.
* \param points_length_cache: Optional pre-calculated lengths between points.
* \param error_threshold: the error threshold to allow for.
* \param tan_l, tan_r: Normalized tangents the handles will be aligned to.
* Note that tangents must both point along the direction of the \a points,
@@ -98,6 +99,7 @@ int curve_fit_cubic_to_points_single_db(
int curve_fit_cubic_to_points_single_db(
const double *points,
const unsigned int points_len,
const double *points_length_cache,
const unsigned int dims,
const double error_threshold,
const double tan_l[],
@@ -110,6 +112,7 @@ int curve_fit_cubic_to_points_single_db(
int curve_fit_cubic_to_points_single_fl(
const float *points,
const unsigned int points_len,
const float *points_length_cache,
const unsigned int dims,
const float error_threshold,
const float tan_l[],
@@ -121,8 +124,40 @@ int curve_fit_cubic_to_points_single_fl(
enum {
CURVE_FIT_CALC_HIGH_QUALIY = (1 << 0),
CURVE_FIT_CALC_CYCLIC = (1 << 1),
};
/* curve_fit_cubic_refit.c */
int curve_fit_cubic_to_points_refit_db(
const double *points,
const unsigned int points_len,
const unsigned int dims,
const double error_threshold,
const unsigned int calc_flag,
const unsigned int *corners,
unsigned int corners_len,
const double corner_angle,
double **r_cubic_array, unsigned int *r_cubic_array_len,
unsigned int **r_cubic_orig_index,
unsigned int **r_corner_index_array, unsigned int *r_corner_index_len);
int curve_fit_cubic_to_points_refit_fl(
const float *points,
const unsigned int points_len,
const unsigned int dims,
const float error_threshold,
const unsigned int calc_flag,
const unsigned int *corners,
unsigned int corners_len,
const float corner_angle,
float **r_cubic_array, unsigned int *r_cubic_array_len,
unsigned int **r_cubic_orig_index,
unsigned int **r_corner_index_array, unsigned int *r_corner_index_len);
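For orientation, a minimal usage sketch of the new refit entry point declared above. The 2D input data and threshold values below are made up, and the example assumes (as with the existing curve_fit_cubic_to_points_* functions) that 0 means success and that the returned arrays are malloc'd and owned by the caller:

#include <math.h>
#include <stdlib.h>
#include "curve_fit_nd.h"

int fit_example(void)
{
	/* a handful of 2D sample points (hypothetical input) */
	const double points[] = {
		0.0, 0.0,  1.0, 0.5,  2.0, 1.5,  3.0, 3.0,  4.0, 5.0,
	};
	const unsigned int points_len = 5;
	const unsigned int dims = 2;

	double *cubic_array = NULL;
	unsigned int cubic_array_len = 0;
	unsigned int *cubic_orig_index = NULL;
	unsigned int *corner_index_array = NULL;
	unsigned int corner_index_len = 0;

	const int error = curve_fit_cubic_to_points_refit_db(
	        points, points_len, dims,
	        0.01,                        /* error_threshold */
	        CURVE_FIT_CALC_HIGH_QUALIY,  /* calc_flag (spelling as declared above) */
	        NULL, 0,                     /* no pre-detected corners */
	        M_PI,                        /* corner_angle (assumed: angles sharper than this become corners) */
	        &cubic_array, &cubic_array_len,
	        &cubic_orig_index,
	        &corner_index_array, &corner_index_len);

	if (error == 0) {
		/* ... use cubic_array (handles + knots), cubic_orig_index and the corner indices ... */
		free(cubic_array);
		free(cubic_orig_index);
		free(corner_index_array);
	}
	return error;
}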
/* curve_fit_corners_detect.c */
/**

@@ -474,7 +474,7 @@ static double points_calc_circumference_factor(
* We could try to support this, but it would likely cause extreme >1 scales which could cause other issues. */
// assert(angle >= len_tangent);
double factor = (angle / len_tangent);
assert(factor < (M_PI / 2) + DBL_EPSILON);
assert(factor < (M_PI / 2) + (DBL_EPSILON * 10));
return factor;
}
else {
@@ -876,7 +876,6 @@ static double points_calc_coord_length(
#ifdef USE_LENGTH_CACHE
length = points_length_cache[i];
assert(len_vnvn(pt, pt_prev, dims) == points_length_cache[i]);
#else
length = len_vnvn(pt, pt_prev, dims);
@@ -1435,6 +1434,7 @@ int curve_fit_cubic_to_points_fl(
int curve_fit_cubic_to_points_single_db(
const double *points,
const uint points_len,
const double *points_length_cache,
const uint dims,
const double error_threshold,
const double tan_l[],
@@ -1451,10 +1451,14 @@ int curve_fit_cubic_to_points_single_db(
/* in this instance there's no advantage in using the length cache,
* since we're not recursively calculating values. */
#ifdef USE_LENGTH_CACHE
double *points_length_cache = malloc(sizeof(double) * points_len);
points_calc_coord_length_cache(
points, points_len, dims,
points_length_cache);
double *points_length_cache_alloc = NULL;
if (points_length_cache == NULL) {
points_length_cache_alloc = malloc(sizeof(double) * points_len);
points_calc_coord_length_cache(
points, points_len, dims,
points_length_cache_alloc);
points_length_cache = points_length_cache_alloc;
}
#endif
fit_cubic_to_points(
@@ -1467,7 +1471,9 @@ int curve_fit_cubic_to_points_single_db(
cubic, r_error_max_sq, &split_index);
#ifdef USE_LENGTH_CACHE
free(points_length_cache);
if (points_length_cache_alloc) {
free(points_length_cache_alloc);
}
#endif
copy_vnvn(r_handle_l, CUBIC_PT(cubic, 1, dims), dims);
@@ -1479,6 +1485,7 @@ int curve_fit_cubic_to_points_single_db(
int curve_fit_cubic_to_points_single_fl(
const float *points,
const uint points_len,
const float *points_length_cache,
const uint dims,
const float error_threshold,
const float tan_l[],
@@ -1490,9 +1497,15 @@ int curve_fit_cubic_to_points_single_fl(
{
const uint points_flat_len = points_len * dims;
double *points_db = malloc(sizeof(double) * points_flat_len);
double *points_length_cache_db = NULL;
copy_vndb_vnfl(points_db, points, points_flat_len);
if (points_length_cache) {
points_length_cache_db = malloc(sizeof(double) * points_len);
copy_vndb_vnfl(points_length_cache_db, points_length_cache, points_len);
}
#ifdef USE_VLA
double tan_l_db[dims];
double tan_r_db[dims];
@@ -1510,7 +1523,7 @@ int curve_fit_cubic_to_points_single_fl(
copy_vndb_vnfl(tan_r_db, tan_r, dims);
int result = curve_fit_cubic_to_points_single_db(
points_db, points_len, dims,
points_db, points_len, points_length_cache_db, dims,
(double)error_threshold,
tan_l_db, tan_r_db,
r_handle_l_db, r_handle_r_db,
@@ -1518,6 +1531,10 @@ int curve_fit_cubic_to_points_single_fl(
free(points_db);
if (points_length_cache_db) {
free(points_length_cache_db);
}
copy_vnfl_vndb(r_handle_l, r_handle_l_db, dims);
copy_vnfl_vndb(r_handle_r, r_handle_r_db, dims);
*r_error_sq = (float)r_error_sq_db;
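
As an aside on the new points_length_cache argument: passing NULL (as the updated Blender call sites below do) keeps the old behaviour, but a caller that already walks the points can pre-fill the cache. A hedged sketch, assuming -- per the assert in points_calc_coord_length above -- that entry i holds the euclidean distance from point i-1 to point i and that the leading entry is unused:

#include <math.h>
#include <stdlib.h>

/* build_length_cache() is a hypothetical helper, not part of the library */
static double *build_length_cache(
        const double *points, const unsigned int points_len, const unsigned int dims)
{
	double *cache = malloc(sizeof(double) * points_len);
	cache[0] = 0.0;  /* assumption: the first entry is never read */
	for (unsigned int i = 1; i < points_len; i++) {
		const double *prev = &points[(i - 1) * dims];
		const double *curr = &points[i * dims];
		double len_sq = 0.0;
		for (unsigned int d = 0; d < dims; d++) {
			const double delta = curr[d] - prev[d];
			len_sq += delta * delta;
		}
		cache[i] = sqrt(len_sq);
	}
	return cache;
}

The result can then be handed to curve_fit_cubic_to_points_single_db(), which otherwise allocates and fills the cache itself via the points_length_cache_alloc fallback added above.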

File diff suppressed because it is too large.

@@ -290,14 +290,12 @@ MINLINE bool equals_vnvn(
return true;
}
#if 0
MINLINE void project_vn_vnvn(
double v_out[], const double p[], const double v_proj[], const uint dims)
{
const double mul = dot_vnvn(p, v_proj, dims) / dot_vnvn(v_proj, v_proj, dims);
mul_vnvn_fl(v_out, v_proj, mul, dims);
}
#endif
MINLINE void project_vn_vnvn_normalized(
double v_out[], const double p[], const double v_proj[], const uint dims)

@@ -0,0 +1,220 @@
/*
* Copyright (c) 2016, Blender Foundation.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* \file generic_alloc_impl.c
* \ingroup curve_fit
*
* Simple Memory Chunking Allocator
* ================================
*
* Defines need to be set:
* - #TPOOL_IMPL_PREFIX: Prefix to use for the API.
* - #TPOOL_ALLOC_TYPE: Struct type this pool handles.
* - #TPOOL_STRUCT: Name for the pool struct.
* - #TPOOL_CHUNK_SIZE: Chunk size (optional), defaults to 64kb when not defined.
*
* \note #TPOOL_ALLOC_TYPE must be at least ``sizeof(void *)``.
*
* Defines the API, uses #TPOOL_IMPL_PREFIX to prefix each function.
*
* - *_pool_create()
* - *_pool_destroy()
* - *_pool_clear()
*
* - *_pool_elem_alloc()
* - *_pool_elem_calloc()
* - *_pool_elem_free()
*/
/* check we're not building directly */
#if !defined(TPOOL_IMPL_PREFIX) || \
!defined(TPOOL_ALLOC_TYPE) || \
!defined(TPOOL_STRUCT)
# error "This file can't be compiled directly, include in another source file"
#endif
#define _CONCAT_AUX(MACRO_ARG1, MACRO_ARG2) MACRO_ARG1 ## MACRO_ARG2
#define _CONCAT(MACRO_ARG1, MACRO_ARG2) _CONCAT_AUX(MACRO_ARG1, MACRO_ARG2)
#define _TPOOL_PREFIX(id) _CONCAT(TPOOL_IMPL_PREFIX, _##id)
/* local identifiers */
#define pool_create _TPOOL_PREFIX(pool_create)
#define pool_destroy _TPOOL_PREFIX(pool_destroy)
#define pool_clear _TPOOL_PREFIX(pool_clear)
#define pool_elem_alloc _TPOOL_PREFIX(pool_elem_alloc)
#define pool_elem_calloc _TPOOL_PREFIX(pool_elem_calloc)
#define pool_elem_free _TPOOL_PREFIX(pool_elem_free)
/* private identifiers (only for this file, undefine after) */
#define pool_alloc_chunk _TPOOL_PREFIX(pool_alloc_chunk)
#define TPoolChunk _TPOOL_PREFIX(TPoolChunk)
#define TPoolChunkElemFree _TPOOL_PREFIX(TPoolChunkElemFree)
#ifndef TPOOL_CHUNK_SIZE
#define TPOOL_CHUNK_SIZE (1 << 16) /* 64kb */
#define _TPOOL_CHUNK_SIZE_UNDEF
#endif
#ifndef UNLIKELY
# ifdef __GNUC__
# define UNLIKELY(x) __builtin_expect(!!(x), 0)
# else
# define UNLIKELY(x) (x)
# endif
#endif
#ifdef __GNUC__
# define MAYBE_UNUSED __attribute__((unused))
#else
# define MAYBE_UNUSED
#endif
struct TPoolChunk {
struct TPoolChunk *prev;
unsigned int size;
unsigned int bufsize;
TPOOL_ALLOC_TYPE buf[0];
};
struct TPoolChunkElemFree {
struct TPoolChunkElemFree *next;
};
struct TPOOL_STRUCT {
/* Always keep at least one chunk (never NULL) */
struct TPoolChunk *chunk;
/* when NULL, allocate a new chunk */
struct TPoolChunkElemFree *free;
};
/**
* Number of elems to include per #TPoolChunk when no reserved size is passed,
* or we allocate past the reserved number.
*
* \note Optimize number for 64kb allocs.
*/
#define _TPOOL_CHUNK_DEFAULT_NUM \
(((1 << 16) - sizeof(struct TPoolChunk)) / sizeof(TPOOL_ALLOC_TYPE))
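/* A worked example (an aside, not part of the commit): on a typical 64-bit
 * layout, with TPOOL_ALLOC_TYPE = HeapNode from generic_heap.c (a pointer,
 * a double and an unsigned int, likely padded to 24 bytes) and
 * sizeof(struct TPoolChunk) = 16, this comes to (65536 - 16) / 24 = 2730
 * elements per default chunk. */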
/** \name Internal Memory Management
* \{ */
static struct TPoolChunk *pool_alloc_chunk(
unsigned int tot_elems, struct TPoolChunk *chunk_prev)
{
struct TPoolChunk *chunk = malloc(
sizeof(struct TPoolChunk) + (sizeof(TPOOL_ALLOC_TYPE) * tot_elems));
chunk->prev = chunk_prev;
chunk->bufsize = tot_elems;
chunk->size = 0;
return chunk;
}
static TPOOL_ALLOC_TYPE *pool_elem_alloc(struct TPOOL_STRUCT *pool)
{
TPOOL_ALLOC_TYPE *elem;
if (pool->free) {
elem = (TPOOL_ALLOC_TYPE *)pool->free;
pool->free = pool->free->next;
}
else {
struct TPoolChunk *chunk = pool->chunk;
if (UNLIKELY(chunk->size == chunk->bufsize)) {
chunk = pool->chunk = pool_alloc_chunk(_TPOOL_CHUNK_DEFAULT_NUM, chunk);
}
elem = &chunk->buf[chunk->size++];
}
return elem;
}
MAYBE_UNUSED
static TPOOL_ALLOC_TYPE *pool_elem_calloc(struct TPOOL_STRUCT *pool)
{
TPOOL_ALLOC_TYPE *elem = pool_elem_alloc(pool);
memset(elem, 0, sizeof(*elem));
return elem;
}
static void pool_elem_free(struct TPOOL_STRUCT *pool, TPOOL_ALLOC_TYPE *elem)
{
struct TPoolChunkElemFree *elem_free = (struct TPoolChunkElemFree *)elem;
elem_free->next = pool->free;
pool->free = elem_free;
}
static void pool_create(struct TPOOL_STRUCT *pool, unsigned int tot_reserve)
{
pool->chunk = pool_alloc_chunk((tot_reserve > 1) ? tot_reserve : _TPOOL_CHUNK_DEFAULT_NUM, NULL);
pool->free = NULL;
}
MAYBE_UNUSED
static void pool_clear(struct TPOOL_STRUCT *pool)
{
/* Remove all except the last chunk */
while (pool->chunk->prev) {
struct TPoolChunk *chunk_prev = pool->chunk->prev;
free(pool->chunk);
pool->chunk = chunk_prev;
}
pool->chunk->size = 0;
pool->free = NULL;
}
static void pool_destroy(struct TPOOL_STRUCT *pool)
{
struct TPoolChunk *chunk = pool->chunk;
do {
struct TPoolChunk *chunk_prev;
chunk_prev = chunk->prev;
free(chunk);
chunk = chunk_prev;
} while (chunk);
pool->chunk = NULL;
pool->free = NULL;
}
/** \} */
#undef _TPOOL_CHUNK_DEFAULT_NUM
#undef _CONCAT_AUX
#undef _CONCAT
#undef _TPOOL_PREFIX
#undef TPoolChunk
#undef TPoolChunkElemFree
#ifdef _TPOOL_CHUNK_SIZE_UNDEF
# undef TPOOL_CHUNK_SIZE
# undef _TPOOL_CHUNK_SIZE_UNDEF
#endif
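
To show how these defines are meant to be consumed, here is a minimal instantiation sketch for a made-up element type (MyElem and the myelem_* names are hypothetical; generic_heap.c below does the same thing for its HeapNode type):

#include <stdlib.h>
#include <string.h>

typedef struct MyElem {
	double value;
	struct MyElem *next;  /* the element must be at least sizeof(void *) */
} MyElem;

#define TPOOL_IMPL_PREFIX  myelem
#define TPOOL_ALLOC_TYPE   MyElem
#define TPOOL_STRUCT       MyElemPool
#include "generic_alloc_impl.h"
#undef TPOOL_IMPL_PREFIX
#undef TPOOL_ALLOC_TYPE
#undef TPOOL_STRUCT

void myelem_example(void)
{
	struct MyElemPool pool;
	myelem_pool_create(&pool, 128);                /* reserve room for 128 elements */
	MyElem *elem = myelem_pool_elem_calloc(&pool); /* zero-initialized element */
	elem->value = 1.0;
	myelem_pool_elem_free(&pool, elem);            /* back onto the free list */
	myelem_pool_destroy(&pool);
}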

@@ -0,0 +1,278 @@
/*
* Copyright (c) 2016, Blender Foundation.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/** \file generic_heap.c
* \ingroup curve_fit
*/
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <assert.h>
#include "generic_heap.h"
/* swap with a temp value */
#define SWAP_TVAL(tval, a, b) { \
(tval) = (a); \
(a) = (b); \
(b) = (tval); \
} (void)0
#ifdef __GNUC__
# define UNLIKELY(x) __builtin_expect(!!(x), 0)
#else
# define UNLIKELY(x) (x)
#endif
/***/
struct HeapNode {
void *ptr;
double value;
unsigned int index;
};
/* heap_* pool allocator */
#define TPOOL_IMPL_PREFIX heap
#define TPOOL_ALLOC_TYPE HeapNode
#define TPOOL_STRUCT HeapMemPool
#include "generic_alloc_impl.h"
#undef TPOOL_IMPL_PREFIX
#undef TPOOL_ALLOC_TYPE
#undef TPOOL_STRUCT
struct Heap {
unsigned int size;
unsigned int bufsize;
HeapNode **tree;
struct HeapMemPool pool;
};
/** \name Internal Functions
* \{ */
#define HEAP_PARENT(i) (((i) - 1) >> 1)
#define HEAP_LEFT(i) (((i) << 1) + 1)
#define HEAP_RIGHT(i) (((i) << 1) + 2)
#define HEAP_COMPARE(a, b) ((a)->value < (b)->value)
#if 0 /* UNUSED */
#define HEAP_EQUALS(a, b) ((a)->value == (b)->value)
#endif
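/* Quick sanity check of the implicit-tree indexing (an aside, not part of the
 * commit): for i = 3, HEAP_PARENT(3) = (3 - 1) >> 1 = 1,
 * HEAP_LEFT(3) = (3 << 1) + 1 = 7 and HEAP_RIGHT(3) = 8; the root sits at index 0. */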
static void heap_swap(Heap *heap, const unsigned int i, const unsigned int j)
{
#if 0
SWAP(unsigned int, heap->tree[i]->index, heap->tree[j]->index);
SWAP(HeapNode *, heap->tree[i], heap->tree[j]);
#else
HeapNode **tree = heap->tree;
union {
unsigned int index;
HeapNode *node;
} tmp;
SWAP_TVAL(tmp.index, tree[i]->index, tree[j]->index);
SWAP_TVAL(tmp.node, tree[i], tree[j]);
#endif
}
static void heap_down(Heap *heap, unsigned int i)
{
/* size won't change in the loop */
const unsigned int size = heap->size;
while (1) {
const unsigned int l = HEAP_LEFT(i);
const unsigned int r = HEAP_RIGHT(i);
unsigned int smallest;
smallest = ((l < size) && HEAP_COMPARE(heap->tree[l], heap->tree[i])) ? l : i;
if ((r < size) && HEAP_COMPARE(heap->tree[r], heap->tree[smallest])) {
smallest = r;
}
if (smallest == i) {
break;
}
heap_swap(heap, i, smallest);
i = smallest;
}
}
static void heap_up(Heap *heap, unsigned int i)
{
while (i > 0) {
const unsigned int p = HEAP_PARENT(i);
if (HEAP_COMPARE(heap->tree[p], heap->tree[i])) {
break;
}
heap_swap(heap, p, i);
i = p;
}
}
/** \} */
/** \name Public Heap API
* \{ */
/* use when the size of the heap is known in advance */
Heap *HEAP_new(unsigned int tot_reserve)
{
Heap *heap = malloc(sizeof(Heap));
/* ensure we have at least one so we can keep doubling it */
heap->size = 0;
heap->bufsize = tot_reserve ? tot_reserve : 1;
heap->tree = malloc(heap->bufsize * sizeof(HeapNode *));
heap_pool_create(&heap->pool, tot_reserve);
return heap;
}
void HEAP_free(Heap *heap, HeapFreeFP ptrfreefp)
{
if (ptrfreefp) {
unsigned int i;
for (i = 0; i < heap->size; i++) {
ptrfreefp(heap->tree[i]->ptr);
}
}
heap_pool_destroy(&heap->pool);
free(heap->tree);
free(heap);
}
void HEAP_clear(Heap *heap, HeapFreeFP ptrfreefp)
{
if (ptrfreefp) {
unsigned int i;
for (i = 0; i < heap->size; i++) {
ptrfreefp(heap->tree[i]->ptr);
}
}
heap->size = 0;
heap_pool_clear(&heap->pool);
}
HeapNode *HEAP_insert(Heap *heap, double value, void *ptr)
{
HeapNode *node;
if (UNLIKELY(heap->size >= heap->bufsize)) {
heap->bufsize *= 2;
heap->tree = realloc(heap->tree, heap->bufsize * sizeof(*heap->tree));
}
node = heap_pool_elem_alloc(&heap->pool);
node->ptr = ptr;
node->value = value;
node->index = heap->size;
heap->tree[node->index] = node;
heap->size++;
heap_up(heap, node->index);
return node;
}
bool HEAP_is_empty(Heap *heap)
{
return (heap->size == 0);
}
unsigned int HEAP_size(Heap *heap)
{
return heap->size;
}
HeapNode *HEAP_top(Heap *heap)
{
return heap->tree[0];
}
double HEAP_top_value(Heap *heap)
{
return heap->tree[0]->value;
}
void *HEAP_popmin(Heap *heap)
{
void *ptr = heap->tree[0]->ptr;
assert(heap->size != 0);
heap_pool_elem_free(&heap->pool, heap->tree[0]);
if (--heap->size) {
heap_swap(heap, 0, heap->size);
heap_down(heap, 0);
}
return ptr;
}
void HEAP_remove(Heap *heap, HeapNode *node)
{
unsigned int i = node->index;
assert(heap->size != 0);
while (i > 0) {
unsigned int p = HEAP_PARENT(i);
heap_swap(heap, p, i);
i = p;
}
HEAP_popmin(heap);
}
double HEAP_node_value(HeapNode *node)
{
return node->value;
}
void *HEAP_node_ptr(HeapNode *node)
{
return node->ptr;
}
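
Finally, a short usage sketch of the public heap API above (the values are illustrative; HEAP_popmin() hands pointers back in ascending value order, which is what the HEAP_COMPARE min-heap ordering implements):

#include <stdbool.h>
#include <stdio.h>
#include "generic_heap.h"

int main(void)
{
	Heap *heap = HEAP_new(0);  /* 0: no reservation, the buffer grows on demand */
	int a = 1, b = 2, c = 3;
	HEAP_insert(heap, 0.5, &b);
	HEAP_insert(heap, 0.1, &a);
	HEAP_insert(heap, 0.9, &c);
	while (!HEAP_is_empty(heap)) {
		const int *value = HEAP_popmin(heap);
		printf("%d\n", *value);  /* prints 1, 2, 3 */
	}
	HEAP_free(heap, NULL);  /* NULL: no per-element free callback needed */
	return 0;
}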

@@ -0,0 +1,54 @@
/*
* Copyright (c) 2016, Blender Foundation.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef __GENERIC_HEAP_IMPL_H__
#define __GENERIC_HEAP_IMPL_H__
/** \file generic_heap.h
* \ingroup curve_fit
*/
struct Heap;
struct HeapNode;
typedef struct Heap Heap;
typedef struct HeapNode HeapNode;
typedef void (*HeapFreeFP)(void *ptr);
Heap *HEAP_new(unsigned int tot_reserve);
bool HEAP_is_empty(Heap *heap);
void HEAP_free(Heap *heap, HeapFreeFP ptrfreefp);
void *HEAP_node_ptr(HeapNode *node);
void HEAP_remove(Heap *heap, HeapNode *node);
HeapNode *HEAP_insert(Heap *heap, double value, void *ptr);
void *HEAP_popmin(Heap *heap);
void HEAP_clear(Heap *heap, HeapFreeFP ptrfreefp);
unsigned int HEAP_size(Heap *heap);
HeapNode *HEAP_top(Heap *heap);
double HEAP_top_value(Heap *heap);
double HEAP_node_value(HeapNode *node);
#endif /* __GENERIC_HEAP_IMPL_H__ */

@@ -5843,7 +5843,7 @@ static int curve_dissolve_exec(bContext *C, wmOperator *UNUSED(op))
normalize_v3(tan_r);
curve_fit_cubic_to_points_single_fl(
points, points_len, dims, FLT_EPSILON,
points, points_len, NULL, dims, FLT_EPSILON,
tan_l, tan_r,
bezt_prev->vec[2], bezt_next->vec[0],
&error_sq_dummy);

@@ -912,7 +912,7 @@ static int curve_draw_exec(bContext *C, wmOperator *op)
const int result = curve_fit_cubic_to_points_fl(
coords, stroke_len, dims, error_threshold, CURVE_FIT_CALC_HIGH_QUALIY,
corners, corners_len,
corners, NULL, corners_len,
&cubic_spline, &cubic_spline_len,
NULL,
&corners_index, &corners_index_len);