int main() {
kvec_t(int) array;
kv_init(array);
- kv_push(int, array, 10); // append
+ kv_push_safe(int, array, 10, e0); // append
kv_a(int, array, 20) = 5; // dynamic
kv_A(array, 20) = 4; // static
kv_destroy(array);
return 0;
+e0:
+ return 1;
}
*/
#define kv_size(v) ((v).n)
#define kv_max(v) ((v).m)
-#define kv_resize(type, v, s) ((v).m = (s), (v).a = (type*)realloc((v).a, sizeof(type) * (v).m))
+#define kv_resize_safe(type, v, s, el) do { \
+ type *_tp = (type*)realloc((v).a, sizeof(type) * (s)); \
+ if (_tp == NULL) { \
+ goto el; \
+ } else { \
+ (v).a = _tp; \
+ (v).m = (s); \
+ } \
+ } while (0)
+
#define kv_grow_factor 1.5
+#define kv_grow_safe(type, v, el) do { \
+ size_t _ts = ((v).m > 1 ? (v).m * kv_grow_factor : 2); \
+ type *_tp = (type*)realloc((v).a, sizeof(type) * _ts); \
+ if (_tp == NULL) { \
+ goto el; \
+ } else { \
+ (v).a = _tp; \
+ (v).m = _ts; \
+ } \
+ } while (0)
+
+#define kv_copy_safe(type, v1, v0, el) do { \
+ if ((v1).m < (v0).n) kv_resize_safe(type, v1, (v0).n, el); \
+ (v1).n = (v0).n; \
+ memcpy((v1).a, (v0).a, sizeof(type) * (v0).n); \
+ } while (0)
+
+#define kv_push_safe(type, v, x, el) do { \
+ if ((v).n == (v).m) { \
+ kv_grow_safe(type, v, el); \
+ } \
+ (v).a[(v).n++] = (x); \
+ } while (0)
+
+#define kv_prepend_safe(type, v, x, el) do { \
+ if ((v).n == (v).m) { \
+ kv_grow_safe(type, v, el); \
+ } \
+ memmove((v).a + 1, (v).a, sizeof(type) * (v).n); \
+ (v).a[0] = (x); \
+ (v).n ++; \
+ } while (0)
+
+#define kv_concat_safe(type, v1, v0, el) do { \
+ if ((v1).m < (v0).n + (v1).n) \
+ kv_resize_safe(type, v1, (v0).n + (v1).n, el); \
+ memcpy((v1).a + (v1).n, (v0).a, sizeof(type) * (v0).n); \
+ (v1).n = (v0).n + (v1).n; \
+ } while (0)
+
+#define kv_del(type, v, i) do { \
+ if ((i) < (v).n) { \
+ memmove((v).a + (i), (v).a + ((i) + 1), sizeof(type) * ((v).n - (i) - 1)); \
+ (v).n --; \
+ } \
+} while (0)
+
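+/*
+  An example of error handling with the *_safe macros (illustrative sketch
+  only, not part of the library): each macro jumps to the supplied label
+  when realloc() fails, so the enclosing function needs a cleanup label.
+
+	kvec_t(int) a, b;
+	kv_init(a); kv_init(b);
+	kv_push_safe(int, a, 1, fail);
+	kv_concat_safe(int, b, a, fail);
+	kv_destroy(a); kv_destroy(b);
+	return 0;
+fail:
+	kv_destroy(a); kv_destroy(b);
+	return -1;
+*/
+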
+/*
+ * Old (ENOMEM-unsafe) versions of the kv_xxx macros. Kept for compatibility
+ * only; do not use them in new library code.
+ */
+
+#define kv_resize(type, v, s) ((v).m = (s), (v).a = (type*)realloc((v).a, sizeof(type) * (v).m))
+
#define kv_grow(type, v) ((v).m = ((v).m > 1 ? (v).m * kv_grow_factor : 2), \
(v).a = (type*)realloc((v).a, sizeof(type) * (v).m))
#define kv_concat(type, v1, v0) do { \
if ((v1).m < (v0).n + (v1).n) kv_resize(type, v1, (v0).n + (v1).n); \
- memcpy((v1).a + (v1).n, (v0).a, sizeof(type) * ((v0).n + (v1).n)); \
+ memcpy((v1).a + (v1).n, (v0).a, sizeof(type) * (v0).n); \
(v1).n = (v0).n + (v1).n; \
} while (0)
-#define kv_del(type, v, i) do { \
- if ((i) < (v).n) { \
- memmove((v).a + (i), (v).a + ((i) + 1), sizeof(type) * ((v).n - (i) - 1)); \
- (v).n --; \
- } \
-} while (0)
-
-#endif
+#endif /* AC_KVEC_H */
 * Reserve space in ucl array or object for `reserved` elements
* @param obj object to reserve
* @param reserved size to reserve in an object
+ * @return true on success, false on failure (i.e. ENOMEM)
*/
-UCL_EXTERN void ucl_object_reserve (ucl_object_t *obj, size_t reserved);
+UCL_EXTERN bool ucl_object_reserve (ucl_object_t *obj, size_t reserved);
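+
+/*
+ * Illustrative sketch (assumed caller code, not part of this header):
+ * the result should now be checked, e.g.
+ *
+ *	if (!ucl_object_reserve (obj, 128)) {
+ *		... handle ENOMEM ...
+ *	}
+ */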
/**
* Append an element to the end of array object
* @param iter opaque iterator, must be set to NULL on the first call:
* ucl_object_iter_t it = NULL;
* while ((cur = ucl_iterate_object (obj, &it)) != NULL) ...
+ * @param ep pointer used to record an exception (such as ENOMEM); may be NULL
* @return the next object or NULL
*/
-UCL_EXTERN const ucl_object_t* ucl_object_iterate (const ucl_object_t *obj,
- ucl_object_iter_t *iter, bool expand_values);
+UCL_EXTERN const ucl_object_t* ucl_object_iterate_with_error (const ucl_object_t *obj,
+ ucl_object_iter_t *iter, bool expand_values, int *ep);
+
#define ucl_iterate_object ucl_object_iterate
+#define ucl_object_iterate(ob, it, ev) ucl_object_iterate_with_error((ob), (it), (ev), NULL)
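+
+/*
+ * Illustrative sketch (assumed caller code, not part of this header):
+ * passing a non-NULL `ep` distinguishes normal end of iteration from a
+ * failed one; `obj` and `cur` are assumed to be declared by the caller.
+ *
+ *	ucl_object_iter_t it = NULL;
+ *	int err = 0;
+ *	while ((cur = ucl_object_iterate_with_error (obj, &it, true, &err)) != NULL) {
+ *		... use cur ...
+ *	}
+ *	if (err != 0) {
+ *		... iteration aborted (e.g. ENOMEM) ...
+ *	}
+ */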
/**
* Create new safe iterator for the specified object
*/
UCL_EXTERN ucl_object_iter_t ucl_object_iterate_new (const ucl_object_t *obj)
UCL_WARN_UNUSED_RESULT;
+/**
+ * Check a safe iterator after performing operations on it
+ * (such as ucl_object_iterate_safe()) to detect whether any of them
+ * encountered a fatal exception (e.g. ENOMEM).
+ * @param iter opaque iterator
+ * @return true if an exception has occurred, false otherwise
+ */
+UCL_EXTERN bool ucl_object_iter_chk_excpn(ucl_object_iter_t *it);
+
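+/*
+ * Illustrative sketch (assumed caller code, not part of this header):
+ * typical use together with the safe iterator API; `obj` and `cur` are
+ * assumed to be declared by the caller.
+ *
+ *	ucl_object_iter_t it = ucl_object_iterate_new (obj);
+ *	while ((cur = ucl_object_iterate_safe (it, true)) != NULL) {
+ *		... use cur ...
+ *	}
+ *	if (ucl_object_iter_chk_excpn (it)) {
+ *		... iteration was aborted by a fatal error (e.g. ENOMEM) ...
+ *	}
+ *	ucl_object_iterate_free (it);
+ */
+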
/**
* Reset initialized iterator to a new object
* @param obj new object to iterate
* @param macro macro name (without leading dot)
* @param handler handler (it is called immediately after macro is parsed)
* @param ud opaque user data for a handler
+ * @return true on success, false on failure (i.e. ENOMEM)
*/
-UCL_EXTERN void ucl_parser_register_macro (struct ucl_parser *parser,
+UCL_EXTERN bool ucl_parser_register_macro (struct ucl_parser *parser,
const char *macro,
ucl_macro_handler handler, void* ud);
* @param macro macro name (without leading dot)
* @param handler handler (it is called immediately after macro is parsed)
* @param ud opaque user data for a handler
+ * @return true on success, false on failure (i.e. ENOMEM)
*/
-UCL_EXTERN void ucl_parser_register_context_macro (struct ucl_parser *parser,
+UCL_EXTERN bool ucl_parser_register_context_macro (struct ucl_parser *parser,
const char *macro,
ucl_context_macro_handler handler,
void* ud);
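+
+/*
+ * Illustrative sketch (assumed caller code, not part of this header): both
+ * registration functions now report failures, e.g.
+ *
+ *	if (!ucl_parser_register_macro (parser, "my_macro", my_handler, ud)) {
+ *		... registration failed (e.g. ENOMEM) ...
+ *	}
+ *
+ * where "my_macro", my_handler and ud are placeholders.
+ */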
new = UCL_ALLOC (sizeof (ucl_hash_t));
if (new != NULL) {
+ void *h;
kv_init (new->ar);
new->caseless = ignore_case;
if (ignore_case) {
- khash_t(ucl_hash_caseless_node) *h = kh_init (ucl_hash_caseless_node);
- new->hash = (void *)h;
+ h = (void *)kh_init (ucl_hash_caseless_node);
}
else {
- khash_t(ucl_hash_node) *h = kh_init (ucl_hash_node);
- new->hash = (void *)h;
+ h = (void *)kh_init (ucl_hash_node);
}
+ if (h == NULL) {
+ UCL_FREE (sizeof (ucl_hash_t), new);
+ return NULL;
+ }
+ new->hash = h;
}
return new;
}
if (hashlin->caseless) {
khash_t(ucl_hash_caseless_node) *h = (khash_t(ucl_hash_caseless_node) *)
- hashlin->hash;
+ hashlin->hash;
kh_destroy (ucl_hash_caseless_node, h);
}
else {
khash_t(ucl_hash_node) *h = (khash_t(ucl_hash_node) *)
- hashlin->hash;
+ hashlin->hash;
kh_destroy (ucl_hash_node, h);
}
UCL_FREE (sizeof (*hashlin), hashlin);
}
-void
+bool
ucl_hash_insert (ucl_hash_t* hashlin, const ucl_object_t *obj,
- const char *key, unsigned keylen)
+ const char *key, unsigned keylen)
{
khiter_t k;
int ret;
struct ucl_hash_elt *elt;
if (hashlin == NULL) {
- return;
+ return false;
}
if (hashlin->caseless) {
k = kh_put (ucl_hash_caseless_node, h, obj, &ret);
if (ret > 0) {
elt = &kh_value (h, k);
- kv_push (const ucl_object_t *, hashlin->ar, obj);
+ kv_push_safe (const ucl_object_t *, hashlin->ar, obj, e0);
elt->obj = obj;
			elt->ar_idx = kv_size (hashlin->ar) - 1;
+		} else if (ret < 0) {
+			goto e0;
		}
k = kh_put (ucl_hash_node, h, obj, &ret);
if (ret > 0) {
elt = &kh_value (h, k);
- kv_push (const ucl_object_t *, hashlin->ar, obj);
+ kv_push_safe (const ucl_object_t *, hashlin->ar, obj, e0);
elt->obj = obj;
elt->ar_idx = kv_size (hashlin->ar) - 1;
+ } else if (ret < 0) {
+ goto e0;
}
}
+ return true;
+ e0:
+ return false;
}
void ucl_hash_replace (ucl_hash_t* hashlin, const ucl_object_t *old,
- const ucl_object_t *new)
+ const ucl_object_t *new)
{
khiter_t k;
int ret;
const ucl_object_t **end;
};
+#define UHI_SETERR(ep, ern) do { if ((ep) != NULL) *(ep) = (ern); } while (0)
+
const void*
-ucl_hash_iterate (ucl_hash_t *hashlin, ucl_hash_iter_t *iter)
+ucl_hash_iterate2 (ucl_hash_t *hashlin, ucl_hash_iter_t *iter, int *ep)
{
struct ucl_hash_real_iter *it = (struct ucl_hash_real_iter *)(*iter);
const ucl_object_t *ret = NULL;
if (hashlin == NULL) {
+ UHI_SETERR(ep, EINVAL);
return NULL;
}
it = UCL_ALLOC (sizeof (*it));
if (it == NULL) {
+ UHI_SETERR(ep, ENOMEM);
return NULL;
}
it->end = it->cur + hashlin->ar.n;
}
+ UHI_SETERR(ep, 0);
if (it->cur < it->end) {
ret = *it->cur++;
}
if (hashlin->caseless) {
khash_t(ucl_hash_caseless_node) *h = (khash_t(ucl_hash_caseless_node) *)
- hashlin->hash;
+ hashlin->hash;
k = kh_get (ucl_hash_caseless_node, h, &search);
if (k != kh_end (h)) {
}
else {
khash_t(ucl_hash_node) *h = (khash_t(ucl_hash_node) *)
- hashlin->hash;
+ hashlin->hash;
k = kh_get (ucl_hash_node, h, &search);
if (k != kh_end (h)) {
elt = &kh_value (h, k);
if (hashlin->caseless) {
khash_t(ucl_hash_caseless_node) *h = (khash_t(ucl_hash_caseless_node) *)
- hashlin->hash;
+ hashlin->hash;
k = kh_get (ucl_hash_caseless_node, h, obj);
if (k != kh_end (h)) {
}
else {
khash_t(ucl_hash_node) *h = (khash_t(ucl_hash_node) *)
- hashlin->hash;
+ hashlin->hash;
k = kh_get (ucl_hash_node, h, obj);
if (k != kh_end (h)) {
elt = &kh_value (h, k);
}
}
-void ucl_hash_reserve (ucl_hash_t *hashlin, size_t sz)
+bool
+ucl_hash_reserve (ucl_hash_t *hashlin, size_t sz)
{
if (hashlin == NULL) {
- return;
+ return false;
}
if (sz > hashlin->ar.m) {
- kv_resize (const ucl_object_t *, hashlin->ar, sz);
+ kv_resize_safe (const ucl_object_t *, hashlin->ar, sz, e0);
if (hashlin->caseless) {
khash_t(ucl_hash_caseless_node) *h = (khash_t(
kh_resize (ucl_hash_node, h, sz * 2);
}
}
+ return true;
+ e0:
+ return false;
}
\ No newline at end of file
/**
 * Inserts an element into the hashtable.
+ * @return true on success, false on failure (i.e. ENOMEM)
*/
-void ucl_hash_insert (ucl_hash_t* hashlin, const ucl_object_t *obj, const char *key,
+bool ucl_hash_insert (ucl_hash_t* hashlin, const ucl_object_t *obj, const char *key,
unsigned keylen);
/**
* Iterate over hash table
* @param hashlin hash
* @param iter iterator (must be NULL on first iteration)
+ * @param ep pointer used to record an exception (such as ENOMEM); may be NULL
* @return the next object
*/
-const void* ucl_hash_iterate (ucl_hash_t *hashlin, ucl_hash_iter_t *iter);
+const void* ucl_hash_iterate2 (ucl_hash_t *hashlin, ucl_hash_iter_t *iter, int *ep);
+
+/**
+ * Helper macro to support older code
+ */
+#define ucl_hash_iterate(hl, ip) ucl_hash_iterate2((hl), (ip), NULL)
/**
* Check whether an iterator has next element
/**
* Reserves space in hash
+ * @return true on success, false on failure (e.g. ENOMEM)
* @param hashlin
*/
-void ucl_hash_reserve (ucl_hash_t *hashlin, size_t sz);
+bool ucl_hash_reserve (ucl_hash_t *hashlin, size_t sz);
#endif
const ucl_object_t *obj,
bool ignore_case)
{
+ ucl_hash_t *nhp;
+
if (hashlin == NULL) {
- hashlin = ucl_hash_create (ignore_case);
+ nhp = ucl_hash_create (ignore_case);
+ if (nhp == NULL) {
+ return NULL;
+ }
+ } else {
+ nhp = hashlin;
+ }
+ if (!ucl_hash_insert (nhp, obj, obj->key, obj->keylen)) {
+ if (nhp != hashlin) {
+ ucl_hash_destroy(nhp, NULL);
+ }
+ return NULL;
}
- ucl_hash_insert (hashlin, obj, obj->key, obj->keylen);
- return hashlin;
+ return nhp;
}
/**
bool is_array, uint32_t level, bool has_obrace)
{
struct ucl_stack *st;
- bool need_free = false;
-
- if (!is_array) {
- if (obj == NULL) {
- obj = ucl_object_new_full (UCL_OBJECT, parser->chunks->priority);
- need_free = true;
- }
- else {
- if (obj->type == UCL_ARRAY) {
- /* Bad combination for merge: array and object */
- ucl_set_err (parser, UCL_EMERGE,
- "cannot merge an array with an object",
- &parser->err);
-
- return NULL;
- }
+ ucl_object_t *nobj;
- obj->type = UCL_OBJECT;
+ if (obj == NULL) {
+ nobj = ucl_object_new_full (is_array ? UCL_ARRAY : UCL_OBJECT, parser->chunks->priority);
+ if (nobj == NULL) {
+ goto enomem0;
}
+ } else {
+ if (obj->type == (is_array ? UCL_OBJECT : UCL_ARRAY)) {
+ /* Bad combination for merge: array and object */
+ ucl_set_err (parser, UCL_EMERGE,
+ "cannot merge an object with an array",
+ &parser->err);
- if (obj->value.ov == NULL) {
- obj->value.ov = ucl_hash_create (parser->flags & UCL_PARSER_KEY_LOWERCASE);
+ return NULL;
}
- parser->state = UCL_STATE_KEY;
+ nobj = obj;
+ nobj->type = is_array ? UCL_ARRAY : UCL_OBJECT;
}
- else {
- if (obj == NULL) {
- obj = ucl_object_new_full (UCL_ARRAY, parser->chunks->priority);
- need_free = true;
- }
- else {
- if (obj->type == UCL_OBJECT) {
- /* Bad combination for merge: array and object */
- ucl_set_err (parser, UCL_EMERGE,
- "cannot merge an object with an array",
- &parser->err);
- return NULL;
+ if (!is_array) {
+ if (nobj->value.ov == NULL) {
+ nobj->value.ov = ucl_hash_create (parser->flags & UCL_PARSER_KEY_LOWERCASE);
+ if (nobj->value.ov == NULL) {
+ goto enomem1;
}
-
- obj->type = UCL_ARRAY;
}
+ parser->state = UCL_STATE_KEY;
+ } else {
parser->state = UCL_STATE_VALUE;
}
st = UCL_ALLOC (sizeof (struct ucl_stack));
if (st == NULL) {
- ucl_set_err (parser, UCL_EINTERNAL, "cannot allocate memory for an object",
- &parser->err);
- if (need_free) {
- ucl_object_unref (obj);
- }
-
- return NULL;
+ goto enomem1;
}
- st->obj = obj;
+ st->obj = nobj;
if (level >= UINT16_MAX) {
ucl_set_err (parser, UCL_ENESTED,
"objects are nesting too deep (over 65535 limit)",
&parser->err);
- if (need_free) {
+ if (nobj != obj) {
-			ucl_object_unref (obj);
+			ucl_object_unref (nobj);
}
}
LL_PREPEND (parser->stack, st);
- parser->cur_obj = obj;
+ parser->cur_obj = nobj;
- return obj;
+ return nobj;
+enomem1:
+ if (nobj != obj)
+ ucl_object_unref (nobj);
+enomem0:
+ ucl_set_err (parser, UCL_EINTERNAL, "cannot allocate memory for an object",
+ &parser->err);
+ return NULL;
}
int
if (tobj == NULL) {
container = ucl_hash_insert_object (container, nobj,
parser->flags & UCL_PARSER_KEY_LOWERCASE);
+ if (container == NULL) {
+ return false;
+ }
nobj->prev = nobj;
nobj->next = NULL;
parser->stack->obj->len ++;
/* Create a new object */
nobj = ucl_object_new_full (UCL_NULL, parser->chunks->priority);
+ if (nobj == NULL) {
+ return false;
+ }
keylen = ucl_copy_or_store_ptr (parser, c, &nobj->trash_stack[UCL_TRASH_KEY],
&key, end - c, need_unescape, parser->flags & UCL_PARSER_KEY_LOWERCASE,
false, false);
return true;
}
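+
+/*
+ * Register one of the built-in parser macros, passing the parser itself as
+ * the handler's user data, and jump to the label `el` if the registration
+ * fails (e.g. due to ENOMEM).
+ */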
+#define UPRM_SAFE(fn, a, b, c, el) do { \
+ if (!fn(a, b, c, a)) \
+ goto el; \
+ } while (0)
+
struct ucl_parser*
ucl_parser_new (int flags)
{
memset (parser, 0, sizeof (struct ucl_parser));
- ucl_parser_register_macro (parser, "include", ucl_include_handler, parser);
- ucl_parser_register_macro (parser, "try_include", ucl_try_include_handler, parser);
- ucl_parser_register_macro (parser, "includes", ucl_includes_handler, parser);
- ucl_parser_register_macro (parser, "priority", ucl_priority_handler, parser);
- ucl_parser_register_macro (parser, "load", ucl_load_handler, parser);
- ucl_parser_register_context_macro (parser, "inherit", ucl_inherit_handler, parser);
+ UPRM_SAFE(ucl_parser_register_macro, parser, "include", ucl_include_handler, e0);
+ UPRM_SAFE(ucl_parser_register_macro, parser, "try_include", ucl_try_include_handler, e0);
+ UPRM_SAFE(ucl_parser_register_macro, parser, "includes", ucl_includes_handler, e0);
+ UPRM_SAFE(ucl_parser_register_macro, parser, "priority", ucl_priority_handler, e0);
+ UPRM_SAFE(ucl_parser_register_macro, parser, "load", ucl_load_handler, e0);
+ UPRM_SAFE(ucl_parser_register_context_macro, parser, "inherit", ucl_inherit_handler, e0);
parser->flags = flags;
parser->includepaths = NULL;
}
return parser;
+e0:
+ ucl_parser_free(parser);
+ return NULL;
}
bool
return parser->default_priority;
}
-void
+bool
ucl_parser_register_macro (struct ucl_parser *parser, const char *macro,
ucl_macro_handler handler, void* ud)
{
struct ucl_macro *new;
if (macro == NULL || handler == NULL) {
- return;
+ return false;
}
new = UCL_ALLOC (sizeof (struct ucl_macro));
if (new == NULL) {
- return;
+ return false;
}
memset (new, 0, sizeof (struct ucl_macro));
new->h.handler = handler;
new->name = strdup (macro);
+ if (new->name == NULL) {
+ UCL_FREE (sizeof (struct ucl_macro), new);
+ return false;
+ }
new->ud = ud;
HASH_ADD_KEYPTR (hh, parser->macroes, new->name, strlen (new->name), new);
+ return true;
}
-void
+bool
ucl_parser_register_context_macro (struct ucl_parser *parser, const char *macro,
ucl_context_macro_handler handler, void* ud)
{
struct ucl_macro *new;
if (macro == NULL || handler == NULL) {
- return;
+ return false;
}
new = UCL_ALLOC (sizeof (struct ucl_macro));
if (new == NULL) {
- return;
+ return false;
}
memset (new, 0, sizeof (struct ucl_macro));
new->h.context_handler = handler;
new->name = strdup (macro);
+ if (new->name == NULL) {
+ UCL_FREE (sizeof (struct ucl_macro), new);
+ return false;
+ }
new->ud = ud;
new->is_context = true;
HASH_ADD_KEYPTR (hh, parser->macroes, new->name, strlen (new->name), new);
+ return true;
}
void
}
const ucl_object_t*
-ucl_object_iterate (const ucl_object_t *obj, ucl_object_iter_t *iter, bool expand_values)
+ucl_object_iterate_with_error (const ucl_object_t *obj, ucl_object_iter_t *iter, bool expand_values,
+ int *ep)
{
const ucl_object_t *elt = NULL;
if (expand_values) {
switch (obj->type) {
case UCL_OBJECT:
- return (const ucl_object_t*)ucl_hash_iterate (obj->value.ov, iter);
+ return (const ucl_object_t*)ucl_hash_iterate2 (obj->value.ov, iter, ep);
break;
case UCL_ARRAY: {
unsigned int idx;
UCL_ITERATE_FLAG_INSIDE_ARRAY,
UCL_ITERATE_FLAG_INSIDE_OBJECT,
UCL_ITERATE_FLAG_IMPLICIT,
+ UCL_ITERATE_FLAG_EXCEPTION
};
const char safe_iter_magic[4] = {'u', 'i', 't', 'e'};
return (ucl_object_iter_t)it;
}
+bool
+ucl_object_iter_chk_excpn(ucl_object_iter_t *it)
+{
+ struct ucl_object_safe_iter *rit = UCL_SAFE_ITER (it);
+
+ UCL_SAFE_ITER_CHECK (rit);
+
+ return (rit->flags == UCL_ITERATE_FLAG_EXCEPTION);
+}
ucl_object_iter_t
ucl_object_iterate_reset (ucl_object_iter_t it, const ucl_object_t *obj)
{
struct ucl_object_safe_iter *rit = UCL_SAFE_ITER (it);
const ucl_object_t *ret = NULL;
+	int ern = 0;
UCL_SAFE_ITER_CHECK (rit);
if (rit->impl_it->type == UCL_OBJECT) {
rit->flags = UCL_ITERATE_FLAG_INSIDE_OBJECT;
- ret = ucl_object_iterate (rit->impl_it, &rit->expl_it, true);
+ ret = ucl_object_iterate_with_error (rit->impl_it, &rit->expl_it, true, &ern);
+
+ if (ret == NULL && ern != 0) {
+ rit->flags = UCL_ITERATE_FLAG_EXCEPTION;
+ return NULL;
+ }
if (ret == NULL && (type & UCL_ITERATE_IMPLICIT)) {
/* Need to switch to another implicit object in chain */
UCL_ARRAY_GET (vec, new);
/* Preallocate some space for arrays */
- kv_resize (ucl_object_t *, *vec, 8);
+ kv_resize_safe (ucl_object_t *, *vec, 8, enomem);
}
}
}
new = ucl_object_new_userdata (NULL, NULL, NULL);
ucl_object_set_priority (new, priority);
}
-
+enomem:
return new;
}
-void ucl_object_reserve (ucl_object_t *obj, size_t reserved)
+bool ucl_object_reserve (ucl_object_t *obj, size_t reserved)
{
if (obj->type == UCL_ARRAY) {
UCL_ARRAY_GET (vec, obj);
if (vec->m < reserved) {
/* Preallocate some space for arrays */
- kv_resize (ucl_object_t *, *vec, reserved);
+ kv_resize_safe (ucl_object_t *, *vec, reserved, e0);
}
}
else if (obj->type == UCL_OBJECT) {
-		ucl_hash_reserve (obj->value.ov, reserved);
+		return ucl_hash_reserve (obj->value.ov, reserved);
}
+ return true;
+e0:
+ return false;
}
ucl_object_t*
top->value.av = (void *)vec;
}
- kv_push (ucl_object_t *, *vec, elt);
+ kv_push_safe (ucl_object_t *, *vec, elt, e0);
top->len ++;
return true;
+e0:
+ return false;
}
bool
vec = UCL_ALLOC (sizeof (*vec));
kv_init (*vec);
top->value.av = (void *)vec;
- kv_push (ucl_object_t *, *vec, elt);
+ kv_push_safe (ucl_object_t *, *vec, elt, e0);
}
else {
/* Slow O(n) algorithm */
- kv_prepend (ucl_object_t *, *vec, elt);
+ kv_prepend_safe (ucl_object_t *, *vec, elt, e0);
}
top->len ++;
return true;
+e0:
+ return false;
}
bool
UCL_ARRAY_GET (v2, cp);
if (v1 && v2) {
- kv_concat (ucl_object_t *, *v1, *v2);
+ kv_concat_safe (ucl_object_t *, *v1, *v2, e0);
for (i = v2->n; i < v1->n; i ++) {
obj = &kv_A (*v1, i);
}
return true;
+e0:
+ return false;
}
ucl_object_t *