path: root/src/node.c
author    David Robillard <d@drobilla.net>  2018-05-13 00:48:31 +0200
committer David Robillard <d@drobilla.net>  2019-04-13 19:15:32 +0200
commit    a339e2422fcffe41e515aff7eee3f501ca70f311 (patch)
tree      28d87d4f2d8d09dc95554bb99398a6eb412e88dc /src/node.c
parent    d540aa4d29944f7716afe8c587f5145b551d8a50 (diff)
Zero node padding before passing to reader sinks
Diffstat (limited to 'src/node.c')
-rw-r--r--  src/node.c  25
1 file changed, 25 insertions, 0 deletions
diff --git a/src/node.c b/src/node.c
index e6dfd451..8f32c536 100644
--- a/src/node.c
+++ b/src/node.c
@@ -210,6 +210,24 @@ serd_node_new_uri(const char* str)
return serd_node_new_simple(SERD_URI, str);
}
+/**
+ Zero node padding.
+
+ This is used for nodes which live in re-used stack memory during reading,
+ which must be normalized before being passed to a sink so comparison will
+ work correctly.
+*/
+void
+serd_node_zero_pad(SerdNode* node)
+{
+ char* buf = serd_node_buffer(node);
+ const size_t size = node->n_bytes;
+ const size_t padded_size = serd_node_pad_size(node->n_bytes);
+ if (padded_size > size) {
+ memset(buf + size, 0, padded_size - size);
+ }
+}
+
SerdNode*
serd_node_copy(const SerdNode* node)
{
@@ -218,6 +236,13 @@ serd_node_copy(const SerdNode* node)
}
const size_t size = serd_node_total_size(node);
+#ifndef NDEBUG
+ const size_t unpadded_size = node->n_bytes;
+ const size_t padded_size = serd_node_pad_size(node->n_bytes);
+ for (size_t i = 0; i < padded_size - unpadded_size; ++i) {
+ assert(serd_node_buffer_c(node)[unpadded_size + i] == '\0');
+ }
+#endif
SerdNode* copy = (SerdNode*)calloc(1, size + 3);
memcpy(copy, node, size);
return copy;
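
A minimal sketch of why the padding must be zeroed, kept outside the patch for reference: once the bytes between n_bytes and the padded size are all zero, two nodes with equal contents are byte-wise equal, so a single memcmp over the padded allocation is a valid comparison. The ExampleNode struct, example_pad_size(), and example_node_new() below are illustrative stand-ins under assumed layout and naming, not serd's actual SerdNode definition or its serd_node_buffer()/serd_node_pad_size() helpers.

/* Illustrative only: a simplified node header followed by an inline string
 * buffer padded to a multiple of sizeof(size_t). This is not serd's layout;
 * it just shows why zeroed padding makes byte-wise comparison safe. */
#include <assert.h>
#include <stdlib.h>
#include <string.h>

typedef struct {
  size_t n_bytes; /* Length of string, excluding null terminator */
  /* Null-terminated string data, padded with zeros, follows here */
} ExampleNode;

static size_t
example_pad_size(const size_t n_bytes)
{
  /* Round up to the next multiple of sizeof(size_t), leaving room for '\0' */
  return (n_bytes + sizeof(size_t)) / sizeof(size_t) * sizeof(size_t);
}

static ExampleNode*
example_node_new(const char* str)
{
  const size_t len    = strlen(str);
  const size_t padded = example_pad_size(len);

  /* calloc() zeroes the whole allocation, including the trailing padding */
  ExampleNode* node = (ExampleNode*)calloc(1, sizeof(ExampleNode) + padded);
  node->n_bytes     = len;
  memcpy((char*)(node + 1), str, len);
  return node;
}

int
main(void)
{
  ExampleNode* a = example_node_new("http://example.org/");
  ExampleNode* b = example_node_new("http://example.org/");

  /* With the padding zeroed, equal nodes are byte-wise equal, so one memcmp
   * over the padded size is enough to compare them. */
  const size_t total = sizeof(ExampleNode) + example_pad_size(a->n_bytes);
  assert(!memcmp(a, b, total));

  free(a);
  free(b);
  return 0;
}

The NDEBUG block added to serd_node_copy() above asserts the same invariant: every byte between n_bytes and the padded size must already be zero before the node is copied.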