author    Vsevolod Stakhov <vsevolod@highsecure.ru>  2020-07-09 12:40:43 +0100
committer Vsevolod Stakhov <vsevolod@highsecure.ru>  2020-07-09 13:15:27 +0100
commit    4eba0ef16211ae27f13e826fb1957858439666ec (patch)
tree      87566166ab363df93ce961f268dad63f0fe11626 /contrib/lua-lpeg
parent    4975b41adddf9a324d5a7b33fc97153323e5192d (diff)
[Fix] Fix poor man allocator algorithm
Diffstat (limited to 'contrib/lua-lpeg')
-rw-r--r--  contrib/lua-lpeg/lpvm.c | 8
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/contrib/lua-lpeg/lpvm.c b/contrib/lua-lpeg/lpvm.c
index 6058bf5b1..e107292e2 100644
--- a/contrib/lua-lpeg/lpvm.c
+++ b/contrib/lua-lpeg/lpvm.c
@@ -32,7 +32,7 @@ struct poor_slab {
};
/* Used to optimize pages allocation */
-struct poor_slab slabs;
+RSPAMD_ALIGNED (64) struct poor_slab slabs;
static uint64_t xorshifto_seed[2] = {0xdeadbabe, 0xdeadbeef};
@@ -64,7 +64,7 @@ lpeg_allocate_mem_low (size_t sz)
uint64_t s1 = xorshifto_seed[1];
s1 ^= s0;
- xorshifto_seed[0] = xoroshiro_rotl(s0, 55) ^ s1 ^ (s1 << 14);
+ xorshifto_seed[0] = xoroshiro_rotl (s0, 55) ^ s1 ^ (s1 << 14);
xorshifto_seed[1] = xoroshiro_rotl (s1, 36);
flags |= MAP_FIXED;
/* Get 46 bits */
@@ -77,7 +77,7 @@ lpeg_allocate_mem_low (size_t sz)
memcpy (cp, &sz, sizeof (sz));
for (unsigned i = 0; i < MAX_PIECES; i ++) {
- if (slabs.pieces[i].sz == 0) {
+ if (slabs.pieces[i].occupied == 0) {
/* Store piece */
slabs.pieces[i].sz = sz;
slabs.pieces[i].ptr = cp;
@@ -90,7 +90,7 @@ lpeg_allocate_mem_low (size_t sz)
/* Not enough free pieces, pop some */
unsigned sel = ((uintptr_t)cp) & ((MAX_PIECES * 2) - 1);
/* Here we free memory in fact */
- munmap (slabs.pieces[sel].ptr, slabs.pieces[sel].sz);
+ munmap (slabs.pieces[sel].ptr, slabs.pieces[sel].sz + sizeof (sz));
slabs.pieces[sel].sz = sz;
slabs.pieces[sel].ptr = cp;
slabs.pieces[sel].occupied = 1;
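
The hunk at lpeg_allocate_mem_low only adjusts spacing, but it touches the xoroshiro128+ step that the allocator uses to derive a random low address (masked to 46 bits, per the comment in the diff) for a MAP_FIXED mapping. Below is a minimal standalone sketch of that PRNG step, with the same seeds and rotation constants (55, 14, 36) as the diff; the function and variable names are illustrative, not rspamd's.

#include <stdint.h>
#include <stdio.h>

/* Sketch of the xoroshiro128+ step; seeds copied from the diff above. */
static uint64_t seed[2] = {0xdeadbabe, 0xdeadbeef};

static inline uint64_t
rotl64 (uint64_t x, int k)
{
	return (x << k) | (x >> (64 - k));
}

static uint64_t
next_rand (void)
{
	uint64_t s0 = seed[0];
	uint64_t s1 = seed[1];
	uint64_t result = s0 + s1;

	s1 ^= s0;
	seed[0] = rotl64 (s0, 55) ^ s1 ^ (s1 << 14);
	seed[1] = rotl64 (s1, 36);

	return result;
}

int
main (void)
{
	/* The allocator keeps only the low 46 bits to build a candidate
	 * address; the actual mmap() call is omitted here. */
	uint64_t addr_hint = next_rand () & ((1ULL << 46) - 1);
	printf ("candidate low address: 0x%llx\n", (unsigned long long) addr_hint);
	return 0;
}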
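
The two substantive hunks change how a free piece is detected (the occupied flag rather than a stale size of zero) and how much memory is released on eviction (the mapping includes a size header, so munmap must cover sz + sizeof (sz) bytes). The following is a minimal self-contained sketch of that bookkeeping path, assuming a simplified poor_slab layout, an assumed MAX_PIECES value, and a hypothetical remember_piece() helper; the real logic lives inside lpeg_allocate_mem_low() in lpvm.c.

#include <stdint.h>
#include <stddef.h>
#include <string.h>
#include <sys/mman.h>

#define MAX_PIECES 8 /* assumption for the sketch; the real constant is in lpvm.c */

struct poor_slab {
	struct {
		void *ptr;
		size_t sz;
		unsigned occupied;
	} pieces[MAX_PIECES];
};

static struct poor_slab slabs;

/* Record a freshly mmap()-ed chunk: `cp` points at the start of the
 * mapping, which holds a size header followed by `sz` usable bytes. */
static void
remember_piece (unsigned char *cp, size_t sz)
{
	for (unsigned i = 0; i < MAX_PIECES; i++) {
		/* First part of the fix: a slot is free when its `occupied`
		 * flag is clear, not when its stale size happens to be 0. */
		if (slabs.pieces[i].occupied == 0) {
			slabs.pieces[i].sz = sz;
			slabs.pieces[i].ptr = cp;
			slabs.pieces[i].occupied = 1;
			return;
		}
	}

	/* No free slot: evict one. The real code derives the index from the
	 * pointer bits; a plain modulo is used here to keep it in range. */
	unsigned sel = (unsigned) (((uintptr_t) cp >> 12) % MAX_PIECES);

	/* Second part of the fix: the mapping is sz bytes plus the size
	 * header, so munmap() must release sz + sizeof (sz) bytes. */
	munmap (slabs.pieces[sel].ptr, slabs.pieces[sel].sz + sizeof (sz));

	slabs.pieces[sel].sz = sz;
	slabs.pieces[sel].ptr = cp;
	slabs.pieces[sel].occupied = 1;
}

int
main (void)
{
	size_t sz = 4096;
	unsigned char *cp = mmap (NULL, sz + sizeof (sz), PROT_READ | PROT_WRITE,
			MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

	if (cp != MAP_FAILED) {
		memcpy (cp, &sz, sizeof (sz)); /* size header, as in the diff */
		remember_piece (cp, sz);
	}

	return 0;
}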