Merged

38 commits
b84a93b
Partially complete reassembler; does not build yet
pavel-kirienko Jul 25, 2023
5dda226
The first rough approximation of the reassembler is done; now we need…
pavel-kirienko Jul 25, 2023
aa8a417
Should build now I suppose
pavel-kirienko Jul 25, 2023
1137a42
Remove special case for empty transfers
pavel-kirienko Jul 25, 2023
98c0f53
Add tests for rxSlotRestart
pavel-kirienko Jul 26, 2023
77fdca1
Finish the slot ejection tests
pavel-kirienko Jul 26, 2023
f9fe930
nit
pavel-kirienko Jul 26, 2023
da7c3c1
Slight internal refactoring to uphold orthogonality
pavel-kirienko Jul 27, 2023
deb6426
Add tests for rxSlotAccept()
pavel-kirienko Jul 27, 2023
8fe690c
Iface acceptance test WIP
pavel-kirienko Jul 27, 2023
84ec9b5
Partial tests for rxIfaceAccept
pavel-kirienko Jul 27, 2023
3151ff5
Refactor the iface acceptor and add more tests
pavel-kirienko Jul 28, 2023
3aa6385
Increase test coverage
pavel-kirienko Jul 28, 2023
6c439de
Add tests for rxIfaceFindMatchingSlot
pavel-kirienko Jul 31, 2023
0b40de0
Add session-level frame acceptance logic and transfer deduplication; …
pavel-kirienko Jul 31, 2023
8e29b91
Add test for rxSessionDeduplicate
pavel-kirienko Jul 31, 2023
c12c1d4
Add session acceptance test with deduplication
pavel-kirienko Jul 31, 2023
b6443d9
Add sections for clarity
pavel-kirienko Jul 31, 2023
102bbbe
Add rxPort implementation
pavel-kirienko Aug 1, 2023
ea5d94c
Add port test
pavel-kirienko Aug 1, 2023
8605051
Add destructors & tests; the coverage should be at 100%
pavel-kirienko Aug 2, 2023
bc9b93f
Rename prio -> priority as requested
pavel-kirienko Aug 2, 2023
81378f0
Suppress a bogus alignment warning
pavel-kirienko Aug 2, 2023
b286340
SONAR
pavel-kirienko Aug 2, 2023
f3e35ae
Extract compare32
pavel-kirienko Aug 2, 2023
1d25274
Remove the dependency on fixed-size 8-bit types to avoid portability …
pavel-kirienko Aug 2, 2023
11b7b97
SONAR
pavel-kirienko Aug 2, 2023
75dfe91
Fix a minor naming consistency issue: udpardFragmentFree -> udpardRxF…
pavel-kirienko Aug 3, 2023
1d87ac8
Implement UdpardMemoryDeleter; switch MemoryResource and MemoryDelete…
pavel-kirienko Aug 8, 2023
89563f5
Add clarifying comments
pavel-kirienko Aug 8, 2023
92044da
Remove typedef struct UdpardInternalRxSession
pavel-kirienko Aug 8, 2023
87e49bc
Rename free --> deallocate because Sonar detected a compliance issue
pavel-kirienko Aug 8, 2023
6e4a207
Add comments
pavel-kirienko Aug 8, 2023
8fa5a4b
Split rxPortAccept into two cases: general case and anonymous case #s…
pavel-kirienko Aug 8, 2023
306acf6
Comment
pavel-kirienko Aug 8, 2023
23358bd
Document the maximum recursion depth explicitly
pavel-kirienko Aug 9, 2023
74b30b6
Split rxSlotAccept into three functions and remove goto
pavel-kirienko Aug 9, 2023
fd7f930
SONAR
pavel-kirienko Aug 9, 2023
22 changes: 11 additions & 11 deletions libudpard/_udpard_cavl.h
@@ -47,7 +47,7 @@ typedef struct UdpardTreeNode Cavl;

/// Returns POSITIVE if the search target is GREATER than the provided node, negative if smaller, zero on match (found).
/// Values other than {-1, 0, +1} are not recommended to avoid overflow during the narrowing conversion of the result.
-typedef int8_t (*CavlPredicate)(void* user_reference, const Cavl* node);
+typedef int_fast8_t (*CavlPredicate)(void* user_reference, const Cavl* node);

/// If provided, the factory will be invoked when the sought node does not exist in the tree.
/// It is expected to return a new node that will be inserted immediately (without the need to traverse the tree again).
@@ -117,13 +117,13 @@ static inline void cavlPrivateRotate(Cavl* const x, const bool r)
static inline Cavl* cavlPrivateAdjustBalance(Cavl* const x, const bool increment)
{
CAVL_ASSERT((x != NULL) && ((x->bf >= -1) && (x->bf <= +1)));
-Cavl* out = x;
-const int8_t new_bf = (int8_t) (x->bf + (increment ? +1 : -1));
+Cavl* out = x;
+const int_fast8_t new_bf = (int_fast8_t) (x->bf + (increment ? +1 : -1));
if ((new_bf < -1) || (new_bf > 1))
{
-const bool r = new_bf < 0; // bf<0 if left-heavy --> right rotation is needed.
-const int8_t sign = r ? +1 : -1; // Positive if we are rotating right.
-Cavl* const z = x->lr[!r];
+const bool r = new_bf < 0; // bf<0 if left-heavy --> right rotation is needed.
+const int_fast8_t sign = r ? +1 : -1; // Positive if we are rotating right.
+Cavl* const z = x->lr[!r];
CAVL_ASSERT(z != NULL); // Heavy side cannot be empty.
// NOLINTNEXTLINE(clang-analyzer-core.NullDereference)
if ((z->bf * sign) <= 0) // Parent and child are heavy on the same side or the child is balanced.
@@ -132,8 +132,8 @@ static inline Cavl* cavlPrivateAdjustBalance(Cavl* const x, const bool increment
cavlPrivateRotate(x, r);
if (0 == z->bf)
{
-x->bf = (int8_t) (-sign);
-z->bf = (int8_t) (+sign);
+x->bf = (int_fast8_t) (-sign);
+z->bf = (int_fast8_t) (+sign);
}
else
{
@@ -150,15 +150,15 @@ static inline Cavl* cavlPrivateAdjustBalance(Cavl* const x, const bool increment
cavlPrivateRotate(x, r);
if ((y->bf * sign) < 0)
{
-x->bf = (int8_t) (+sign);
+x->bf = (int_fast8_t) (+sign);
y->bf = 0;
z->bf = 0;
}
else if ((y->bf * sign) > 0)
{
x->bf = 0;
y->bf = 0;
-z->bf = (int8_t) (-sign);
+z->bf = (int_fast8_t) (-sign);
}
else
{
@@ -209,7 +209,7 @@ static inline Cavl* cavlSearch(Cavl** const root,
Cavl** n = root;
while (*n != NULL)
{
-const int_fast8_t cmp = predicate(user_reference, *n);
+const int_fast8_t cmp = predicate(user_reference, *n);
if (0 == cmp)
{
out = *n;
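The predicate contract above (return negative, zero, or positive in int_fast8_t) is easiest to see with a concrete callback. The following is a minimal sketch, not part of this diff: the MyNode layout and the cavlSearch() argument order are assumptions based on the upstream cavl library rather than on this PR.

// Sketch only: a CavlPredicate under the revised int_fast8_t contract.
#include "_udpard_cavl.h"  // The internal header shown above.
#include <cstdint>

struct MyNode
{
    Cavl          base{};  // Must be the first member so Cavl* can alias MyNode*.
    std::uint32_t key{};
};

// Returns only {-1, 0, +1}, as the contract recommends, to avoid overflow
// in the narrowing conversion of the result.
static std::int_fast8_t compareKey(void* const user_reference, const Cavl* const node)
{
    const auto target = *static_cast<const std::uint32_t*>(user_reference);
    const auto key    = reinterpret_cast<const MyNode*>(node)->key;
    return static_cast<std::int_fast8_t>((target == key) ? 0 : ((target > key) ? +1 : -1));
}

// Search-only usage (a null factory means no insertion on a miss); the
// argument order here is assumed from the upstream cavl library:
//   std::uint32_t target = 42;
//   Cavl* const hit = cavlSearch(&root, &target, &compareKey, nullptr);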
1,086 changes: 985 additions & 101 deletions libudpard/udpard.c

Large diffs are not rendered by default.

246 changes: 136 additions & 110 deletions libudpard/udpard.h

Large diffs are not rendered by default.

12 changes: 12 additions & 0 deletions tests/.idea/dictionaries/pavel.xml

Some generated files are not rendered by default.

51 changes: 31 additions & 20 deletions tests/src/helpers.h
@@ -38,17 +38,17 @@ extern "C" {
} \
} while (0)

-static inline void* dummyAllocatorAllocate(struct UdpardMemoryResource* const self, const size_t size)
+static inline void* dummyAllocatorAllocate(void* const user_reference, const size_t size)
{
-(void) self;
+(void) user_reference;
(void) size;
return NULL;
}

-static inline void dummyAllocatorFree(struct UdpardMemoryResource* const self, const size_t size, void* const pointer)
+static inline void dummyAllocatorDeallocate(void* const user_reference, const size_t size, void* const pointer)
{
+(void) user_reference;
(void) size;
-TEST_PANIC_UNLESS(self != NULL);
TEST_PANIC_UNLESS(pointer == NULL);
}

@@ -57,22 +57,23 @@ static inline void dummyAllocatorFree(struct UdpardMemoryResource* const self, c
#define INSTRUMENTED_ALLOCATOR_CANARY_SIZE 1024U
typedef struct
{
-struct UdpardMemoryResource base;
-uint_least8_t canary[INSTRUMENTED_ALLOCATOR_CANARY_SIZE];
+uint_least8_t canary[INSTRUMENTED_ALLOCATOR_CANARY_SIZE];
/// The limit can be changed at any moment to control the maximum amount of memory that can be allocated.
/// It may be set to a value less than the currently allocated amount.
size_t limit_fragments;
size_t limit_bytes;
/// The current state of the allocator.
size_t allocated_fragments;
size_t allocated_bytes;
} InstrumentedAllocator;

-static inline void* instrumentedAllocatorAllocate(struct UdpardMemoryResource* const base, const size_t size)
+static inline void* instrumentedAllocatorAllocate(void* const user_reference, const size_t size)
{
-InstrumentedAllocator* const self = (InstrumentedAllocator*) base;
-TEST_PANIC_UNLESS(self->base.allocate == &instrumentedAllocatorAllocate);
-void* result = NULL;
-if ((size > 0U) && ((self->allocated_bytes + size) <= self->limit_bytes))
+InstrumentedAllocator* const self = (InstrumentedAllocator*) user_reference;
+void* result = NULL;
+if ((size > 0U) && //
+((self->allocated_bytes + size) <= self->limit_bytes) && //
+((self->allocated_fragments + 1U) <= self->limit_fragments))
{
const size_t size_with_canaries = size + ((size_t) INSTRUMENTED_ALLOCATOR_CANARY_SIZE * 2U);
void* origin = malloc(size_with_canaries);
@@ -98,13 +99,9 @@ static inline void* instrumentedAllocatorAllocate(struct UdpardMemoryResource* c
return result;
}

-static inline void instrumentedAllocatorFree(struct UdpardMemoryResource* const base,
-const size_t size,
-void* const pointer)
+static inline void instrumentedAllocatorDeallocate(void* const user_reference, const size_t size, void* const pointer)
{
-InstrumentedAllocator* const self = (InstrumentedAllocator*) base;
-TEST_PANIC_UNLESS(self->base.allocate == &instrumentedAllocatorAllocate);
-TEST_PANIC_UNLESS(self->base.free == &instrumentedAllocatorFree);
+InstrumentedAllocator* const self = (InstrumentedAllocator*) user_reference;
if (pointer != NULL)
{
uint_least8_t* p = ((uint_least8_t*) pointer) - INSTRUMENTED_ALLOCATOR_CANARY_SIZE;
@@ -135,18 +132,32 @@ static inline void instrumentedAllocatorFree(struct UdpardMemoryResource* const
/// By default, the limit is unrestricted (set to the maximum possible value).
static inline void instrumentedAllocatorNew(InstrumentedAllocator* const self)
{
-self->base.allocate = &instrumentedAllocatorAllocate;
-self->base.free = &instrumentedAllocatorFree;
-self->base.user_reference = NULL;
for (size_t i = 0; i < INSTRUMENTED_ALLOCATOR_CANARY_SIZE; i++)
{
self->canary[i] = (uint_least8_t) (rand() % (UINT_LEAST8_MAX + 1));
}
+self->limit_fragments = SIZE_MAX;
self->limit_bytes = SIZE_MAX;
self->allocated_fragments = 0U;
self->allocated_bytes = 0U;
}

+static inline struct UdpardMemoryResource instrumentedAllocatorMakeMemoryResource(
+const InstrumentedAllocator* const self)
+{
+const struct UdpardMemoryResource out = {.user_reference = (void*) self,
+.deallocate = &instrumentedAllocatorDeallocate,
+.allocate = &instrumentedAllocatorAllocate};
+return out;
+}

+static inline struct UdpardMemoryDeleter instrumentedAllocatorMakeMemoryDeleter(const InstrumentedAllocator* const self)
+{
+const struct UdpardMemoryDeleter out = {.user_reference = (void*) self,
+.deallocate = &instrumentedAllocatorDeallocate};
+return out;
+}

#ifdef __cplusplus
}
#endif
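Because the callbacks no longer receive the UdpardMemoryResource pointer itself, wiring an allocator into the library now goes through the new Make* helpers. Below is a usage sketch, not part of the diff; it relies only on the declarations above, and the field names of UdpardMemoryResource and UdpardMemoryDeleter are inferred from the Make* helpers rather than quoted from udpard.h.

// Sketch only: exercising the refactored test allocator end to end.
#include "helpers.h"

static void exampleAllocatorRoundTrip()
{
    InstrumentedAllocator alloc;
    instrumentedAllocatorNew(&alloc);
    alloc.limit_bytes     = 1024U;  // Cap on total allocatable memory.
    alloc.limit_fragments = 2U;     // Cap on the number of live fragments.

    const UdpardMemoryResource resource = instrumentedAllocatorMakeMemoryResource(&alloc);
    const UdpardMemoryDeleter  deleter  = instrumentedAllocatorMakeMemoryDeleter(&alloc);

    // The library invokes the bare function pointers with the user reference:
    void* const mem = resource.allocate(resource.user_reference, 100U);
    TEST_PANIC_UNLESS(mem != nullptr);
    TEST_PANIC_UNLESS(alloc.allocated_fragments == 1U);
    TEST_PANIC_UNLESS(alloc.allocated_bytes == 100U);

    deleter.deallocate(deleter.user_reference, 100U, mem);
    TEST_PANIC_UNLESS(alloc.allocated_fragments == 0U);  // No leaks remain.
}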
18 changes: 10 additions & 8 deletions tests/src/hexdump.hpp
@@ -11,14 +11,16 @@

namespace hexdump
{
-template <std::uint8_t BytesPerRow = 16, typename InputIterator>
+using Byte = std::uint_least8_t;
+
+template <Byte BytesPerRow = 16, typename InputIterator>
[[nodiscard]] std::string hexdump(InputIterator begin, const InputIterator end)
{
static_assert(BytesPerRow > 0);
-static constexpr std::pair<std::uint8_t, std::uint8_t> PrintableASCIIRange{32, 126};
-std::uint32_t offset = 0;
-std::ostringstream output;
-bool first = true;
+static constexpr std::pair<Byte, Byte> PrintableASCIIRange{32, 126};
+std::uint32_t offset = 0;
+std::ostringstream output;
+bool first = true;
output << std::hex << std::setfill('0');
do
{
@@ -33,7 +35,7 @@ template <std::uint8_t BytesPerRow = 16, typename InputIterator>
output << std::setw(8) << offset << " ";
offset += BytesPerRow;
auto it = begin;
-for (std::uint8_t i = 0; i < BytesPerRow; ++i)
+for (Byte i = 0; i < BytesPerRow; ++i)
{
if (i == 8)
{
@@ -50,7 +52,7 @@ template <std::uint8_t BytesPerRow = 16, typename InputIterator>
}
}
output << " ";
-for (std::uint8_t i = 0; i < BytesPerRow; ++i)
+for (Byte i = 0; i < BytesPerRow; ++i)
{
if (begin != end)
{
@@ -76,6 +78,6 @@ template <std::uint8_t BytesPerRow = 16, typename InputIterator>

[[nodiscard]] inline auto hexdump(const void* const data, const std::size_t size)
{
-return hexdump(static_cast<const std::uint8_t*>(data), static_cast<const std::uint8_t*>(data) + size);
+return hexdump(static_cast<const Byte*>(data), static_cast<const Byte*>(data) + size);
}
} // namespace hexdump
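For reference, a small usage sketch of the templated helper after the Byte alias change; this example is not part of the diff and assumes a C++17 translation unit.

// Sketch only: both hexdump overloads shown above, now using the Byte alias.
#include "hexdump.hpp"
#include <array>
#include <iostream>

int main()
{
    const std::array<hexdump::Byte, 5> data{{0x48U, 0x65U, 0x6CU, 0x6CU, 0x6FU}};
    // Iterator overload with the default 16 bytes per row:
    std::cout << hexdump::hexdump(data.begin(), data.end()) << '\n';
    // Pointer-and-size overload; both now traffic in Byte (= std::uint_least8_t):
    std::cout << hexdump::hexdump(data.data(), data.size()) << '\n';
}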