The performance of a storage system is often dictated by its caching strategy. An effective cache will significantly reduce data access latency and improve overall system throughput. The caching competition is an assignment designed to encourage students to learn and explore the design of cache eviction algorithms. Students will implement their own cache eviction strategies and compete against each other based on performance on a set of benchmark workloads.
Below are minimal FIFO-style examples to illustrate the shape of a cache eviction implementation.
#include "cache.h"
/* Singly-linked queue node holding one cached object's key. */
typedef struct fifo_node {
uint64_t key;               /* object key recorded by cache_record() */
struct fifo_node *next;     /* next-older insertion, NULL at the tail */
} fifo_node_t;
/* FIFO queue state: head is the oldest entry (next eviction victim),
 * tail is the newest. Both NULL when the queue is empty. */
typedef struct {
fifo_node_t *head;
fifo_node_t *tail;
} fifo_state_t;
/* Allocate zero-initialized FIFO state (head == tail == NULL, i.e. an
 * empty queue). Returns NULL if allocation fails; the caller owns the
 * returned context and passes it to cache_record()/cache_evict(). */
void *cache_init(void) {
    return calloc(1, sizeof(fifo_state_t));
}
/* Record an access: append `key` to the tail of the FIFO queue.
 * `size` is ignored — FIFO ordering does not depend on object size.
 * Fix: the original dereferenced the calloc() result without checking
 * it, so an allocation failure crashed instead of dropping the record. */
void cache_record(void *ctx, uint64_t key, uint64_t size) {
    (void)size;
    fifo_state_t *state = (fifo_state_t *)ctx;
    fifo_node_t *node = calloc(1, sizeof(fifo_node_t));
    if (!node) {
        return;  /* out of memory: silently drop this record rather than crash */
    }
    node->key = key;
    if (!state->tail) {
        /* Empty queue: the new node is both head and tail. */
        state->head = state->tail = node;
    } else {
        state->tail->next = node;
        state->tail = node;
    }
}
uint64_t cache_evict(void *ctx) {
fifo_state_t *state = (fifo_state_t *)ctx;
fifo_node_t *node = state->head;
if (!node) {
return 0;
}
state->head = node->next;
if (!state->head) {
state->tail = NULL;
}
uint64_t key = node->key;
free(node);
return key;
}
#include "cache.hpp"
#include <deque>
/// First-in-first-out cache eviction policy: keys are evicted in the
/// exact order they were recorded. Object size plays no role in FIFO.
class FifoCache {
 public:
  /// No-op: the member deque is default-constructed empty.
  void init() {}

  /// Record an access by enqueueing `key` at the back. `size` is ignored.
  void record(uint64_t key, uint64_t size) {
    (void)size;
    queue_.push_back(key);
  }

  /// Dequeue and return the oldest key; returns 0 when nothing is queued
  /// (indistinguishable from a genuine key 0).
  uint64_t evict() {
    if (queue_.empty()) {
      return 0;
    }
    const uint64_t oldest = queue_.front();
    queue_.pop_front();
    return oldest;
  }

 private:
  std::deque<uint64_t> queue_;  // insertion-ordered keys, oldest at the front
};
from collections import deque
class FifoCache:
    """First-in-first-out eviction policy: keys leave in arrival order."""

    def __init__(self):
        # Oldest recorded key sits at the left end of the deque.
        self.queue = deque()

    def record(self, key, size):
        """Enqueue key; object size is irrelevant to FIFO ordering."""
        _ = size
        self.queue.append(key)

    def evict(self):
        """Dequeue and return the oldest key, or 0 when the queue is empty."""
        return self.queue.popleft() if self.queue else 0
Note that these examples are illustrative only: to compete, adjust the function names and interfaces to match the libCacheSim plugin API.