/*
* Copyright © 2022 Google, LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice (including the next
* paragraph) shall be included in all copies or substantial portions of the
* Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
#include "u_magma_map.h"
#include "util/u_atomic.h"
#include <stddef.h>
static const uint32_t kInvalidHandle = 0;

/* One slot in the sparse array: maps a handle (its array index) to a 64-bit object. */
struct u_magma_map_entry {
   uint64_t object;     /* the 64-bit value associated with this handle */
   uint32_t handle;     /* index of this entry in the sparse array */
   uint32_t free_index; /* next-free link used by the free list */
   bool valid;          /* false once the handle has been released */
};

void u_magma_map_init(struct u_magma_map* map)
{
   size_t node_size = 8; /* grows as necessary */
   uint32_t sentinel = kInvalidHandle;

   util_sparse_array_init(&map->array, sizeof(struct u_magma_map_entry), node_size);
   util_sparse_array_free_list_init(&map->free_list, &map->array, sentinel,
                                    offsetof(struct u_magma_map_entry, free_index));
   map->current_handle = kInvalidHandle;
}

void u_magma_map_release(struct u_magma_map* map)
{
   struct u_magma_map_entry* entry;

   /* Drain the free list before tearing down the backing array. */
   while ((entry = util_sparse_array_free_list_pop_elem(&map->free_list))) {
   }

   util_sparse_array_finish(&map->array);
}

uint32_t u_magma_map_get(struct u_magma_map* map, uint64_t object)
{
   struct u_magma_map_entry* entry;

   /* Prefer reusing an entry whose handle was previously released. */
   entry = util_sparse_array_free_list_pop_elem(&map->free_list);
   if (!entry) {
      /* No free entry: allocate a new handle. Handles start at 1 because
       * kInvalidHandle (0) is the free-list sentinel. */
      uint32_t handle = p_atomic_inc_return(&map->current_handle);
      entry = util_sparse_array_get(&map->array, handle);
      entry->handle = handle;
   }

   entry->object = object;
   entry->valid = true;
   return entry->handle;
}

static struct u_magma_map_entry* query(struct u_magma_map* map, uint32_t handle)
{
   if (handle == kInvalidHandle)
      return NULL;

   struct u_magma_map_entry* entry = util_sparse_array_get(&map->array, handle);
   if (entry->valid)
      return entry;

   /* We may have just allocated a node; it will be reused when
    * current_handle reaches handle. */
   return NULL;
}

bool u_magma_map_query(struct u_magma_map* map, uint32_t handle, uint64_t* object_out)
{
   struct u_magma_map_entry* entry = query(map, handle);
   if (!entry)
      return false;

   *object_out = entry->object;
   return true;
}

void u_magma_map_put(struct u_magma_map* map, uint32_t handle)
{
   struct u_magma_map_entry* entry = query(map, handle);
   assert(entry);
   assert(entry->valid);

   /* Mark the entry invalid and return its index to the free list for reuse. */
   entry->valid = false;
   util_sparse_array_free_list_push(&map->free_list, &handle, 1);
}
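
/*
 * Usage sketch: a minimal, hypothetical round trip through this map, using
 * only the functions defined above. The object value is illustrative; any
 * 64-bit value a caller wants to expose through a 32-bit handle works the
 * same way.
 *
 *    struct u_magma_map map;
 *    u_magma_map_init(&map);
 *
 *    uint64_t object = 0xdeadbeefull;
 *    uint32_t handle = u_magma_map_get(&map, object);  // handles start at 1
 *
 *    uint64_t found = 0;
 *    if (u_magma_map_query(&map, handle, &found))
 *       assert(found == object);
 *
 *    u_magma_map_put(&map, handle);  // handle index returns to the free list
 *    u_magma_map_release(&map);
 */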