Skip to content

Commit

Permalink
Make DeclArena generic
Browse files Browse the repository at this point in the history
  • Loading branch information
osa1 committed Feb 12, 2024
1 parent 74cd97e commit 9e52fd0
Show file tree
Hide file tree
Showing 15 changed files with 293 additions and 216 deletions.
120 changes: 120 additions & 0 deletions crates/h10/src/arena.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
use std::fmt;
use std::hash::Hash;
use std::marker::PhantomData;

/// A generic slab-style arena: elements are stored in a vector and addressed
/// by stable [`Idx`] handles; freed slots are recycled via a free list.
#[derive(Debug)]
pub struct Arena<T> {
    // Backing storage. Each slot is either `Used` (holds a live `T`) or
    // `Free` (a link in the intrusive free list).
    allocs: Vec<Allocation<T>>,

    /// A free slot in the arena, if available.
    ///
    /// When this is not available we grow the vector.
    free: Option<Idx<T>>,
}

/// A typed index into an [`Arena<T>`].
///
/// The `T` parameter prevents accidentally using an index from one arena type
/// with an arena of a different element type.
pub struct Idx<T> {
    // Slot position in `Arena::allocs`. `u32` rather than `usize` keeps the
    // handle small (see the size assertion below).
    idx: u32,
    // Ties the index to its element type without storing a `T`.
    phantom: PhantomData<T>,
}

// `PhantomData` is zero-sized, so an `Idx` must cost no more than a bare
// `u32`; this fails to compile if that ever stops holding.
const _: () = assert!(std::mem::size_of::<Idx<u64>>() == std::mem::size_of::<u32>());

// Manual impl: deriving `Debug` would require `T: Debug` and would also try
// to print the `phantom` field.
impl<T> fmt::Debug for Idx<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Renders as `Idx(n)` for every `T`.
        let mut tuple = f.debug_tuple("Idx");
        tuple.field(&self.idx);
        tuple.finish()
    }
}

impl<T> Clone for Idx<T> {
fn clone(&self) -> Self {
*self
}
}

impl<T> Copy for Idx<T> {}

// Manual impls so `Idx<T>` is comparable and hashable for any `T` (a derive
// would demand `T: PartialEq` / `T: Hash`). Only the slot number identifies
// an index; `phantom` carries no data.
impl<T> PartialEq for Idx<T> {
    fn eq(&self, rhs: &Self) -> bool {
        self.idx == rhs.idx
    }
}

impl<T> Eq for Idx<T> {}

impl<T> Hash for Idx<T> {
    fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) {
        // Equivalent to `self.idx.hash(hasher)`: `u32`'s `Hash` impl
        // forwards to `write_u32`.
        hasher.write_u32(self.idx);
    }
}

impl<T> Idx<T> {
    /// Builds an index from a vector position.
    ///
    /// # Panics
    ///
    /// Panics if `idx` does not fit in a `u32`. A bare `as` cast would
    /// silently truncate on 64-bit targets, aliasing an unrelated slot once
    /// an arena exceeds `u32::MAX` entries.
    fn from_usize(idx: usize) -> Self {
        assert!(idx <= u32::MAX as usize, "arena index overflows u32");
        Self {
            idx: idx as u32,
            phantom: Default::default(),
        }
    }

    /// Returns the index as a vector position.
    fn as_usize(&self) -> usize {
        // Widening u32 -> usize is lossless on 32- and 64-bit targets.
        self.idx as usize
    }
}

/// A slot in the arena: either holds a live element or is a link in the
/// free list.
#[derive(Debug)]
enum Allocation<T> {
    /// Unused slot. `next_free_slot` points at the next free slot (if any),
    /// forming an intrusive singly-linked list threaded through `allocs`,
    /// with its head in `Arena::free`.
    Free { next_free_slot: Option<Idx<T>> },
    /// Occupied slot holding a live element.
    Used { elem: T },
}

impl<T> Arena<T> {
    /// Creates an empty arena with no slots allocated.
    pub fn new() -> Self {
        Self {
            allocs: vec![],
            free: None,
        }
    }

    /// Stores `elem` in the arena and returns a stable index for it.
    ///
    /// Reuses the head of the free list when one is available; otherwise
    /// grows the backing vector by one slot.
    pub fn allocate(&mut self, elem: T) -> Idx<T> {
        match self.free.take() {
            Some(idx) => {
                let free_slot =
                    std::mem::replace(&mut self.allocs[idx.as_usize()], Allocation::Used { elem });
                match free_slot {
                    Allocation::Free { next_free_slot } => {
                        // Pop the head of the free list.
                        self.free = next_free_slot;
                    }
                    Allocation::Used { elem: _ } => {
                        panic!("Free slot in `self.free` was not really free");
                    }
                }
                idx
            }
            None => {
                let idx = Idx::from_usize(self.allocs.len());
                self.allocs.push(Allocation::Used { elem });
                idx
            }
        }
    }

    /// Drops the element at `idx` and pushes its slot onto the free list so
    /// that a later [`allocate`](Self::allocate) can reuse it.
    ///
    /// # Panics
    ///
    /// In debug builds, panics if the slot at `idx` is already free: a
    /// double free would otherwise make the free list cyclic and corrupt
    /// the arena.
    pub fn free(&mut self, idx: Idx<T>) {
        let old = std::mem::replace(
            &mut self.allocs[idx.as_usize()],
            Allocation::Free {
                next_free_slot: self.free,
            },
        );
        // Catch double frees at the call site instead of corrupting the
        // free list and failing later inside `allocate`.
        debug_assert!(
            matches!(old, Allocation::Used { .. }),
            "Arena slot freed twice"
        );
        self.free = Some(idx);
    }

    /// Returns a reference to the element at `idx`.
    ///
    /// # Panics
    ///
    /// Panics if the slot at `idx` has been freed.
    pub fn get(&self, idx: Idx<T>) -> &T {
        match &self.allocs[idx.as_usize()] {
            Allocation::Free { .. } => panic!("Arena index is not in use"),
            Allocation::Used { elem } => elem,
        }
    }

    /// Returns a mutable reference to the element at `idx`.
    ///
    /// # Panics
    ///
    /// Panics if the slot at `idx` has been freed.
    pub fn get_mut(&mut self, idx: Idx<T>) -> &mut T {
        match &mut self.allocs[idx.as_usize()] {
            Allocation::Free { .. } => panic!("Arena index is not in use"),
            Allocation::Used { elem } => elem,
        }
    }
}

/// Equivalent to [`Arena::new`].
impl<T> Default for Arena<T> {
    fn default() -> Self {
        Self::new()
    }
}
88 changes: 0 additions & 88 deletions crates/h10/src/decl_arena.rs

This file was deleted.

25 changes: 18 additions & 7 deletions crates/h10/src/incremental_lexing.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
#[cfg(test)]
mod tests;

use crate::decl_arena::DeclArena;
use crate::arena::Arena;
use crate::indentation_groups::IndentationGroup;
use crate::pos::Pos;
use crate::token::TokenRef;
use h10_lexer::Lexer;
Expand All @@ -13,7 +14,7 @@ use lexgen_util::Loc;
///
/// [`inserted_text`] should not be empty.
///
/// [`DeclArena`] argument is needed to be able to get absolute spans of tokens, to be able to
/// [`Arena<IndentationGroup>`] argument is needed to get absolute spans of tokens, so we can
/// check if we've generated an identical token and stop.
///
/// The returned token is the replacement for [`lex_start`]. The caller should update:
Expand All @@ -24,7 +25,7 @@ pub(crate) fn relex_insertion(
lex_start: TokenRef,
insertion_pos: Pos,
inserted_text: &str,
arena: &DeclArena,
arena: &Arena<IndentationGroup>,
) -> TokenRef {
debug_assert!(!inserted_text.is_empty());

Expand All @@ -48,7 +49,7 @@ pub(crate) fn relex_deletion(
lex_start: TokenRef,
deletion_start: Pos,
deletion_end: Pos,
arena: &DeclArena,
arena: &Arena<IndentationGroup>,
) -> Option<TokenRef> {
let chars =
TokenCharIteratorWithDeletion::new(lex_start.clone(), deletion_start, deletion_end, arena);
Expand All @@ -62,7 +63,7 @@ fn relex<I, F>(
lex_start: TokenRef,
char_iter: I,
update_end_pos: Pos,
arena: &DeclArena,
arena: &Arena<IndentationGroup>,
adjust_old_token_pos: F,
) -> Option<TokenRef>
where
Expand Down Expand Up @@ -191,7 +192,12 @@ enum TokenCharIterSource {

impl<'a> TokenCharIteratorWithInsertion<'a> {
// NB. `insertion_pos` is the absolute position.
fn new(token: TokenRef, insertion_pos: Pos, inserted_text: &'a str, arena: &DeclArena) -> Self {
fn new(
token: TokenRef,
insertion_pos: Pos,
inserted_text: &'a str,
arena: &Arena<IndentationGroup>,
) -> Self {
Self {
current_pos: token.absolute_span(arena).start,
source: TokenCharIterSource::TokenBeforeInsertion,
Expand Down Expand Up @@ -300,7 +306,12 @@ struct TokenCharIteratorWithDeletion {
}

impl TokenCharIteratorWithDeletion {
fn new(token: TokenRef, deletion_start: Pos, deletion_end: Pos, arena: &DeclArena) -> Self {
fn new(
token: TokenRef,
deletion_start: Pos,
deletion_end: Pos,
arena: &Arena<IndentationGroup>,
) -> Self {
Self {
current_pos: token.absolute_span(arena).start,
token,
Expand Down
20 changes: 10 additions & 10 deletions crates/h10/src/incremental_lexing/tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ fn insertion_iteration_0() {
token.clone(),
insertion_pos,
inserted_text,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand All @@ -40,7 +40,7 @@ fn insertion_iteration_1() {
token.clone(),
insertion_pos,
inserted_text,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand Down Expand Up @@ -68,7 +68,7 @@ fn insertion_iteration_2() {
token.clone(),
insertion_pos,
inserted_text,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand Down Expand Up @@ -96,7 +96,7 @@ fn insertion_iteration_3() {
token.clone(),
insertion_pos,
inserted_text,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand Down Expand Up @@ -125,7 +125,7 @@ fn deletion_iteration_0() {
token.clone(),
deletion_start,
deletion_end,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand All @@ -147,7 +147,7 @@ fn deletion_iteration_1() {
token.clone(),
deletion_start,
deletion_end,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand All @@ -166,7 +166,7 @@ fn deletion_iteration_2() {
token.clone(),
deletion_start,
deletion_end,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand All @@ -185,7 +185,7 @@ fn deletion_iteration_3() {
token.clone(),
deletion_start,
deletion_end,
&DeclArena::new(),
&Arena::new(),
)
.collect();

Expand All @@ -203,7 +203,7 @@ fn relex_insertion_same_group() {
let token = lex_full(pgm, Pos::ZERO);
let initial_token_list: Vec<TokenRef> = token.iter().collect();

let arena = DeclArena::new();
let arena = Arena::new();

let relex_start_token = token.iter().nth(8).unwrap();
assert_eq!(relex_start_token.text(), "x");
Expand Down Expand Up @@ -243,7 +243,7 @@ fn relex_insertion_new_group() {
let token = lex_full(pgm, Pos::ZERO);
let initial_token_list: Vec<TokenRef> = token.iter().collect();

let arena = DeclArena::new();
let arena = Arena::new();

let relex_start_token = token.iter().nth(7).unwrap();
assert_eq!(relex_start_token.text(), "\n");
Expand Down
Loading

0 comments on commit 9e52fd0

Please sign in to comment.