Skip to content

Commit

Permalink
Big rework of linker and related systems. Cont
Browse files Browse the repository at this point in the history
  • Loading branch information
VonTum committed Sep 21, 2023
1 parent bbb6327 commit bb0fe69
Show file tree
Hide file tree
Showing 8 changed files with 373 additions and 225 deletions.
170 changes: 170 additions & 0 deletions src/arena_alloc.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
use std::{ops::{IndexMut, Index}, marker::PhantomData, iter::Enumerate};

/// A typed arena index: wraps a raw `usize` slot index, with a zero-sized
/// `IndexMarker` type parameter so indices from different arenas cannot be
/// mixed up at compile time (the marker is never instantiated, only carried
/// in `PhantomData`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UUID<IndexMarker>(usize, PhantomData<IndexMarker>);

impl<IndexMarker> Default for UUID<IndexMarker> {
    /// Defaults to the [`UUID::INVALID`] sentinel rather than slot 0,
    /// presumably so an uninitialized id is never a valid index — TODO confirm
    /// callers rely on this.
    fn default() -> Self {
        Self::INVALID
    }
}

impl<IndexMarker> UUID<IndexMarker> {
    /// Sentinel "no id" value. Uses `usize::MAX`, which indexes out of bounds
    /// in any realistic arena, so accidental use panics instead of aliasing
    /// slot 0.
    pub const INVALID : Self = UUID(usize::MAX, PhantomData);
}

/// Slab-style arena: slots hold `Some(T)` when live and `None` when free or
/// merely reserved; freed slot indices are recycled through `free_slots`.
pub struct ArenaAllocator<T, IndexMarker> {
    // Dense slot storage; `None` marks a free or reserved slot.
    data : Vec<Option<T>>,
    // Indices of previously freed slots, reused before growing `data`.
    free_slots : Vec<usize>,
    _ph : PhantomData<IndexMarker>
}

// Manual impl instead of `#[derive(Default)]`: the derive would add spurious
// `T: Default` and `IndexMarker: Default` bounds, even though `Vec` and
// `PhantomData` are unconditionally `Default`.
impl<T, IndexMarker> Default for ArenaAllocator<T, IndexMarker> {
    fn default() -> Self {
        Self{data : Vec::new(), free_slots : Vec::new(), _ph : PhantomData}
    }
}

impl<T, IndexMarker> ArenaAllocator<T, IndexMarker> {
    pub fn new() -> Self {
        Self{data : Vec::new(), free_slots : Vec::new(), _ph : PhantomData}
    }
    /// Shared slot-finding logic for [`Self::alloc`] and [`Self::reserve`]:
    /// reuses a freed slot if one exists, otherwise appends a new one.
    /// Stores `v` (`Some` for a live value, `None` for a reservation) and
    /// returns the slot index.
    fn place(&mut self, v : Option<T>) -> usize {
        if let Some(empty_slot) = self.free_slots.pop() {
            // A recycled slot must have been cleared by `free`/`reserve`.
            assert!(self.data[empty_slot].is_none());
            self.data[empty_slot] = v;
            empty_slot
        } else {
            let l = self.data.len();
            self.data.push(v);
            l
        }
    }
    /// Stores `v` in the arena and returns its id.
    pub fn alloc(&mut self, v : T) -> UUID<IndexMarker> {
        UUID(self.place(Some(v)), PhantomData)
    }
    /// Claims a slot id without storing a value yet; fill it later with
    /// [`Self::alloc_reservation`]. Indexing a reserved slot panics.
    pub fn reserve(&mut self) -> UUID<IndexMarker> {
        UUID(self.place(None), PhantomData)
    }
    /// Fills a slot previously claimed by [`Self::reserve`].
    /// Panics if the slot is already occupied.
    pub fn alloc_reservation(&mut self, UUID(uuid, _) : UUID<IndexMarker>, v : T) {
        assert!(self.data[uuid].is_none());
        self.data[uuid] = Some(v);
    }
    /// Removes and returns the value at `uuid`, recycling the slot.
    /// Panics if the slot is empty (double free or freed reservation).
    pub fn free(&mut self, UUID(uuid, _) : UUID<IndexMarker>) -> T {
        self.free_slots.push(uuid);
        // `take` replaces `mem::replace(&mut …, None)`; unwrap enforces the
        // slot actually held a value.
        self.data[uuid].take().unwrap()
    }
}

impl<T, IndexMarker> Index<UUID<IndexMarker>> for ArenaAllocator<T, IndexMarker> {
    type Output = T;

    /// Panics if the slot is free or only reserved. The previous
    /// `assert!(is_some())` + `unwrap()` checked the same condition twice;
    /// a single `expect` states the invariant once.
    fn index(&self, UUID(uuid, _): UUID<IndexMarker>) -> &Self::Output {
        self.data[uuid].as_ref().expect("indexed a free or reserved arena slot")
    }
}

impl<T, IndexMarker> IndexMut<UUID<IndexMarker>> for ArenaAllocator<T, IndexMarker> {
    /// Panics if the slot is free or only reserved (same invariant as
    /// `Index`; the redundant pre-`assert` is folded into the `expect`).
    fn index_mut(&mut self, UUID(uuid, _): UUID<IndexMarker>) -> &mut Self::Output {
        self.data[uuid].as_mut().expect("indexed a free or reserved arena slot")
    }
}

/// Borrowing iterator over an [`ArenaAllocator`]'s occupied slots, yielding
/// `(UUID, &T)` pairs. Free/reserved (`None`) slots are skipped.
pub struct ArenaIterator<'a, T, IndexMarker> {
    // Enumerate supplies the slot index that becomes the yielded UUID.
    it: Enumerate<std::slice::Iter<'a, Option<T>>>,
    _ph : PhantomData<IndexMarker>
}

impl<'a, T, IndexMarker> Iterator for ArenaIterator<'a, T, IndexMarker> {
    type Item = (UUID<IndexMarker>, &'a T);

    /// Advances past empty slots and yields the next occupied one, paired
    /// with its reconstructed [`UUID`]. Returns `None` at the end.
    fn next(&mut self) -> Option<Self::Item> {
        // `find_map` replaces the hand-written loop/match: it skips slots
        // where the closure returns `None` (empty slots) and stops at the
        // first `Some`.
        self.it
            .find_map(|(pos, slot)| slot.as_ref().map(|v| (UUID(pos, PhantomData), v)))
    }
}

/// Mutably borrowing iterator over an [`ArenaAllocator`]'s occupied slots,
/// yielding `(UUID, &mut T)` pairs. Free/reserved (`None`) slots are skipped.
pub struct ArenaIteratorMut<'a, T, IndexMarker> {
    // Enumerate supplies the slot index that becomes the yielded UUID.
    it: Enumerate<std::slice::IterMut<'a, Option<T>>>,
    _ph : PhantomData<IndexMarker>
}

impl<'a, T, IndexMarker> Iterator for ArenaIteratorMut<'a, T, IndexMarker> {
    type Item = (UUID<IndexMarker>, &'a mut T);

    /// Advances past empty slots and yields the next occupied one, paired
    /// with its reconstructed [`UUID`]. Returns `None` at the end.
    fn next(&mut self) -> Option<Self::Item> {
        // Same shape as the shared-reference iterator, using `as_mut` to
        // hand out `&'a mut T` instead of `&'a T`.
        self.it
            .find_map(|(pos, slot)| slot.as_mut().map(|v| (UUID(pos, PhantomData), v)))
    }
}

/// Enables `for (id, value) in &arena` over occupied slots only.
impl<'a, T, IndexMarker> IntoIterator for &'a ArenaAllocator<T, IndexMarker> {
    type Item = (UUID<IndexMarker>, &'a T);

    type IntoIter = ArenaIterator<'a, T, IndexMarker>;

    fn into_iter(self) -> Self::IntoIter {
        ArenaIterator{it : self.data.iter().enumerate(), _ph : PhantomData}
    }
}

/// Enables `for (id, value) in &mut arena` over occupied slots only.
impl<'a, T, IndexMarker> IntoIterator for &'a mut ArenaAllocator<T, IndexMarker> {
    type Item = (UUID<IndexMarker>, &'a mut T);

    type IntoIter = ArenaIteratorMut<'a, T, IndexMarker>;

    fn into_iter(self) -> Self::IntoIter {
        ArenaIteratorMut{it : self.data.iter_mut().enumerate(), _ph : PhantomData}
    }
}

/// Dense companion to [`ArenaAllocator`]: a plain `Vec` indexed by the same
/// typed [`UUID`]s, auto-growing on insert and using `T::default()` to fill
/// gaps and to mark removed entries.
pub struct ArenaVector<T : Default, IndexMarker> {
    data : Vec<T>,
    _ph : PhantomData<IndexMarker>
}

// The type had private fields and no constructor, so it could not be created
// outside this module. Manual impl (rather than `#[derive(Default)]`) avoids
// a spurious `IndexMarker: Default` bound.
impl<T : Default, IndexMarker> Default for ArenaVector<T, IndexMarker> {
    fn default() -> Self {
        Self{data : Vec::new(), _ph : PhantomData}
    }
}

impl<T : Default, IndexMarker> ArenaVector<T, IndexMarker> {
    /// Stores `value` at the slot named by `uuid`, growing the backing
    /// vector with `T::default()` fillers if it is not yet long enough.
    pub fn insert(&mut self, UUID(uuid, _) : UUID<IndexMarker>, value : T) {
        let required_len = uuid + 1;
        if self.data.len() < required_len {
            self.data.resize_with(required_len, T::default);
        }
        self.data[uuid] = value;
    }
    /// Clears the slot back to `T::default()`. The vector is not shrunk.
    pub fn remove(&mut self, UUID(uuid, _) : UUID<IndexMarker>) {
        self.data[uuid] = T::default();
    }
}

impl<T : Default, IndexMarker> Index<UUID<IndexMarker>> for ArenaVector<T, IndexMarker> {
    type Output = T;

    /// Plain `Vec` indexing: panics if `uuid` is past the current length.
    /// Note a removed slot still yields `T::default()`, not a panic.
    fn index(&self, UUID(uuid, _): UUID<IndexMarker>) -> &Self::Output {
        &self.data[uuid]
    }
}

impl<T : Default, IndexMarker> IndexMut<UUID<IndexMarker>> for ArenaVector<T, IndexMarker> {
    /// Mutable counterpart of `Index`; panics if `uuid` is out of bounds.
    fn index_mut(&mut self, UUID(uuid, _): UUID<IndexMarker>) -> &mut Self::Output {
        &mut self.data[uuid]
    }
}
12 changes: 3 additions & 9 deletions src/ast.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@

use num_bigint::BigUint;

use crate::{tokenizer::TokenTypeIdx, linker::ValueUUID, linker::TypeUUID};
use crate::{tokenizer::TokenTypeIdx, linker::{ValueUUID, FileUUID}};
use core::ops::Range;
use std::{rc::Rc, path::Path};

// Token span. Indices are INCLUSIVE
#[derive(Clone,Copy,Debug,PartialEq,Eq)]
Expand Down Expand Up @@ -105,21 +104,17 @@ pub enum Statement {
TimelineStage(usize)
}

pub type FileName = Rc<Path>;

#[derive(Debug)]
pub struct Location {
pub file_name : FileName,
pub file : FileUUID,
pub span : Span
}

#[derive(Debug, Default)]
pub struct Dependencies {
pub global_references : Vec<GlobalReference>,
pub type_references : Vec<GlobalReference>,

pub resolved_globals : Vec<ValueUUID>,
pub resolved_types : Vec<TypeUUID>
pub resolved_globals : Vec<ValueUUID>
}

#[derive(Debug)]
Expand All @@ -128,7 +123,6 @@ pub struct Module {
pub declarations : Vec<SignalDeclaration>,
pub code : Vec<SpanStatement>,

pub full_name : String,
pub location : Location,
pub dependencies : Dependencies
}
Expand Down
20 changes: 13 additions & 7 deletions src/dev_aid/lsp.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use lsp_server::{Response, Message, Connection};

use lsp_types::notification::Notification;

use crate::{parser::{perform_full_semantic_parse, FullParseResult}, dev_aid::syntax_highlighting::create_token_ide_info, ast::{IdentifierType, Span}, errors::{ErrorCollector, ParsingError}};
use crate::{parser::{perform_full_semantic_parse, FullParseResult}, dev_aid::syntax_highlighting::create_token_ide_info, ast::{IdentifierType, Span}, errors::{ErrorCollector, ParsingError}, linker::{Linker, PreLinker}};

use super::syntax_highlighting::{IDETokenType, IDEIdentifierType, IDEToken};

Expand Down Expand Up @@ -270,23 +270,23 @@ fn cvt_span_to_lsp_range(ch_sp : Span, token_positions : &[std::ops::Range<Posit
}

// Requires that token_positions.len() == tokens.len() + 1 to include EOF token
fn convert_diagnostic(err : ParsingError, severity : DiagnosticSeverity, token_positions : &[std::ops::Range<Position>]) -> Diagnostic {
fn convert_diagnostic(err : ParsingError, severity : DiagnosticSeverity, token_positions : &[std::ops::Range<Position>], linker : &Linker) -> Diagnostic {
let error_pos = cvt_span_to_lsp_range(err.position, token_positions);

let mut related_info = Vec::new();
for info in err.infos {
let info_pos = cvt_span_to_lsp_range(info.position, token_positions);
let location = Location{uri : Url::from_file_path(info.file_name).unwrap(), range : info_pos};
let location = Location{uri : Url::from_file_path(&linker.files[info.file].file_path).unwrap(), range : info_pos};
related_info.push(DiagnosticRelatedInformation { location, message: info.info });
}
Diagnostic::new(error_pos, Some(severity), None, None, err.reason, Some(related_info), None)
}

// Requires that token_positions.len() == tokens.len() + 1 to include EOF token
fn send_errors_warnings(connection: &Connection, errors : ErrorCollector, uri : Url, token_positions : &[std::ops::Range<Position>]) -> Result<(), Box<dyn Error + Sync + Send>> {
fn send_errors_warnings(connection: &Connection, errors : ErrorCollector, uri : Url, token_positions : &[std::ops::Range<Position>], linker : &Linker) -> Result<(), Box<dyn Error + Sync + Send>> {
let mut diag_vec : Vec<Diagnostic> = Vec::new();
for err in errors.errors {
diag_vec.push(convert_diagnostic(err, DiagnosticSeverity::ERROR, token_positions));
diag_vec.push(convert_diagnostic(err, DiagnosticSeverity::ERROR, token_positions, linker));
}

let params = &PublishDiagnosticsParams{
Expand Down Expand Up @@ -339,7 +339,11 @@ fn main_loop(
let path : PathBuf = params.text_document.uri.to_file_path().unwrap();
let file_data : Rc<LoadedFile> = file_cache.get(&path);

let (full_parse, errors) = perform_full_semantic_parse(&file_data.file_text, Rc::from(path));

let mut prelink = PreLinker::new();
let uuid = prelink.reserve_file();

let (full_parse, errors) = perform_full_semantic_parse(&file_data.file_text, uuid);

let (syntax_highlight, token_positions) = do_syntax_highlight(&file_data, &full_parse);

Expand All @@ -348,7 +352,9 @@ fn main_loop(
id: req.id, result: Some(result), error: None
}))?;

send_errors_warnings(&connection, errors, params.text_document.uri, &token_positions)?;
let linker = prelink.link();

send_errors_warnings(&connection, errors, params.text_document.uri, &token_positions, &linker)?;
},
// TODO ...
req => {
Expand Down
30 changes: 15 additions & 15 deletions src/dev_aid/syntax_highlighting.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@

use std::{ops::Range, rc::Rc, collections::HashMap};
use std::{ops::Range, path::PathBuf};

use crate::{ast::*, tokenizer::*, parser::*, linker::Links};
use crate::{ast::*, tokenizer::*, parser::*, linker::PreLinker};

use ariadne::FileCache;
use console::Style;
Expand Down Expand Up @@ -107,7 +107,8 @@ fn walk_name_color(ast : &ASTRoot, result : &mut [IDEToken]) {
});
result[module.name.position].typ = IDETokenType::Identifier(IDEIdentifierType::Interface);

for part_vec in &module.dependencies.type_references {

for part_vec in &module.dependencies.global_references {
for part_tok in part_vec {
result[part_tok.position].typ = IDETokenType::Identifier(IDEIdentifierType::Type);
}
Expand Down Expand Up @@ -179,19 +180,19 @@ fn generate_character_offsets(file_text : &str, tokens : &[Token]) -> Vec<Range<
character_offsets
}

pub fn syntax_highlight_file(file_paths : &[FileName]) {
let mut linker : Links = Links::new();
let mut file_list = HashMap::new();
pub fn syntax_highlight_file(file_paths : Vec<PathBuf>) {
let mut prelinker : PreLinker = PreLinker::new();
for file_path in file_paths {
let file_text = match std::fs::read_to_string(file_path) {
let uuid = prelinker.reserve_file();
let file_text = match std::fs::read_to_string(&file_path) {
Ok(file_text) => file_text,
Err(reason) => {
let file_path_disp = file_path.display();
panic!("Could not open file '{file_path_disp}' for syntax highlighting because {reason}")
}
};

let (full_parse, errors) = perform_full_semantic_parse(&file_text, Rc::from(file_path.to_owned()));
let (full_parse, errors) = perform_full_semantic_parse(&file_text, uuid);

print_tokens(&file_text, &full_parse.tokens);

Expand All @@ -202,19 +203,18 @@ pub fn syntax_highlight_file(file_paths : &[FileName]) {

println!("{:?}", full_parse.ast);

let file_data = linker.add_file(file_text, full_parse.ast, errors, (full_parse.tokens, ide_tokens));
file_list.insert(file_path.clone(), file_data);
prelinker.add_reserved_file(uuid, file_path, file_text, full_parse, errors);
}

let linked = linker.link_all(file_list);
let linker = prelinker.link();

let mut file_cache : FileCache = Default::default();

for (_file_name, f) in &linked.files {
let token_offsets = generate_character_offsets(&f.file_text, &f.extra_data.0);
for (_file_name, f) in &linker.files {
let token_offsets = generate_character_offsets(&f.file_text, &f.tokens);

for err in &f.errors.errors {
err.pretty_print_error(&f.errors.main_file, &token_offsets, &mut file_cache);
for err in &f.parsing_errors.errors {
err.pretty_print_error(f.parsing_errors.file, &token_offsets, &linker, &mut file_cache);
}
}
}
Loading

0 comments on commit bb0fe69

Please sign in to comment.