
Commit

Messy fix for updates
dannymcgee committed May 14, 2024
1 parent f737724 commit d705b25
Showing 3 changed files with 135 additions and 51 deletions.
32 changes: 28 additions & 4 deletions packages/server/src/documents/intake.rs
@@ -120,11 +120,23 @@ pub(super) fn read_documents(
eprintln!(" Needs preprocessing: {uri}");
match r_config.as_ref() {
Some(config) => match crate::pre::process(
uri.clone(),
&uri,
source,
&config.preprocessor.global_shader_defs,
) {
Ok(bundle) => Some(bundle),
Ok((source, pruned_source, tokens, comments, errors, scopes, tree)) => {
Some(WgslDocumentBundle {
comments: comments.into(),
errors: errors.into(),
tokens: tokens.into(),
uri: uri.into(),
rope: Rope::from_str(&source).into(),
source: source.into(),
scopes: parser::scopes::build(&tree).into(),
ast: tree.into(),
token_refs: Default::default(),
})
}
Err(error) => {
eprintln!(" Preprocessing error in {uri}:\n{error}");
None
@@ -149,11 +161,23 @@ pub(super) fn preprocess_pending(
.iter()
.filter_map(|(uri, source)| {
match crate::pre::process(
uri.clone(),
&uri,
source.clone(),
&r_config.preprocessor.global_shader_defs,
) {
Ok(bundle) => Some(bundle),
Ok((source, pruned_source, tokens, comments, errors, scopes, tree)) => {
Some(WgslDocumentBundle {
comments: comments.into(),
errors: errors.into(),
tokens: tokens.into(),
uri: uri.clone().into(),
rope: Rope::from_str(&source).into(),
source: source.into(),
scopes: scopes.into(),
ast: tree.into(),
token_refs: Default::default(),
})
}
Err(error) => {
eprintln!(" Preprocessing error in {uri}:\n{error}");
None
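
Both call sites in intake.rs now destructure the seven-element tuple returned by `crate::pre::process` and assemble a `WgslDocumentBundle` field by field. The sketch below is hypothetical, not code from this commit: `Bundle`, `bundle_from_parts`, and the stand-in types are placeholders (the real `WgslDocumentBundle`, gramatika, and parser types are not defined in this diff, and the first tuple element is a `gramatika::ArcStr` rather than a `String`). It only illustrates the repeated `.into()`-into-`Arc` conversion pattern that a small constructor helper could centralize.

```rust
// Hypothetical sketch: `Bundle` stands in for `WgslDocumentBundle`, whose real
// field types are not visible in this diff. It shows the pattern both call
// sites repeat: take the wide tuple from the preprocessor and convert each
// part into an Arc-wrapped field via `.into()`.
use std::sync::Arc;

use ropey::Rope;

// Stand-ins for the parser types referenced in the diff.
struct Token;
struct Comment;
struct SpannedError;
struct Scope;
struct SyntaxTree;

struct Bundle {
    uri: Arc<str>,
    rope: Arc<Rope>,
    source: Arc<str>,
    tokens: Arc<Vec<Token>>,
    comments: Arc<Vec<Comment>>,
    errors: Arc<Vec<SpannedError>>,
    scopes: Arc<Scope>,
    ast: Arc<SyntaxTree>,
}

type ProcessOutput = (
    String, // expanded source (ArcStr in the real code)
    String, // pruned source (only logged by the callers)
    Vec<Token>,
    Vec<Comment>,
    Vec<SpannedError>,
    Arc<Scope>,
    SyntaxTree,
);

fn bundle_from_parts(uri: &str, parts: ProcessOutput) -> Bundle {
    let (source, _pruned_source, tokens, comments, errors, scopes, tree) = parts;
    Bundle {
        uri: uri.into(),
        rope: Rope::from_str(&source).into(),
        source: source.into(),
        tokens: tokens.into(),
        comments: comments.into(),
        errors: errors.into(),
        scopes,
        ast: tree.into(),
    }
}

fn main() {
    // Minimal usage with empty inputs, just to exercise the conversions.
    let parts: ProcessOutput = (
        String::new(),
        String::new(),
        Vec::new(),
        Vec::new(),
        Vec::new(),
        Arc::new(Scope),
        SyntaxTree,
    );
    let bundle = bundle_from_parts("file:///shader.wgsl", parts);
    assert_eq!(&*bundle.uri, "file:///shader.wgsl");
}
```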
59 changes: 35 additions & 24 deletions packages/server/src/documents/mod.rs
@@ -6,22 +6,22 @@ use bevy_derive::{Deref, DerefMut};
use bevy_ecs::{
component::Component,
entity::Entity,
event::EventReader,
event::{EventReader, Events},
schedule::{common_conditions::resource_exists, IntoSystemConfigs, SystemSet},
system::{IntoSystem, Query, Res, ResMut, Resource},
};
use bevy_utils::{HashMap, HashSet};
use gramatika::{ArcStr, ParseStreamer, SpannedError, Substr};
use bevy_utils::{smallvec::SmallVec, HashMap, HashSet};
use gramatika::{ArcStr, ParseStreamer, Position, SpannedError, Substr};
use lsp_server::{Message, Notification};
use lsp_types::{notification::Notification as _, Url};
use parser::{comment::Comment, scopes::Scope, ParseResult, ParseStream, SyntaxTree, Token};
use parser::{comment::Comment, scopes::Scope, ParseResult, ParseStream, Span, SyntaxTree, Token};
use ropey::Rope;

use crate::{
config::Config,
ipc::{notify, Ipc},
lsp_extensions::{UnreadDependency, UnreadDependencyParams},
utils::{self},
pre, utils,
workspace::Workspace,
};

@@ -48,7 +48,9 @@ impl Plugin for DocumentsPlugin {
intake::preprocess_pending
.pipe(intake::process_documents)
.run_if(resource_exists::<Config>),
update_documents.map(utils::log_error),
update_documents
.map(utils::log_error)
.run_if(resource_exists::<Config>),
)
.in_set(DocumentIntakeSystem),
);
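
`update_documents` now borrows `Res<Config>` (see the next hunk), so it is gated with `run_if(resource_exists::<Config>)` like the intake systems. The standalone sketch below is not repo code: it assumes bevy_ecs 0.13-style APIs, and `Config`/`update_with_config` are illustrative stand-ins. It shows why the gate matters: an ungated system that takes `Res<Config>` panics if the resource has not been inserted yet, while the run condition simply skips it.

```rust
use bevy_ecs::prelude::*;
use bevy_ecs::schedule::common_conditions::resource_exists;

#[derive(Resource)]
struct Config {
    verbose: bool,
}

// Stand-in for `update_documents`: it cannot run without `Config`.
fn update_with_config(config: Res<Config>) {
    if config.verbose {
        eprintln!("updating documents");
    }
}

fn main() {
    let mut world = World::new();
    let mut schedule = Schedule::default();

    // Without the run condition, running this schedule before `Config` is
    // inserted would panic inside `update_with_config` (missing resource).
    schedule.add_systems(update_with_config.run_if(resource_exists::<Config>));

    schedule.run(&mut world); // system is skipped: no `Config` yet
    world.insert_resource(Config { verbose: true });
    schedule.run(&mut world); // system runs now
}
```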
Expand Down Expand Up @@ -141,6 +143,7 @@ fn request_all_docs_in_workspace(

fn update_documents(
r_documents: Res<DocumentsMap>,
r_config: Res<Config>,
mut er_doc_change: EventReader<notify::DocumentChange>,
mut q_ropes: Query<&mut WgslRope>,
mut q_parse_results: Query<(
@@ -155,7 +158,8 @@ fn update_documents(
for event in er_doc_change.read() {
eprintln!(" Updating document: {}", &event.text_document.uri);

let Some(&entity) = r_documents.get(&event.text_document.uri) else {
let uri = &event.text_document.uri;
let Some(&entity) = r_documents.get(uri) else {
bail!(
" Failed to find document entity for URI: {}",
event.text_document.uri,
@@ -166,19 +170,38 @@

for update in event.content_changes.iter() {
let range = update.range.unwrap();
let span = Span {
start: Position {
line: range.start.line as _,
character: range.start.character as _,
},
end: Position {
line: range.end.line as _,
character: range.end.character as _,
},
};
eprintln!("edit range: ({span:?})");

let start_line = rope.line_to_char(range.start.line as usize);
let edit_start = start_line + range.start.character as usize;
let line_start = rope.line_to_char(range.start.line as usize);
eprintln!("line start: {line_start}");
let edit_start = line_start + range.start.character as usize;
eprintln!("edit start: {edit_start}");

let end_line = rope.line_to_char(range.end.line as usize);
let edit_end = end_line + range.end.character as usize;
let line_end = rope.line_to_char(range.end.line as usize);
eprintln!("line end: {line_end}");
let edit_end = line_end + range.end.character as usize;
eprintln!("edit end: {edit_end}");

if edit_end - edit_start > 0 {
rope.remove(edit_start..edit_end);
}
rope.insert(edit_start, &update.text);
}

let source = ArcStr::from(rope.to_string());
let (source, pruned_source, tokens, comments, errors, scopes, tree) =
pre::process(uri, source, &r_config.preprocessor.global_shader_defs)?;

let (
mut doc_ast,
mut doc_source,
@@ -188,19 +211,7 @@
mut doc_scopes,
) = q_parse_results.get_mut(entity)?;

let (
ast,
ParseResult {
source,
tokens,
comments,
errors,
},
) = parse(rope.to_string());

let scopes = parser::scopes::build(&ast);

*doc_ast = ast.into();
*doc_ast = tree.into();
*doc_source = source.into();
*doc_tokens = tokens.into();
*doc_comments = comments.into();
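
The edit loop in `update_documents` converts each LSP change range into char offsets in the document's rope: `line_to_char` gives the char index of the start of the line, the `character` field is added as an in-line offset, the old span is removed, and the replacement text is inserted. The standalone sketch below (not code from this repo; `apply_change` and the sample text are illustrative) shows the same conversion with `ropey` and `lsp_types`, with the caveat that LSP `character` offsets are UTF-16 code units, so adding them directly to a char index is only exact when the two coincide (e.g. ASCII source).

```rust
// Standalone sketch of what the edit loop above does: translate an LSP `Range`
// (line/character) into char offsets in a `ropey::Rope`, remove the old span,
// and insert the replacement text.
use lsp_types::{Position, Range, TextDocumentContentChangeEvent};
use ropey::Rope;

fn apply_change(rope: &mut Rope, change: &TextDocumentContentChangeEvent) {
    let Some(range) = change.range else {
        // A change without a range replaces the whole document.
        *rope = Rope::from_str(&change.text);
        return;
    };

    // Char index of the first character of the start/end lines...
    let line_start = rope.line_to_char(range.start.line as usize);
    let line_end = rope.line_to_char(range.end.line as usize);
    // ...plus the in-line character offsets (UTF-16 caveat noted above).
    let edit_start = line_start + range.start.character as usize;
    let edit_end = line_end + range.end.character as usize;

    if edit_end > edit_start {
        rope.remove(edit_start..edit_end);
    }
    rope.insert(edit_start, &change.text);
}

fn main() {
    let mut rope = Rope::from_str("let a = 1;\nlet b = 2;\n");
    let change = TextDocumentContentChangeEvent {
        range: Some(Range {
            start: Position { line: 1, character: 4 },
            end: Position { line: 1, character: 5 },
        }),
        range_length: None,
        text: "c".to_string(),
    };
    apply_change(&mut rope, &change);
    assert_eq!(rope.to_string(), "let a = 1;\nlet c = 2;\n");
}
```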
95 changes: 72 additions & 23 deletions packages/server/src/pre/mod.rs
@@ -1,46 +1,95 @@
use std::sync::Arc;

use bevy_utils::HashMap;
use gramatika::ArcStr;
use gramatika::{ArcStr, SpannedError};
use lsp_types::Url;
use parser::pre::{self, traversal::Walk};

use crate::{
documents::{IntakeError, WgslDocumentBundle},
pre::pruner::Pruner,
use parser::{
comment::Comment,
pre::{self, traversal::Walk},
scopes::Scope,
ParseResult, ParseStreamer, SyntaxTree, Token,
};
use ropey::Rope;

use crate::{documents::WgslDocumentBundle, pre::pruner::Pruner};

mod interpreter;
mod pruner;

pub fn process(
uri: Url,
uri: &Url,
source: ArcStr,
defs: &HashMap<String, String>,
) -> gramatika::Result<WgslDocumentBundle> {
) -> gramatika::Result<(
ArcStr,
String,
Vec<Token>,
Vec<Comment>,
Vec<SpannedError>,
Arc<Scope>,
SyntaxTree,
)> {
eprintln!(" Preprocessing {uri}");

let (parsed, source, pre_parse_errors) = pre::parse(source);

let mut pruner = Pruner::new(source, defs.clone());
parsed.walk(&mut pruner);

let text = pruner.write_output();
let pruned_source = pruner.write_output();

eprintln!(" Pruned source for {uri}:");
for (idx, line) in text.lines().enumerate() {
for (idx, line) in pruned_source.lines().enumerate() {
eprintln!(" {:>4} | {line}", idx + 1);
}

match WgslDocumentBundle::new(&text, uri.clone()) {
Ok(mut bundle) => {
bundle.errors.extend(pre_parse_errors);
bundle.errors.extend(pruner.errors);

Ok(bundle)
}
Err(error) => match error {
IntakeError::NeedsPreprocessing(_, _, errors) => {
Err(errors.into_iter().next().unwrap())
}
},
}
let mut parser = parser::ParseStream::from(&pruned_source);
let tree = parser.parse::<SyntaxTree>()?;
let ParseResult {
tokens,
comments,
mut errors,
..
} = parser.into_inner();

errors.extend(pre_parse_errors);
errors.extend(pruner.errors);

Ok((
// uri,
pruner.source,
pruned_source,
tokens,
comments,
errors,
parser::scopes::build(&tree),
tree,
))

// Ok(WgslDocumentBundle {
// uri: uri.into(),
// rope: Rope::new().into(),
// source: pruner.source.into(),
// tokens: tokens.into(),
// comments: comments.into(),
// errors: errors.into(),
// scopes: parser::scopes::build(&tree).into(),
// ast: tree.into(),
// token_refs: Default::default(),
// })

// match WgslDocumentBundle::new(&text, uri.clone()) {
// Ok(mut bundle) => {
// bundle.source = pruner.source.into();
// bundle.errors.extend(pre_parse_errors);
// bundle.errors.extend(pruner.errors);

// Ok(bundle)
// }
// Err(error) => match error {
// IntakeError::NeedsPreprocessing(_, _, errors) => {
// Err(errors.into_iter().next().unwrap())
// }
// },
// }
}
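
With this change, `pre::process` runs the directive parser and the `Pruner` over the raw source using the configured global shader defs, parses the pruned text into a `SyntaxTree`, and merges the preprocessor's errors into the parser's, returning everything as a tuple instead of a ready-made `WgslDocumentBundle`. The sketch below is not the project's `Pruner`: it is a deliberately simplified, line-based illustration with a hypothetical `#ifdef`/`#else`/`#endif` syntax, meant only to show what pruning source text against a defs map means.

```rust
// Simplified, standalone illustration of "pruning" source against a map of
// shader defs. NOT the project's `Pruner` (which walks a real directive parse
// tree); it assumes a naive line-based directive syntax purely to show the
// idea: lines inside branches whose condition is not satisfied by `defs` are
// dropped (here, blanked so that line numbers stay stable for diagnostics).
use std::collections::HashMap;

fn prune(source: &str, defs: &HashMap<String, String>) -> String {
    let mut keep_stack = vec![true]; // whether the current region is kept
    let mut out = String::new();

    for line in source.lines() {
        let trimmed = line.trim();
        if let Some(name) = trimmed.strip_prefix("#ifdef ") {
            let parent = *keep_stack.last().unwrap();
            keep_stack.push(parent && defs.contains_key(name.trim()));
            out.push('\n'); // directive lines are always dropped
        } else if trimmed == "#else" {
            let taken = keep_stack.pop().unwrap();
            let parent = *keep_stack.last().unwrap();
            keep_stack.push(parent && !taken);
            out.push('\n');
        } else if trimmed == "#endif" {
            keep_stack.pop();
            out.push('\n');
        } else if *keep_stack.last().unwrap() {
            out.push_str(line);
            out.push('\n');
        } else {
            out.push('\n'); // pruned line: keep only the line break
        }
    }
    out
}

fn main() {
    let mut defs = HashMap::new();
    defs.insert("VERTEX_COLORS".to_string(), "1".to_string());

    let source = "\
#ifdef VERTEX_COLORS
    var color = in.color;
#else
    var color = vec4(1.0);
#endif
";
    print!("{}", prune(source, &defs));
}
```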
