Skip to content

Commit 2da85d3

Browse files
committed
wip simple '//' comment token for 'Struct'
1 parent 5c5c8b1 commit 2da85d3

File tree

4 files changed

+55
-8
lines changed

4 files changed

+55
-8
lines changed

naga/src/front/wgsl/error.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -299,6 +299,7 @@ impl<'a> Error<'a> {
299299
Token::Arrow => "->".to_string(),
300300
Token::Unknown(c) => format!("unknown ('{c}')"),
301301
Token::Trivia => "trivia".to_string(),
302+
Token::Comment(s) => format!("documentation ('{s}')"),
302303
Token::End => "end".to_string(),
303304
}
304305
}

naga/src/front/wgsl/parse/ast.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -186,6 +186,8 @@ pub struct StructMember<'a> {
186186
pub struct Struct<'a> {
187187
pub name: Ident<'a>,
188188
pub members: Vec<StructMember<'a>>,
189+
// TODO: Make it optional ? Store Span ? Add it to other elements
190+
pub comments: Vec<&'a str>,
189191
}
190192

191193
#[derive(Debug)]

naga/src/front/wgsl/parse/lexer.rs

Lines changed: 31 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ pub enum Token<'a> {
2222
Arrow,
2323
Unknown(char),
2424
Trivia,
25+
Comment(&'a str),
2526
End,
2627
}
2728

@@ -81,8 +82,13 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) {
8182
let og_chars = chars.as_str();
8283
match chars.next() {
8384
Some('/') => {
84-
let _ = chars.position(is_comment_end);
85-
(Token::Trivia, chars.as_str())
85+
let og_chars = chars.as_str();
86+
let documentation = if let Some(end_position) = chars.position(is_comment_end) {
87+
&og_chars[..end_position]
88+
} else {
89+
og_chars
90+
};
91+
(Token::Comment(documentation), chars.as_str())
8692
}
8793
Some('*') => {
8894
let mut depth = 1;
@@ -238,7 +244,7 @@ impl<'a> Lexer<'a> {
238244
loop {
239245
// Eat all trivia because `next` doesn't eat trailing trivia.
240246
let (token, rest) = consume_token(self.input, false);
241-
if let Token::Trivia = token {
247+
if let Token::Trivia | Token::Comment(_) = token {
242248
self.input = rest;
243249
} else {
244250
return self.current_byte_offset();
@@ -253,7 +259,27 @@ impl<'a> Lexer<'a> {
253259
(token, rest)
254260
}
255261

256-
const fn current_byte_offset(&self) -> usize {
262+
pub(in crate::front::wgsl) fn start_byte_offset_and_aggregate_comment(
263+
&'a mut self,
264+
comments: &mut Vec<Span>,
265+
) -> usize {
266+
loop {
267+
let start = self.current_byte_offset();
268+
// Eat all trivia because `next` doesn't eat trailing trivia.
269+
let (token, rest) = consume_token(self.input, false);
270+
if let Token::Comment(_) = token {
271+
let next = self.current_byte_offset();
272+
comments.push(Span::new(start as u32, next as u32));
273+
self.input = rest;
274+
} else if let Token::Trivia = token {
275+
self.input = rest;
276+
} else {
277+
return self.current_byte_offset();
278+
}
279+
}
280+
}
281+
282+
pub const fn current_byte_offset(&self) -> usize {
257283
self.source.len() - self.input.len()
258284
}
259285

@@ -288,7 +314,7 @@ impl<'a> Lexer<'a> {
288314
let (token, rest) = consume_token(self.input, generic);
289315
self.input = rest;
290316
match token {
291-
Token::Trivia => start_byte_offset = self.current_byte_offset(),
317+
Token::Trivia | Token::Comment(_) => start_byte_offset = self.current_byte_offset(),
292318
_ => {
293319
self.last_end_offset = self.current_byte_offset();
294320
return (token, self.span_from(start_byte_offset));

naga/src/front/wgsl/parse/mod.rs

Lines changed: 21 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
use std::ops::Index;
2+
13
use crate::front::wgsl::error::{Error, ExpectedToken};
24
use crate::front::wgsl::parse::lexer::{Lexer, Token};
35
use crate::front::wgsl::parse::number::Number;
@@ -2172,6 +2174,9 @@ impl Parser {
21722174
lexer: &mut Lexer<'a>,
21732175
out: &mut ast::TranslationUnit<'a>,
21742176
) -> Result<(), Error<'a>> {
2177+
// Save a lexer to be able to backtrack comments if need be.
2178+
let mut lexer_comments = lexer.clone();
2179+
21752180
// read attributes
21762181
let mut binding = None;
21772182
let mut stage = ParsedAttribute::default();
@@ -2251,7 +2256,6 @@ impl Parser {
22512256
(_, word_span) => return Err(Error::UnknownAttribute(word_span)),
22522257
}
22532258
}
2254-
22552259
let attrib_span = self.pop_rule_span(lexer);
22562260
match (bind_group.value, bind_index.value) {
22572261
(Some(group), Some(index)) => {
@@ -2267,13 +2271,27 @@ impl Parser {
22672271

22682272
// read item
22692273
let start = lexer.start_byte_offset();
2270-
let kind = match lexer.next() {
2274+
let token_span = lexer.next();
2275+
2276+
let kind = match token_span {
22712277
(Token::Separator(';'), _) => None,
22722278
(Token::Word("struct"), _) => {
22732279
let name = lexer.next_ident()?;
22742280

22752281
let members = self.struct_body(lexer, &mut ctx)?;
2276-
Some(ast::GlobalDeclKind::Struct(ast::Struct { name, members }))
2282+
2283+
let mut comments = Vec::new();
2284+
lexer_comments.start_byte_offset_and_aggregate_comment(&mut comments);
2285+
2286+
let comments = comments
2287+
.into_iter()
2288+
.map(|comment_span| lexer.source.index(comment_span))
2289+
.collect();
2290+
Some(ast::GlobalDeclKind::Struct(ast::Struct {
2291+
name,
2292+
members,
2293+
comments,
2294+
}))
22772295
}
22782296
(Token::Word("alias"), _) => {
22792297
let name = lexer.next_ident()?;

0 commit comments

Comments (0)