diff --git a/CHANGELOG.md b/CHANGELOG.md index b09aeb297b..f24ba37378 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -139,6 +139,7 @@ By @cwfitzgerald in [#7030](https://github.com/gfx-rs/wgpu/pull/7030). - Support @must_use attribute on function declarations. By @turbocrime in [#6801](https://github.com/gfx-rs/wgpu/pull/6801). - Support for generating the candidate intersections from AABB geometry, and confirming the hits. By @kvark in [#7047](https://github.com/gfx-rs/wgpu/pull/7047). - Make naga::back::spv::Function::to_words write the OpFunctionEnd instruction in itself, instead of making another call after it. By @junjunjd in [#7156](https://github.com/gfx-rs/wgpu/pull/7156). +- Support comment parsing for WGSL through `naga::front::wgsl::Frontend::new_with_options`. By @Vrixyz in [#6364](https://github.com/gfx-rs/wgpu/pull/6364). ### Changes diff --git a/naga/src/compact/mod.rs b/naga/src/compact/mod.rs index c9a7c93977..71eb0094cb 100644 --- a/naga/src/compact/mod.rs +++ b/naga/src/compact/mod.rs @@ -264,6 +264,48 @@ pub fn compact(module: &mut crate::Module) { module_map.global_expressions.adjust(init); } } + // Adjust comments + if let Some(ref mut comments) = module.comments { + let crate::Comments { + module: _, + types: ref mut comment_types, + struct_members: ref mut comment_struct_members, + entry_points: _, + functions: _, + constants: ref mut comment_constants, + global_variables: _, + } = **comments; + log::trace!("adjusting comments for types"); + for (mut comment_type_handle, comment) in std::mem::take(comment_types) { + if !module_map.types.used(comment_type_handle) { + continue; + } + module_map.types.adjust(&mut comment_type_handle); + comment_types.insert(comment_type_handle, comment); + } + log::trace!("adjusting comments for struct members"); + for (mut comment_struct_member_handle, comment) in std::mem::take(comment_struct_members) { + if !module_map.types.used(comment_struct_member_handle.0) { + continue; + } + module_map.types.adjust(&mut comment_struct_member_handle.0); + comment_struct_members.insert( + ( + comment_struct_member_handle.0, + comment_struct_member_handle.1, + ), + comment, + ); + } + log::trace!("adjusting comments for constants"); + for (mut comment_constant_handle, comment) in std::mem::take(comment_constants) { + if !module_map.constants.used(comment_constant_handle) { + continue; + } + module_map.constants.adjust(&mut comment_constant_handle); + comment_constants.insert(comment_constant_handle, comment); + } + } // Temporary storage to help us reuse allocations of existing // named expression tables. diff --git a/naga/src/front/mod.rs b/naga/src/front/mod.rs index 11c8aa047e..0fe70cab61 100644 --- a/naga/src/front/mod.rs +++ b/naga/src/front/mod.rs @@ -1,5 +1,5 @@ /*! -Frontend parsers that consume binary and text shaders and load them into [`Module`](super::Module)s. +Frontend parsers that consume binary and text shaders and load them into [`Module`]s.
*/ mod interpolator; @@ -328,3 +328,12 @@ impl fmt::Debug for SymbolTable { .finish() } } + +use crate::{Comments, Module}; + +impl Module { + pub fn get_comments_or_insert_default(&mut self) -> &mut Box<Comments> { + self.comments + .get_or_insert_with(|| Box::new(Comments::default())) + } +} diff --git a/naga/src/front/wgsl/error.rs b/naga/src/front/wgsl/error.rs index 7bdbf12d2c..0ef9b5ae59 100644 --- a/naga/src/front/wgsl/error.rs +++ b/naga/src/front/wgsl/error.rs @@ -377,6 +377,8 @@ impl<'a> Error<'a> { Token::Arrow => "->".to_string(), Token::Unknown(c) => format!("unknown (`{c}`)"), Token::Trivia => "trivia".to_string(), + Token::CommentDoc(s) => format!("documentation ('{s}')"), + Token::CommentDocModule(s) => format!("module documentation ('{s}')"), Token::End => "end".to_string(), }, ExpectedToken::Identifier => "identifier".to_string(), diff --git a/naga/src/front/wgsl/lower/mod.rs b/naga/src/front/wgsl/lower/mod.rs index 70900e0db3..38e6b3eb44 100644 --- a/naga/src/front/wgsl/lower/mod.rs +++ b/naga/src/front/wgsl/lower/mod.rs @@ -957,7 +957,7 @@ enum LoweredGlobalDecl { Const(Handle<crate::Constant>), Override(Handle<crate::Override>), Type(Handle<crate::Type>), - EntryPoint, + EntryPoint(usize), } enum Texture { @@ -1057,6 +1057,10 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { layouter: &mut Layouter::default(), global_expression_kind_tracker: &mut crate::proc::ExpressionKindTracker::new(), }; + if !tu.comments.is_empty() { + ctx.module.get_comments_or_insert_default().module = + tu.comments.iter().map(|s| s.to_string()).collect(); + } for decl_handle in self.index.visit_ordered() { let span = tu.decls.get_span(decl_handle); @@ -1065,6 +1069,29 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { match decl.kind { ast::GlobalDeclKind::Fn(ref f) => { let lowered_decl = self.function(f, span, &mut ctx)?; + if !f.comments.is_empty() { + match lowered_decl { + LoweredGlobalDecl::Function { handle, ..
} => { + ctx.module + .get_comments_or_insert_default() + .functions + .insert( + handle, + f.comments.iter().map(|s| s.to_string()).collect(), + ); + } + LoweredGlobalDecl::EntryPoint(index) => { + ctx.module + .get_comments_or_insert_default() + .entry_points + .insert( + index, + f.comments.iter().map(|s| s.to_string()).collect(), + ); + } + _ => {} + } + } ctx.globals.insert(f.name.name, lowered_decl); } ast::GlobalDeclKind::Var(ref v) => { @@ -1095,6 +1122,12 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { span, ); + if !v.comments.is_empty() { + ctx.module + .get_comments_or_insert_default() + .global_variables + .insert(handle, v.comments.iter().map(|s| s.to_string()).collect()); + } ctx.globals .insert(v.name.name, LoweredGlobalDecl::Var(handle)); } @@ -1120,6 +1153,12 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { ctx.globals .insert(c.name.name, LoweredGlobalDecl::Const(handle)); + if !c.comments.is_empty() { + ctx.module + .get_comments_or_insert_default() + .constants + .insert(handle, c.comments.iter().map(|s| s.to_string()).collect()); + } } ast::GlobalDeclKind::Override(ref o) => { let explicit_ty = @@ -1160,6 +1199,12 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let handle = self.r#struct(s, span, &mut ctx)?; ctx.globals .insert(s.name.name, LoweredGlobalDecl::Type(handle)); + if !s.comments.is_empty() { + ctx.module + .get_comments_or_insert_default() + .types + .insert(handle, s.comments.iter().map(|s| s.to_string()).collect()); + } } ast::GlobalDeclKind::Type(ref alias) => { let ty = self.resolve_named_ast_type( @@ -1375,7 +1420,9 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { workgroup_size_overrides, function, }); - Ok(LoweredGlobalDecl::EntryPoint) + Ok(LoweredGlobalDecl::EntryPoint( + ctx.module.entry_points.len() - 1, + )) } else { let handle = ctx.module.functions.append(function, span); Ok(LoweredGlobalDecl::Function { @@ -1933,7 +1980,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { } LoweredGlobalDecl::Function { .. 
} | LoweredGlobalDecl::Type(_) - | LoweredGlobalDecl::EntryPoint => { + | LoweredGlobalDecl::EntryPoint(_) => { return Err(Error::Unexpected(span, ExpectedToken::Variable)); } }; @@ -2193,7 +2240,7 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { | &LoweredGlobalDecl::Override(_) | &LoweredGlobalDecl::Var(_), ) => Err(Error::Unexpected(function_span, ExpectedToken::Function)), - Some(&LoweredGlobalDecl::EntryPoint) => Err(Error::CalledEntryPoint(function_span)), + Some(&LoweredGlobalDecl::EntryPoint(_)) => Err(Error::CalledEntryPoint(function_span)), Some(&LoweredGlobalDecl::Function { handle: function, must_use, @@ -3058,6 +3105,8 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { let mut struct_alignment = Alignment::ONE; let mut members = Vec::with_capacity(s.members.len()); + let mut comments: Vec>> = Vec::new(); + for member in s.members.iter() { let ty = self.resolve_ast_type(member.ty, &mut ctx.as_const())?; @@ -3097,6 +3146,11 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { offset = member_alignment.round_up(offset); struct_alignment = struct_alignment.max(member_alignment); + if !member.comments.is_empty() { + comments.push(Some( + member.comments.iter().map(|s| s.to_string()).collect(), + )); + } members.push(crate::StructMember { name: Some(member.name.name.to_owned()), ty, @@ -3120,6 +3174,12 @@ impl<'source, 'temp> Lowerer<'source, 'temp> { }, span, ); + for (i, c) in comments.drain(..).enumerate() { + if let Some(comment) = c { + let comments = ctx.module.get_comments_or_insert_default(); + comments.struct_members.insert((handle, i), comment); + } + } Ok(handle) } diff --git a/naga/src/front/wgsl/mod.rs b/naga/src/front/wgsl/mod.rs index 7c7c0ba88f..4863181095 100644 --- a/naga/src/front/wgsl/mod.rs +++ b/naga/src/front/wgsl/mod.rs @@ -19,6 +19,7 @@ use thiserror::Error; pub use crate::front::wgsl::error::ParseError; use crate::front::wgsl::lower::Lowerer; use crate::Scalar; +pub use parse::Options; pub use crate::front::wgsl::parse::directive::language_extension::{ ImplementedLanguageExtension, LanguageExtension, UnimplementedLanguageExtension, @@ -26,12 +27,20 @@ pub use crate::front::wgsl::parse::directive::language_extension::{ pub struct Frontend { parser: Parser, + options: Options, } impl Frontend { pub const fn new() -> Self { Self { parser: Parser::new(), + options: Options::new(), + } + } + pub const fn new_with_options(options: Options) -> Self { + Self { + parser: Parser::new(), + options, } } @@ -40,7 +49,7 @@ impl Frontend { } fn inner<'a>(&mut self, source: &'a str) -> Result> { - let tu = self.parser.parse(source)?; + let tu = self.parser.parse(source, &self.options)?; let index = index::Index::generate(&tu)?; let module = Lowerer::new(&index).lower(&tu)?; diff --git a/naga/src/front/wgsl/parse/ast.rs b/naga/src/front/wgsl/parse/ast.rs index 219dd856e7..e34fc6ed15 100644 --- a/naga/src/front/wgsl/parse/ast.rs +++ b/naga/src/front/wgsl/parse/ast.rs @@ -38,6 +38,10 @@ pub struct TranslationUnit<'a> { /// See [`DiagnosticFilterNode`] for details on how the tree is represented and used in /// validation. pub diagnostic_filter_leaf: Option>, + + /// Comments appearing first in the file. + /// This serves as documentation for the whole TranslationUnit. 
+ pub comments: Vec<&'a str>, } #[derive(Debug, Clone, Copy)] @@ -135,6 +139,7 @@ pub struct Function<'a> { pub result: Option>, pub body: Block<'a>, pub diagnostic_filter_leaf: Option>, + pub comments: Vec<&'a str>, } #[derive(Debug)] @@ -161,6 +166,7 @@ pub struct GlobalVariable<'a> { pub binding: Option>, pub ty: Option>>, pub init: Option>>, + pub comments: Vec<&'a str>, } #[derive(Debug)] @@ -170,12 +176,15 @@ pub struct StructMember<'a> { pub binding: Option>, pub align: Option>>, pub size: Option>>, + pub comments: Vec<&'a str>, } #[derive(Debug)] pub struct Struct<'a> { pub name: Ident<'a>, pub members: Vec>, + // TODO: Make it optional ? Store Span ? Add it to other elements + pub comments: Vec<&'a str>, } #[derive(Debug)] @@ -189,6 +198,7 @@ pub struct Const<'a> { pub name: Ident<'a>, pub ty: Option>>, pub init: Handle>, + pub comments: Vec<&'a str>, } #[derive(Debug)] diff --git a/naga/src/front/wgsl/parse/lexer.rs b/naga/src/front/wgsl/parse/lexer.rs index d55720972e..5783fc2d59 100644 --- a/naga/src/front/wgsl/parse/lexer.rs +++ b/naga/src/front/wgsl/parse/lexer.rs @@ -4,7 +4,6 @@ use crate::front::wgsl::parse::directive::enable_extension::EnableExtensions; use crate::front::wgsl::parse::{conv, Number}; use crate::front::wgsl::Scalar; use crate::Span; - type TokenSpan<'a> = (Token<'a>, Span); #[derive(Copy, Clone, Debug, PartialEq)] @@ -23,6 +22,8 @@ pub enum Token<'a> { Arrow, Unknown(char), Trivia, + CommentDoc(&'a str), + CommentDocModule(&'a str), End, } @@ -46,7 +47,7 @@ fn consume_any(input: &str, what: impl Fn(char) -> bool) -> (&str, &str) { /// `Token::LogicalOperation` tokens. /// /// [§3.1 Parsing]: https://gpuweb.github.io/gpuweb/wgsl/#parsing -fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) { +fn consume_token(input: &str, generic: bool, save_comments: bool) -> (Token<'_>, &str) { let mut chars = input.chars(); let cur = match chars.next() { Some(c) => c, @@ -82,20 +83,64 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) { let og_chars = chars.as_str(); match chars.next() { Some('/') => { - let _ = chars.position(is_comment_end); - (Token::Trivia, chars.as_str()) + let end_position = { + if let Some(end_position) = input + .char_indices() + .find(|char_indices| is_comment_end(char_indices.1)) + { + end_position.0 + } else { + input.len() + } + }; + if !save_comments { + return (Token::Trivia, &input[end_position..]); + } + let next_char = chars.next(); + ( + match next_char { + Some('/') => Token::CommentDoc(&input[..end_position]), + Some('!') => Token::CommentDocModule(&input[..end_position]), + _ => Token::Trivia, + }, + &input[end_position..], + ) } Some('*') => { let mut depth = 1; let mut prev = None; - - for c in &mut chars { + let mut char_indices = input.char_indices(); + + // Skip '/' and '*' + char_indices.next(); + char_indices.next(); + + let mut constructing_token = if !save_comments { + Token::Trivia + } else { + let next_char = char_indices + .clone() + .next() + .map(|peeked_next_char| peeked_next_char.1); + match next_char { + Some('*') => Token::CommentDoc(""), + Some('!') => Token::CommentDocModule(""), + _ => Token::Trivia, + } + }; + for (index, c) in char_indices { match (prev, c) { (Some('*'), '/') => { prev = None; depth -= 1; if depth == 0 { - return (Token::Trivia, chars.as_str()); + if let Token::CommentDoc(ref mut doc) + | Token::CommentDocModule(ref mut doc) = constructing_token + { + *doc = &input[..=index]; + } + + return (constructing_token, &input[(index + 1)..]); } } (Some('/'), '*') => { @@ 
-168,6 +213,7 @@ fn consume_token(input: &str, generic: bool) -> (Token<'_>, &str) { /// Returns whether or not a char is a comment end /// (Unicode Pattern_White_Space excluding U+0020, U+0009, U+200E and U+200F) +/// https://www.w3.org/TR/WGSL/#line-break const fn is_comment_end(c: char) -> bool { match c { '\u{000a}'..='\u{000d}' | '\u{0085}' | '\u{2028}' | '\u{2029}' => true, @@ -218,17 +264,22 @@ pub(in crate::front::wgsl) struct Lexer<'a> { /// statements. last_end_offset: usize, + /// Whether or not to save comments as we lexe through them. + /// If `false`, comments are saved as [`Token::Trivia`]. + save_comments: bool, + #[allow(dead_code)] pub(in crate::front::wgsl) enable_extensions: EnableExtensions, } impl<'a> Lexer<'a> { - pub(in crate::front::wgsl) const fn new(input: &'a str) -> Self { + pub(in crate::front::wgsl) const fn new(input: &'a str, save_comments: bool) -> Self { Lexer { input, source: input, last_end_offset: 0, enable_extensions: EnableExtensions::empty(), + save_comments, } } @@ -254,8 +305,8 @@ impl<'a> Lexer<'a> { pub(in crate::front::wgsl) fn start_byte_offset(&mut self) -> usize { loop { // Eat all trivia because `next` doesn't eat trailing trivia. - let (token, rest) = consume_token(self.input, false); - if let Token::Trivia = token { + let (token, rest) = consume_token(self.input, false, self.save_comments); + if let Token::Trivia | Token::CommentDoc(_) | Token::CommentDocModule(_) = token { self.input = rest; } else { return self.current_byte_offset(); @@ -270,7 +321,29 @@ impl<'a> Lexer<'a> { (token, rest) } - const fn current_byte_offset(&self) -> usize { + /// Collect all doc comments until a non doc token is found. + pub(in crate::front::wgsl) fn accumulate_doc_item_comments(&'a mut self) -> Vec { + let mut comments = Vec::new(); + loop { + let start = self.current_byte_offset(); + // Eat all trivia because `next` doesn't eat trailing trivia. + let (token, rest) = consume_token(self.input, false, self.save_comments); + if let Token::CommentDoc(_) = token { + self.input = rest; + let next = self.current_byte_offset(); + comments.push(Span::new(start as u32, next as u32)); + } else if let Token::Trivia = token { + self.input = rest; + } else if let Token::CommentDocModule(_) = token { + self.input = rest; + // TODO: return an error ? + } else { + return comments; + } + } + } + + pub const fn current_byte_offset(&self) -> usize { self.source.len() - self.input.len() } @@ -300,17 +373,30 @@ impl<'a> Lexer<'a> { /// /// See [`consume_token`] for the meaning of `generic`. fn next_impl(&mut self, generic: bool) -> TokenSpan<'a> { + self.next_until( + |token| { + !matches!( + token, + Token::Trivia | Token::CommentDoc(_) | Token::CommentDocModule(_) + ) + }, + generic, + ) + } + + /// Return the next token from `self` for which `stop_at` returns true. + /// + /// See [`consume_token`] for the meaning of `generic`. 
+ pub fn next_until(&mut self, stop_at: fn(Token) -> bool, generic: bool) -> TokenSpan<'a> { let mut start_byte_offset = self.current_byte_offset(); loop { - let (token, rest) = consume_token(self.input, generic); + let (token, rest) = consume_token(self.input, generic, self.save_comments); self.input = rest; - match token { - Token::Trivia => start_byte_offset = self.current_byte_offset(), - _ => { - self.last_end_offset = self.current_byte_offset(); - return (token, self.span_from(start_byte_offset)); - } + if stop_at(token) { + self.last_end_offset = self.current_byte_offset(); + return (token, self.span_from(start_byte_offset)); } + start_byte_offset = self.current_byte_offset(); } } @@ -484,9 +570,35 @@ impl<'a> Lexer<'a> { #[cfg(test)] #[track_caller] fn sub_test(source: &str, expected_tokens: &[Token]) { - let mut lex = Lexer::new(source); + sub_test_with_comments(false, source, expected_tokens); +} + +#[cfg(test)] +#[track_caller] +fn sub_test_with_and_without_comments(source: &str, expected_tokens: &[Token]) { + sub_test_with_comments(true, source, expected_tokens); + sub_test_with_comments( + false, + source, + expected_tokens + .iter() + .filter(|v| !matches!(**v, Token::CommentDoc(_) | Token::CommentDocModule(_))) + .cloned() + .collect::>() + .as_slice(), + ); +} + +#[cfg(test)] +#[track_caller] +fn sub_test_with_comments(with_comments: bool, source: &str, expected_tokens: &[Token]) { + let mut lex = Lexer::new(source, with_comments); for &token in expected_tokens { - assert_eq!(lex.next().0, token); + assert_eq!( + lex.next_until(|token| !matches!(token, Token::Trivia), false) + .0, + token + ); } assert_eq!(lex.next().0, Token::End); } @@ -702,11 +814,13 @@ fn test_tokens() { sub_test("No¾", &[Token::Word("No"), Token::Unknown('¾')]); sub_test("No好", &[Token::Word("No好")]); sub_test("_No", &[Token::Word("_No")]); - sub_test( + + sub_test_with_and_without_comments( "*/*/***/*//=/*****//", &[ Token::Operation('*'), Token::AssignmentOperation('/'), + Token::CommentDoc("/*****/"), Token::Operation('/'), ], ); @@ -772,3 +886,132 @@ fn test_variable_decl() { ], ); } + +#[test] +fn test_comments_trivia() { + sub_test_with_and_without_comments("// Single comment", &[]); + + sub_test_with_and_without_comments( + "/* multi + line + comment */", + &[], + ); + sub_test_with_and_without_comments( + "/* multi + line + comment */ + // and another", + &[], + ); +} + +#[test] +fn test_comments() { + sub_test_with_and_without_comments( + "/// Single comment", + &[Token::CommentDoc("/// Single comment")], + ); + + sub_test_with_and_without_comments( + "/** multi + line + comment */", + &[Token::CommentDoc( + "/** multi + line + comment */", + )], + ); + sub_test_with_and_without_comments( + "/** multi + line + comment */ + /// and another", + &[ + Token::CommentDoc( + "/** multi + line + comment */", + ), + Token::CommentDoc("/// and another"), + ], + ); +} + +#[test] +fn test_comment_nested() { + sub_test_with_and_without_comments( + "/** + a comment with nested one /** + nested comment + */ + */ + const a : i32 = 2;", + &[ + Token::CommentDoc( + "/** + a comment with nested one /** + nested comment + */ + */", + ), + Token::Word("const"), + Token::Word("a"), + Token::Separator(':'), + Token::Word("i32"), + Token::Operation('='), + Token::Number(Ok(Number::AbstractInt(2))), + Token::Separator(';'), + ], + ); +} + +#[test] +fn test_comment_long_character() { + sub_test_with_and_without_comments( + "/// π/2 + /// D(𝐡) = ─────────────────────────────────────────────────── +/// παₜα_b((𝐡 ⋅ 𝐭)² / 
αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +` + const a : i32 = 2;", + &[ + Token::CommentDoc("/// π/2"), + Token::CommentDoc("/// D(𝐡) = ───────────────────────────────────────────────────"), + Token::CommentDoc("/// παₜα_b((𝐡 ⋅ 𝐭)² / αₜ²) + (𝐡 ⋅ 𝐛)² / α_b² +`"), + Token::Word("const"), + Token::Word("a"), + Token::Separator(':'), + Token::Word("i32"), + Token::Operation('='), + Token::Number(Ok(Number::AbstractInt(2))), + Token::Separator(';'), + ], + ); +} + +#[test] +fn test_comments_module() { + sub_test_with_and_without_comments( + "//! Comment Module + //! Another one. + /*! Different module comment */ + /// Trying to break module comment + // Trying to break module comment again + //! After a regular comment is ok. + /*! Different module comment again */ + + //! After a break is supported. + const + //! After anything else is not.", + &[ + Token::CommentDocModule("//! Comment Module"), + Token::CommentDocModule("//! Another one."), + Token::CommentDocModule("/*! Different module comment */"), + Token::CommentDoc("/// Trying to break module comment"), + Token::CommentDocModule("//! After a regular comment is ok."), + Token::CommentDocModule("/*! Different module comment again */"), + Token::CommentDocModule("//! After a break is supported."), + Token::Word("const"), + ], + ); +} diff --git a/naga/src/front/wgsl/parse/mod.rs b/naga/src/front/wgsl/parse/mod.rs index 23b2984e75..ba7bb42939 100644 --- a/naga/src/front/wgsl/parse/mod.rs +++ b/naga/src/front/wgsl/parse/mod.rs @@ -8,6 +8,8 @@ use crate::front::wgsl::parse::directive::enable_extension::{ }; use crate::front::wgsl::parse::directive::language_extension::LanguageExtension; use crate::front::wgsl::parse::directive::DirectiveKind; +use std::ops::Index; + use crate::front::wgsl::parse::lexer::{Lexer, Token}; use crate::front::wgsl::parse::number::Number; use crate::front::wgsl::Scalar; @@ -259,6 +261,21 @@ impl<'a> BindingParser<'a> { } } +/// Configuration for the whole parser run. +pub struct Options { + /// Controls whether the parser should parse comments. + pub parse_comments: bool, +} + +impl Options { + /// Creates a new [`Options`] without comments parsing. + pub const fn new() -> Self { + Options { + parse_comments: false, + } + } +} + pub struct Parser { rules: Vec<(Rule, usize)>, recursion_depth: u32, @@ -1162,6 +1179,7 @@ impl Parser { binding: None, ty, init, + comments: Vec::new(), }) } @@ -1182,6 +1200,9 @@ impl Parser { ExpectedToken::Token(Token::Separator(',')), )); } + // Save a lexer to be able to backtrack comments if need be. + let mut lexer_comments = lexer.clone(); + let (mut size, mut align) = (ParsedAttribute::default(), ParsedAttribute::default()); self.push_rule_span(Rule::Attribute, lexer); let mut bind_parser = BindingParser::default(); @@ -1211,12 +1232,20 @@ impl Parser { let ty = self.type_decl(lexer, ctx)?; ready = lexer.skip(Token::Separator(',')); + let comments = lexer_comments.accumulate_doc_item_comments(); + + let comments = comments + .into_iter() + .map(|comment_span| lexer.source.index(comment_span)) + .collect(); + members.push(ast::StructMember { name, ty, binding, size: size.value, align: align.value, + comments, }); if !member_names.insert(name.name) { @@ -2455,6 +2484,7 @@ impl Parser { result, body, diagnostic_filter_leaf, + comments: Vec::new(), }; // done @@ -2497,6 +2527,9 @@ impl Parser { lexer: &mut Lexer<'a>, out: &mut ast::TranslationUnit<'a>, ) -> Result<(), Error<'a>> { + // Save a lexer to be able to backtrack comments if need be. 
+ let mut lexer_comments = lexer.clone(); + // read attributes let mut binding = None; let mut stage = ParsedAttribute::default(); @@ -2599,7 +2632,6 @@ impl Parser { _ => return Err(Error::UnknownAttribute(name_span)), } } - let attrib_span = self.pop_rule_span(lexer); match (bind_group.value, bind_index.value) { (Some(group), Some(index)) => { @@ -2615,7 +2647,8 @@ impl Parser { // read item let start = lexer.start_byte_offset(); - let kind = match lexer.next() { + let token_span = lexer.next(); + let kind = match token_span { (Token::Separator(';'), _) => { ensure_no_diag_attrs( DiagnosticAttributeNotSupportedPosition::SemicolonInModulePosition, @@ -2632,7 +2665,18 @@ impl Parser { let name = lexer.next_ident()?; let members = self.struct_body(lexer, &mut ctx)?; - Some(ast::GlobalDeclKind::Struct(ast::Struct { name, members })) + + let comments = lexer_comments.accumulate_doc_item_comments(); + + let comments = comments + .into_iter() + .map(|comment_span| lexer.source.index(comment_span)) + .collect(); + Some(ast::GlobalDeclKind::Struct(ast::Struct { + name, + members, + comments, + })) } (Token::Word("alias"), _) => { ensure_no_diag_attrs("`alias`es".into(), diagnostic_filters)?; @@ -2660,7 +2704,19 @@ impl Parser { let init = self.general_expression(lexer, &mut ctx)?; lexer.expect(Token::Separator(';'))?; - Some(ast::GlobalDeclKind::Const(ast::Const { name, ty, init })) + let comments = lexer_comments.accumulate_doc_item_comments(); + + let comments = comments + .into_iter() + .map(|comment_span| lexer.source.index(comment_span)) + .collect(); + + Some(ast::GlobalDeclKind::Const(ast::Const { + name, + ty, + init, + comments, + })) } (Token::Word("override"), _) => { ensure_no_diag_attrs("`override`s".into(), diagnostic_filters)?; @@ -2693,6 +2749,14 @@ impl Parser { let mut var = self.variable_decl(lexer, &mut ctx)?; var.binding = binding.take(); + + let comments = lexer_comments.accumulate_doc_item_comments(); + + let comments = comments + .into_iter() + .map(|comment_span| lexer.source.index(comment_span)) + .collect(); + var.comments = comments; Some(ast::GlobalDeclKind::Var(var)) } (Token::Word("fn"), _) => { @@ -2701,7 +2765,6 @@ impl Parser { diagnostic_filters, out.diagnostic_filter_leaf, ); - let function = self.function_decl( lexer, diagnostic_filter_leaf, @@ -2709,6 +2772,14 @@ impl Parser { out, &mut dependencies, )?; + + let comments = lexer_comments.accumulate_doc_item_comments(); + + let comments = comments + .into_iter() + .map(|comment_span| lexer.source.index(comment_span)) + .collect(); + Some(ast::GlobalDeclKind::Fn(ast::Function { entry_point: if let Some(stage) = stage.value { if stage == ShaderStage::Compute && workgroup_size.value.is_none() { @@ -2722,6 +2793,7 @@ impl Parser { } else { None }, + comments, ..function })) } @@ -2762,14 +2834,34 @@ impl Parser { } } - pub fn parse<'a>(&mut self, source: &'a str) -> Result, Error<'a>> { + pub fn parse<'a>( + &mut self, + source: &'a str, + options: &Options, + ) -> Result, Error<'a>> { self.reset(); - let mut lexer = Lexer::new(source); + let mut lexer = Lexer::new(source, options.parse_comments); let mut tu = ast::TranslationUnit::default(); let mut enable_extensions = EnableExtensions::empty(); let mut diagnostic_filters = DiagnosticFilterMap::new(); + if options.parse_comments { + // Parse module comments. 
+ let mut comments = Vec::new(); + + fn peek_any_next<'a>(lexer: &'a Lexer<'a>) -> (Token<'a>, Span) { + let mut cloned = lexer.clone(); + let token = cloned.next_until(|_| true, false); + token + } + while let (Token::CommentDocModule(_), span) = peek_any_next(&lexer) { + comments.push(lexer.source.index(span)); + let _ = lexer.next_until(|_| true, false); + } + tu.comments = comments; + } + // Parse directives. while let Ok((ident, _directive_ident_span)) = lexer.peek_ident_with_span() { if let Some(kind) = DirectiveKind::from_ident(ident) { @@ -2841,7 +2933,6 @@ } } } - Ok(tu) } diff --git a/naga/src/lib.rs b/naga/src/lib.rs index 2e917d34e0..2131a7a4fd 100644 --- a/naga/src/lib.rs +++ b/naga/src/lib.rs @@ -2394,6 +2394,28 @@ pub enum RayQueryIntersection { Aabb = 3, } +/// Comments preceding items. +/// +/// These can be used to generate automated documentation, +/// IDE hover information, or to translate shaders together with their context comments. +#[derive(Debug, Default, Clone)] +#[cfg_attr(feature = "serialize", derive(Serialize))] +#[cfg_attr(feature = "deserialize", derive(Deserialize))] +#[cfg_attr(feature = "arbitrary", derive(Arbitrary))] +pub struct Comments { + pub types: FastIndexMap<Handle<Type>, Vec<String>>, + // The key is: + // - key.0: the handle to the Struct + // - key.1: the index of the `StructMember`. + pub struct_members: FastIndexMap<(Handle<Type>, usize), Vec<String>>, + pub entry_points: FastIndexMap<usize, Vec<String>>, + pub functions: FastIndexMap<Handle<Function>, Vec<String>>, + pub constants: FastIndexMap<Handle<Constant>, Vec<String>>, + pub global_variables: FastIndexMap<Handle<GlobalVariable>, Vec<String>>, + // Top-level comments, appearing before anything else in the file. + pub module: Vec<String>, +} + /// Shader module. /// /// A module is a set of constants, global variables and functions, as well as @@ -2476,4 +2498,6 @@ pub struct Module { /// See [`DiagnosticFilterNode`] for details on how the tree is represented and used in /// validation.
pub diagnostic_filter_leaf: Option<Handle<DiagnosticFilterNode>>, + /// Comments, usually serving as documentation. + pub comments: Option<Box<Comments>>, } diff --git a/naga/src/valid/handles.rs b/naga/src/valid/handles.rs index 5affa9bff2..db500bb23e 100644 --- a/naga/src/valid/handles.rs +++ b/naga/src/valid/handles.rs @@ -3,13 +3,13 @@ use crate::{ arena::{BadHandle, BadRangeError}, diagnostic_filter::DiagnosticFilterNode, - Handle, + EntryPoint, Handle, }; use crate::non_max_u32::NonMaxU32; use crate::{Arena, UniqueArena}; -use super::ValidationError; +use super::{TypeError, ValidationError}; use std::{convert::TryInto, hash::Hash}; @@ -42,6 +42,7 @@ impl super::Validator { ref global_expressions, ref diagnostic_filters, ref diagnostic_filter_leaf, + ref comments, } = module; // Because types can refer to global expressions and vice versa, to @@ -254,6 +255,73 @@ impl super::Validator { handle.check_valid_for(diagnostic_filters)?; } + if let Some(comments) = comments.as_ref() { + let crate::Comments { + module: _, + types: ref comment_types, + struct_members: ref comment_struct_members, + entry_points: ref comment_entry_points, + functions: ref comment_functions, + constants: ref comment_constants, + global_variables: ref comment_global_variables, + } = **comments; + + for comment_type in comment_types.iter() { + validate_type(*comment_type.0)?; + } + + for comment_struct_member_doc in comment_struct_members.iter() { + validate_type(comment_struct_member_doc.0 .0)?; + let struct_type = types.get_handle(comment_struct_member_doc.0 .0).unwrap(); + match struct_type.inner { + crate::TypeInner::Struct { + ref members, + span: ref _span, + } => { + (0..members.len()) + .contains(&comment_struct_member_doc.0 .1) + .then_some(()) + // TODO: what error should this be? + .ok_or_else(|| ValidationError::Type { + handle: comment_struct_member_doc.0 .0, + name: struct_type.name.as_ref().map_or_else( + || "members length incorrect".to_string(), + |name| name.to_string(), + ), + source: TypeError::InvalidData(comment_struct_member_doc.0 .0), + })?; + } + _ => { + // TODO: internal error? We should never get here. + // If we do, it probably means we forgot to adjust a handle in the compact phase.
+ return Err(ValidationError::Type { + handle: comment_struct_member_doc.0 .0, + name: struct_type + .name + .as_ref() + .map_or_else(|| "Unknown".to_string(), |name| name.to_string()), + source: TypeError::InvalidData(comment_struct_member_doc.0 .0), + }); + } + } + for comment_function in comment_functions.iter() { + Self::validate_function_handle(*comment_function.0, functions)?; + } + for comment_entry_point in comment_entry_points.iter() { + Self::validate_entry_point_index(*comment_entry_point.0, entry_points)?; + } + for comment_constant in comment_constants.iter() { + Self::validate_constant_handle(*comment_constant.0, constants)?; + } + for comment_global_variable in comment_global_variables.iter() { + Self::validate_global_variable_handle( + *comment_global_variable.0, + global_variables, + )?; + } + } + } + Ok(()) } @@ -271,6 +339,13 @@ impl super::Validator { handle.check_valid_for(constants).map(|_| ()) } + fn validate_global_variable_handle( + handle: Handle, + global_variables: &Arena, + ) -> Result<(), InvalidHandleError> { + handle.check_valid_for(global_variables).map(|_| ()) + } + fn validate_override_handle( handle: Handle, overrides: &Arena, @@ -343,6 +418,22 @@ impl super::Validator { Ok(max_expr) } + fn validate_entry_point_index( + entry_point_index: usize, + entry_points: &[EntryPoint], + ) -> Result<(), InvalidHandleError> { + (0..entry_points.len()) + .contains(&entry_point_index) + .then_some(()) + .ok_or_else(|| { + BadHandle { + kind: "EntryPoint", + index: entry_point_index, + } + .into() + }) + } + /// Validate all handles that occur in `expression`, whose handle is `handle`. /// /// If `expression` refers to any `Type`s, return the highest-indexed type diff --git a/naga/tests/in/types_with_comments.wgsl b/naga/tests/in/types_with_comments.wgsl new file mode 100644 index 0000000000..bb38ae27c1 --- /dev/null +++ b/naga/tests/in/types_with_comments.wgsl @@ -0,0 +1,10 @@ +//! Module comment. 
+ +/* + 🍽️ /* + nested comment + */ + */ +@group(0) @binding(0) var mvp_matrix: mat4x4; + // test + var w_mem: mat2x2; \ No newline at end of file diff --git a/naga/tests/out/ir/access.compact.ron b/naga/tests/out/ir/access.compact.ron index 53ba1a3c9b..4eecb3fc25 100644 --- a/naga/tests/out/ir/access.compact.ron +++ b/naga/tests/out/ir/access.compact.ron @@ -2854,4 +2854,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/access.ron b/naga/tests/out/ir/access.ron index 53ba1a3c9b..4eecb3fc25 100644 --- a/naga/tests/out/ir/access.ron +++ b/naga/tests/out/ir/access.ron @@ -2854,4 +2854,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/atomic_i_increment.compact.ron b/naga/tests/out/ir/atomic_i_increment.compact.ron new file mode 100644 index 0000000000..e12573ea48 --- /dev/null +++ b/naga/tests/out/ir/atomic_i_increment.compact.ron @@ -0,0 +1,287 @@ +( + types: [ + ( + name: None, + inner: Scalar(( + kind: Uint, + width: 4, + )), + ), + ( + name: None, + inner: Scalar(( + kind: Bool, + width: 1, + )), + ), + ( + name: None, + inner: Struct( + members: [ + ( + name: None, + ty: 0, + binding: None, + offset: 0, + ), + ], + span: 4, + ), + ), + ( + name: None, + inner: Atomic(( + kind: Uint, + width: 4, + )), + ), + ( + name: None, + inner: Struct( + members: [ + ( + name: None, + ty: 3, + binding: None, + offset: 0, + ), + ], + span: 4, + ), + ), + ], + special_types: ( + ray_desc: None, + ray_intersection: None, + predeclared_types: {}, + ), + constants: [ + ( + name: None, + ty: 0, + init: 0, + ), + ( + name: None, + ty: 1, + init: 1, + ), + ( + name: None, + ty: 0, + init: 2, + ), + ( + name: None, + ty: 1, + init: 3, + ), + ( + name: None, + ty: 0, + init: 4, + ), + ], + overrides: [], + global_variables: [ + ( + name: None, + space: Storage( + access: ("LOAD | STORE"), + ), + binding: Some(( + group: 0, + binding: 0, + )), + ty: 4, + init: None, + ), + ( + name: None, + space: Storage( + access: ("LOAD"), + ), + binding: Some(( + group: 0, + binding: 1, + )), + ty: 2, + init: None, + ), + ], + global_expressions: [ + Literal(U32(0)), + Literal(Bool(false)), + Literal(U32(1)), + Literal(Bool(true)), + ZeroValue(0), + ], + functions: [ + ( + name: None, + arguments: [], + result: None, + local_variables: [ + ( + name: Some("phi_23"), + ty: 0, + init: None, + ), + ( + name: Some("phi_24"), + ty: 0, + init: None, + ), + ], + expressions: [ + GlobalVariable(0), + GlobalVariable(1), + Constant(3), + Constant(1), + Constant(4), + Constant(2), + Constant(0), + AccessIndex( + base: 0, + index: 0, + ), + AccessIndex( + base: 1, + index: 0, + ), + LocalVariable(0), + Load( + pointer: 9, + ), + Load( + pointer: 8, + ), + Binary( + op: GreaterEqual, + left: 10, + right: 11, + ), + AtomicResult( + ty: 0, + comparison: false, + ), + Literal(U32(1)), + Binary( + op: Add, + left: 10, + right: 5, + ), + LocalVariable(1), + Load( + pointer: 16, + ), + Select( + condition: 12, + accept: 3, + reject: 2, + ), + Unary( + op: LogicalNot, + expr: 18, + ), + LocalVariable(0), + LocalVariable(1), + ], + named_expressions: {}, + body: [ + Emit(( + start: 7, + end: 9, + )), + Store( + pointer: 20, + value: 6, + ), + Loop( + body: [ + Emit(( + start: 10, + end: 11, + )), + Emit(( + start: 11, + end: 13, + )), + If( + condition: 12, + accept: [ + Store( + pointer: 21, + value: 4, + ), + ], + reject: [ + Emit(( + start: 13, + end: 14, + )), + 
Atomic( + pointer: 7, + fun: Add, + value: 14, + result: Some(13), + ), + Emit(( + start: 15, + end: 16, + )), + Store( + pointer: 21, + value: 15, + ), + ], + ), + Emit(( + start: 17, + end: 19, + )), + Continue, + ], + continuing: [ + Emit(( + start: 19, + end: 20, + )), + Store( + pointer: 20, + value: 17, + ), + ], + break_if: Some(19), + ), + Return( + value: None, + ), + ], + ), + ], + entry_points: [ + ( + name: "stage::test_atomic_i_increment", + stage: Compute, + early_depth_test: None, + workgroup_size: (32, 1, 1), + function: ( + name: Some("stage::test_atomic_i_increment_wrap"), + arguments: [], + result: None, + local_variables: [], + expressions: [], + named_expressions: {}, + body: [ + Call( + function: 0, + arguments: [], + result: None, + ), + ], + ), + ), + ], + comments: None, +) \ No newline at end of file diff --git a/naga/tests/out/ir/atomic_i_increment.ron b/naga/tests/out/ir/atomic_i_increment.ron new file mode 100644 index 0000000000..73c0b70369 --- /dev/null +++ b/naga/tests/out/ir/atomic_i_increment.ron @@ -0,0 +1,312 @@ +( + types: [ + ( + name: None, + inner: Scalar(( + kind: Uint, + width: 4, + )), + ), + ( + name: None, + inner: Scalar(( + kind: Bool, + width: 1, + )), + ), + ( + name: None, + inner: Pointer( + base: 0, + space: Storage( + access: ("LOAD | STORE"), + ), + ), + ), + ( + name: None, + inner: Struct( + members: [ + ( + name: None, + ty: 0, + binding: None, + offset: 0, + ), + ], + span: 4, + ), + ), + ( + name: None, + inner: Pointer( + base: 3, + space: Storage( + access: ("LOAD | STORE"), + ), + ), + ), + ( + name: None, + inner: Atomic(( + kind: Uint, + width: 4, + )), + ), + ( + name: None, + inner: Struct( + members: [ + ( + name: None, + ty: 5, + binding: None, + offset: 0, + ), + ], + span: 4, + ), + ), + ], + special_types: ( + ray_desc: None, + ray_intersection: None, + predeclared_types: {}, + ), + constants: [ + ( + name: None, + ty: 0, + init: 0, + ), + ( + name: None, + ty: 0, + init: 1, + ), + ( + name: None, + ty: 1, + init: 2, + ), + ( + name: None, + ty: 0, + init: 3, + ), + ( + name: None, + ty: 1, + init: 4, + ), + ( + name: None, + ty: 0, + init: 5, + ), + ], + overrides: [], + global_variables: [ + ( + name: None, + space: Storage( + access: ("LOAD | STORE"), + ), + binding: Some(( + group: 0, + binding: 0, + )), + ty: 6, + init: None, + ), + ( + name: None, + space: Storage( + access: ("LOAD"), + ), + binding: Some(( + group: 0, + binding: 1, + )), + ty: 3, + init: None, + ), + ], + global_expressions: [ + Literal(U32(0)), + Literal(U32(2)), + Literal(Bool(false)), + Literal(U32(1)), + Literal(Bool(true)), + ZeroValue(0), + ], + functions: [ + ( + name: None, + arguments: [], + result: None, + local_variables: [ + ( + name: Some("phi_23"), + ty: 0, + init: None, + ), + ( + name: Some("phi_24"), + ty: 0, + init: None, + ), + ], + expressions: [ + GlobalVariable(0), + GlobalVariable(1), + Constant(4), + Constant(2), + Constant(5), + Constant(3), + Constant(1), + Constant(0), + AccessIndex( + base: 0, + index: 0, + ), + AccessIndex( + base: 1, + index: 0, + ), + LocalVariable(0), + Load( + pointer: 10, + ), + Load( + pointer: 9, + ), + Binary( + op: GreaterEqual, + left: 11, + right: 12, + ), + AtomicResult( + ty: 0, + comparison: false, + ), + Literal(U32(1)), + Binary( + op: Add, + left: 11, + right: 5, + ), + LocalVariable(1), + Load( + pointer: 17, + ), + Select( + condition: 13, + accept: 3, + reject: 2, + ), + Unary( + op: LogicalNot, + expr: 19, + ), + LocalVariable(0), + LocalVariable(1), + ], + named_expressions: {}, 
+ body: [ + Emit(( + start: 8, + end: 10, + )), + Store( + pointer: 21, + value: 7, + ), + Loop( + body: [ + Emit(( + start: 11, + end: 12, + )), + Emit(( + start: 12, + end: 14, + )), + If( + condition: 13, + accept: [ + Store( + pointer: 22, + value: 4, + ), + ], + reject: [ + Emit(( + start: 14, + end: 15, + )), + Atomic( + pointer: 8, + fun: Add, + value: 15, + result: Some(14), + ), + Emit(( + start: 16, + end: 17, + )), + Store( + pointer: 22, + value: 16, + ), + ], + ), + Emit(( + start: 18, + end: 20, + )), + Continue, + ], + continuing: [ + Emit(( + start: 20, + end: 21, + )), + Store( + pointer: 21, + value: 18, + ), + ], + break_if: Some(20), + ), + Return( + value: None, + ), + ], + ), + ], + entry_points: [ + ( + name: "stage::test_atomic_i_increment", + stage: Compute, + early_depth_test: None, + workgroup_size: (32, 1, 1), + function: ( + name: Some("stage::test_atomic_i_increment_wrap"), + arguments: [], + result: None, + local_variables: [], + expressions: [], + named_expressions: {}, + body: [ + Call( + function: 0, + arguments: [], + result: None, + ), + ], + ), + ), + ], + comments: None, +) \ No newline at end of file diff --git a/naga/tests/out/ir/collatz.compact.ron b/naga/tests/out/ir/collatz.compact.ron index 6a7aebe544..4e22cc77e6 100644 --- a/naga/tests/out/ir/collatz.compact.ron +++ b/naga/tests/out/ir/collatz.compact.ron @@ -335,4 +335,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/collatz.ron b/naga/tests/out/ir/collatz.ron index 6a7aebe544..4e22cc77e6 100644 --- a/naga/tests/out/ir/collatz.ron +++ b/naga/tests/out/ir/collatz.ron @@ -335,4 +335,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/const_assert.compact.ron b/naga/tests/out/ir/const_assert.compact.ron index 03c540b601..55bd9d8a16 100644 --- a/naga/tests/out/ir/const_assert.compact.ron +++ b/naga/tests/out/ir/const_assert.compact.ron @@ -54,4 +54,5 @@ entry_points: [], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/const_assert.ron b/naga/tests/out/ir/const_assert.ron index 03c540b601..55bd9d8a16 100644 --- a/naga/tests/out/ir/const_assert.ron +++ b/naga/tests/out/ir/const_assert.ron @@ -54,4 +54,5 @@ entry_points: [], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/diagnostic-filter.compact.ron b/naga/tests/out/ir/diagnostic-filter.compact.ron index 315c8b3423..e4466557b7 100644 --- a/naga/tests/out/ir/diagnostic-filter.compact.ron +++ b/naga/tests/out/ir/diagnostic-filter.compact.ron @@ -57,4 +57,5 @@ ), ], diagnostic_filter_leaf: Some(0), + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/diagnostic-filter.ron b/naga/tests/out/ir/diagnostic-filter.ron index 315c8b3423..e4466557b7 100644 --- a/naga/tests/out/ir/diagnostic-filter.ron +++ b/naga/tests/out/ir/diagnostic-filter.ron @@ -57,4 +57,5 @@ ), ], diagnostic_filter_leaf: Some(0), + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/fetch_depth.compact.ron b/naga/tests/out/ir/fetch_depth.compact.ron index f10ccb94f7..760ea35069 100644 --- a/naga/tests/out/ir/fetch_depth.compact.ron +++ b/naga/tests/out/ir/fetch_depth.compact.ron @@ -197,4 +197,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end 
of file diff --git a/naga/tests/out/ir/fetch_depth.ron b/naga/tests/out/ir/fetch_depth.ron index dc9237beb4..0b6fcf46c4 100644 --- a/naga/tests/out/ir/fetch_depth.ron +++ b/naga/tests/out/ir/fetch_depth.ron @@ -267,4 +267,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/index-by-value.compact.ron b/naga/tests/out/ir/index-by-value.compact.ron index 93a9821426..a0c7158747 100644 --- a/naga/tests/out/ir/index-by-value.compact.ron +++ b/naga/tests/out/ir/index-by-value.compact.ron @@ -376,4 +376,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/index-by-value.ron b/naga/tests/out/ir/index-by-value.ron index 93a9821426..a0c7158747 100644 --- a/naga/tests/out/ir/index-by-value.ron +++ b/naga/tests/out/ir/index-by-value.ron @@ -376,4 +376,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/local-const.compact.ron b/naga/tests/out/ir/local-const.compact.ron index fc3dc81e71..6ef4b33ade 100644 --- a/naga/tests/out/ir/local-const.compact.ron +++ b/naga/tests/out/ir/local-const.compact.ron @@ -142,4 +142,5 @@ entry_points: [], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/local-const.ron b/naga/tests/out/ir/local-const.ron index fc3dc81e71..6ef4b33ade 100644 --- a/naga/tests/out/ir/local-const.ron +++ b/naga/tests/out/ir/local-const.ron @@ -142,4 +142,5 @@ entry_points: [], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/must-use.compact.ron b/naga/tests/out/ir/must-use.compact.ron index 3d51cb0c95..c18594cfc3 100644 --- a/naga/tests/out/ir/must-use.compact.ron +++ b/naga/tests/out/ir/must-use.compact.ron @@ -178,4 +178,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/must-use.ron b/naga/tests/out/ir/must-use.ron index 3d51cb0c95..c18594cfc3 100644 --- a/naga/tests/out/ir/must-use.ron +++ b/naga/tests/out/ir/must-use.ron @@ -178,4 +178,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.compact.ron b/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.compact.ron index 56be2f8ab6..4cccd5991d 100644 --- a/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.compact.ron +++ b/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.compact.ron @@ -129,4 +129,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.ron b/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.ron index 56be2f8ab6..4cccd5991d 100644 --- a/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.ron +++ b/naga/tests/out/ir/overrides-atomicCompareExchangeWeak.ron @@ -129,4 +129,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/overrides-ray-query.compact.ron b/naga/tests/out/ir/overrides-ray-query.compact.ron index 10cad83538..fe95d97403 100644 --- a/naga/tests/out/ir/overrides-ray-query.compact.ron +++ b/naga/tests/out/ir/overrides-ray-query.compact.ron @@ -260,4 +260,5 
@@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/overrides-ray-query.ron b/naga/tests/out/ir/overrides-ray-query.ron index 10cad83538..fe95d97403 100644 --- a/naga/tests/out/ir/overrides-ray-query.ron +++ b/naga/tests/out/ir/overrides-ray-query.ron @@ -260,4 +260,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/overrides.compact.ron b/naga/tests/out/ir/overrides.compact.ron index 00c57fa434..29622dcc08 100644 --- a/naga/tests/out/ir/overrides.compact.ron +++ b/naga/tests/out/ir/overrides.compact.ron @@ -214,4 +214,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/overrides.ron b/naga/tests/out/ir/overrides.ron index 00c57fa434..29622dcc08 100644 --- a/naga/tests/out/ir/overrides.ron +++ b/naga/tests/out/ir/overrides.ron @@ -214,4 +214,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/shadow.compact.ron b/naga/tests/out/ir/shadow.compact.ron index 24ab5edda3..a0403368ca 100644 --- a/naga/tests/out/ir/shadow.compact.ron +++ b/naga/tests/out/ir/shadow.compact.ron @@ -1032,4 +1032,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/shadow.ron b/naga/tests/out/ir/shadow.ron index f7b4b67dc2..797ccbb5a2 100644 --- a/naga/tests/out/ir/shadow.ron +++ b/naga/tests/out/ir/shadow.ron @@ -1310,4 +1310,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/spec-constants.compact.ron b/naga/tests/out/ir/spec-constants.compact.ron index d8658a413f..110e0dc4aa 100644 --- a/naga/tests/out/ir/spec-constants.compact.ron +++ b/naga/tests/out/ir/spec-constants.compact.ron @@ -614,4 +614,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/spec-constants.ron b/naga/tests/out/ir/spec-constants.ron index 407ce5f49e..e10030cc74 100644 --- a/naga/tests/out/ir/spec-constants.ron +++ b/naga/tests/out/ir/spec-constants.ron @@ -720,4 +720,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/storage-textures.compact.ron b/naga/tests/out/ir/storage-textures.compact.ron index 3f2f06439c..02e159b961 100644 --- a/naga/tests/out/ir/storage-textures.compact.ron +++ b/naga/tests/out/ir/storage-textures.compact.ron @@ -316,4 +316,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/ir/storage-textures.ron b/naga/tests/out/ir/storage-textures.ron index 3f2f06439c..02e159b961 100644 --- a/naga/tests/out/ir/storage-textures.ron +++ b/naga/tests/out/ir/storage-textures.ron @@ -316,4 +316,5 @@ ], diagnostic_filters: [], diagnostic_filter_leaf: None, + comments: None, ) \ No newline at end of file diff --git a/naga/tests/out/wgsl/types_with_comments.wgsl b/naga/tests/out/wgsl/types_with_comments.wgsl new file mode 100644 index 0000000000..5d69709ee6 --- /dev/null +++ b/naga/tests/out/wgsl/types_with_comments.wgsl @@ -0,0 +1,4 @@ +@group(0) @binding(0) +var mvp_matrix: mat4x4; +var w_mem: mat2x2; +
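
A minimal usage sketch of the API introduced above, assuming `naga` is built with the `wgsl-in` feature and using the existing `Frontend::parse` entry point; the shader source and item names are purely illustrative:

```rust
// Parse a WGSL snippet with comment collection enabled and read the
// collected doc comments back out of the resulting module.
use naga::front::wgsl::{Frontend, Options};

fn main() {
    // Illustrative shader: `//!` documents the module, `///` documents an item.
    let source = "
//! Top-level module documentation.

/// A documented constant.
const answer: i32 = 42;
";

    // `Options::new()` leaves `parse_comments` off; enable it explicitly here.
    let mut frontend = Frontend::new_with_options(Options {
        parse_comments: true,
    });
    let module = frontend.parse(source).expect("WGSL parsing failed");

    if let Some(comments) = &module.comments {
        // `//!` comments from the top of the file end up in `comments.module`.
        println!("module docs: {:?}", comments.module);
        // Item comments are keyed by handle or index in the other maps
        // (`constants`, `functions`, `types`, `entry_points`, `struct_members`).
        for (handle, docs) in comments.constants.iter() {
            println!("constant {handle:?}: {docs:?}");
        }
    } else {
        // With comment parsing disabled (the default), `comments` stays `None`.
        println!("no comments collected");
    }
}
```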