feat: implement documentation comments
Add support for doc comments (/// syntax) that can be attached to declarations
for documentation purposes. The implementation:

- Adds DocComment token kind to lexer
- Recognizes /// as doc comment syntax (distinct from // regular comments)
- Parses consecutive doc comments and combines them into a single string
- Adds doc field to FunctionDecl, TypeDecl, LetDecl, EffectDecl, TraitDecl
- Passes doc comments through parser to declarations
- Multiple consecutive doc comment lines are joined with newlines

This enables documentation extraction and could be used for generating API
docs, IDE hover information, and REPL help.

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
39
src/lexer.rs
39
src/lexer.rs
@@ -44,6 +44,9 @@ pub enum TokenKind {
|
||||
Impl, // impl (for trait implementations)
|
||||
For, // for (in impl Trait for Type)
|
||||
|
||||
// Documentation
|
||||
DocComment(String), // /// doc comment
|
||||
|
||||
// Behavioral type keywords
|
||||
Is, // is (for behavioral properties)
|
||||
Pure, // pure
|
||||
@@ -124,6 +127,7 @@ impl fmt::Display for TokenKind {
|
||||
TokenKind::Trait => write!(f, "trait"),
|
||||
TokenKind::Impl => write!(f, "impl"),
|
||||
TokenKind::For => write!(f, "for"),
|
||||
TokenKind::DocComment(s) => write!(f, "/// {}", s),
|
||||
TokenKind::Is => write!(f, "is"),
|
||||
TokenKind::Pure => write!(f, "pure"),
|
||||
TokenKind::Total => write!(f, "total"),
|
||||
@@ -268,9 +272,16 @@ impl<'a> Lexer<'a> {
|
||||
}
|
||||
 '/' => {
     if self.peek() == Some('/') {
-        // Line comment
-        self.skip_line_comment();
-        return self.next_token();
+        self.advance(); // consume second '/'
+        // Check if this is a doc comment (///)
+        if self.peek() == Some('/') {
+            self.advance(); // consume third '/'
+            return Ok(self.scan_doc_comment(start));
+        } else {
+            // Regular line comment
+            self.skip_line_comment();
+            return self.next_token();
+        }
     } else {
         TokenKind::Slash
     }
|
||||
@@ -411,6 +422,28 @@ impl<'a> Lexer<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
fn scan_doc_comment(&mut self, start: usize) -> Token {
|
||||
// Skip leading whitespace after ///
|
||||
while self.peek() == Some(' ') || self.peek() == Some('\t') {
|
||||
self.advance();
|
||||
}
|
||||
|
||||
// Collect the rest of the line
|
||||
let mut content = String::new();
|
||||
while let Some(c) = self.peek() {
|
||||
if c == '\n' {
|
||||
break;
|
||||
}
|
||||
content.push(c);
|
||||
self.advance();
|
||||
}
|
||||
|
||||
Token::new(
|
||||
TokenKind::DocComment(content.trim_end().to_string()),
|
||||
Span::new(start, self.pos),
|
||||
)
|
||||
}
|
||||
|
||||
fn scan_string(&mut self, _start: usize) -> Result<TokenKind, LexError> {
|
||||
let mut value = String::new();
|
||||
loop {
|
||||
|
||||
Reference in New Issue
Block a user