
Commit

cargo fmt
vcfxb committed Feb 18, 2024
1 parent a345cb7 commit 82187d3
Showing 5 changed files with 27 additions and 20 deletions.
29 changes: 19 additions & 10 deletions wright/src/bin/wright.rs
@@ -4,7 +4,11 @@ use anyhow::Result;
use clap::{Parser, Subcommand};
use codespan_reporting::files::Files;
use std::path::PathBuf;
-use wright::{filemap::{FileId, FileMap}, parser::lexer::{Lexer, Token}, repl};
+use wright::{
+    filemap::{FileId, FileMap},
+    parser::lexer::{Lexer, Token},
+    repl,
+};

/// The wright cli.
#[derive(Parser, Debug)]
@@ -50,21 +54,26 @@ fn main() -> Result<()> {
// Start an interactive repl.
Some(Commands::Repl) => repl::start(),

-    // Print all the tokens for a given file.
-    Some(Commands::Debug { command: DebugCommands::Tokens { file, pretty: false } }) => {
+        // Print all the tokens for a given file.
+        Some(Commands::Debug {
+            command:
+                DebugCommands::Tokens {
+                    file,
+                    pretty: false,
+                },
+        }) => {
let mut file_map: FileMap = FileMap::new();
-        // Add the given file to the file map.
+            // Add the given file to the file map.
let file_id: FileId = file_map.add_file(file)?;
-        // Make a lexer over the entirety of the given file.
-        // Use unwrap here, since we know we just added the file.
+            // Make a lexer over the entirety of the given file.
+            // Use unwrap here, since we know we just added the file.
let lexer: Lexer = Lexer::new(file_map.source(file_id).unwrap());
-        // Get all the tokens from the lexer and print them each.
+            // Get all the tokens from the lexer and print them each.
lexer.for_each(|token: Token| println!("{token:?}"));
// Return ok.
Ok(())
-        },
+        }

-
-        _ => unimplemented!()
+        _ => unimplemented!(),
}
}
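
For reference, the token-printing flow in the reformatted DebugCommands::Tokens arm above reads naturally as a small standalone helper. The sketch below is illustrative only, assuming the FileMap and Lexer APIs exactly as they appear in this diff; the helper name dump_tokens is hypothetical.

```rust
use anyhow::Result;
use codespan_reporting::files::Files; // brings the `source` method into scope, as in wright.rs
use std::path::PathBuf;
use wright::{
    filemap::{FileId, FileMap},
    parser::lexer::{Lexer, Token},
};

/// Hypothetical helper mirroring the `DebugCommands::Tokens` arm above.
fn dump_tokens(file: PathBuf) -> Result<()> {
    let mut file_map: FileMap = FileMap::new();
    // Register the file so its source text can be borrowed by the lexer.
    let file_id: FileId = file_map.add_file(file)?;
    // Lex the whole file; `unwrap` is fine because the file was just added.
    let lexer: Lexer = Lexer::new(file_map.source(file_id).unwrap());
    // Print the debug representation of every token.
    lexer.for_each(|token: Token| println!("{token:?}"));
    Ok(())
}
```
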
2 changes: 1 addition & 1 deletion wright/src/filemap.rs
@@ -65,7 +65,7 @@ pub struct FileMap<'src> {
inner: Vec<SimpleFile<FileName, ImmutableString<'src>>>,
}

-/// File Identifier used to refer to files.
+/// File Identifier used to refer to files.
pub type FileId = <FileMap<'static> as Files<'static>>::FileId;

impl<'src> FileMap<'src> {
2 changes: 1 addition & 1 deletion wright/src/parser.rs
@@ -5,6 +5,6 @@
// pub mod state;
// pub mod util;

+pub mod ast;
pub mod fragment;
pub mod lexer;
-pub mod ast;
4 changes: 1 addition & 3 deletions wright/src/parser/ast.rs
@@ -1,3 +1 @@
-//! Abstract syntax tree representation for Wright source code.
-
+//! Abstract syntax tree representation for Wright source code.
10 changes: 5 additions & 5 deletions wright/src/parser/lexer.rs
@@ -4,11 +4,11 @@
//! defined for tokens.
use super::fragment::Fragment;
+use derive_more::Display;
use std::iter::FusedIterator;
use std::str::Chars;
use std::{iter::Peekable, ptr};
use unicode_ident::{is_xid_continue, is_xid_start};
-use derive_more::Display;

/// Constant table of single character tokens and the characters that match them.
pub const SINGLE_CHAR_TOKENS: &[(char, TokenTy)] = &[
@@ -431,13 +431,13 @@ impl<'src> Lexer<'src> {
}

// If we haven't matched at this point, produce a token marked as "Unknown".
-    // The unsafe is fine -- we know from above that there are remaining characters.
+        // The unsafe is fine -- we know from above that there are remaining characters.
let unknown_char = unsafe { self.remaining.chars().next().unwrap_unchecked() };
return Some(self.split_token(unknown_char.len_utf8(), TokenTy::Unknown));
}
}

-/// Lexers can be considered token iterators.
+/// Lexers can be considered token iterators.
impl<'src> Iterator for Lexer<'src> {
type Item = Token<'src>;

@@ -446,12 +446,12 @@ impl<'src> Iterator for Lexer<'src> {
}

fn size_hint(&self) -> (usize, Option<usize>) {
-        // Lexers cannot return multiple tokens for a single byte.
+        // Lexers cannot return multiple tokens for a single byte.
(0, Some(self.bytes_remaining()))
}
}

-// Lexers are fused -- they cannot generate tokens infinitely.
+// Lexers are fused -- they cannot generate tokens infinitely.
impl<'src> FusedIterator for Lexer<'src> {}

#[cfg(test)]

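Because Lexer implements Iterator (yielding Token values) and FusedIterator, as the hunks above show, it can be driven with ordinary iterator adapters. A minimal sketch under the assumption that Lexer::new accepts a plain &str of source text, as it appears to in wright.rs; the helper count_tokens and the sample input are hypothetical.

```rust
use wright::parser::lexer::Lexer;

/// Hypothetical helper: count the tokens produced for a source string.
fn count_tokens(source: &str) -> usize {
    // `Lexer` is an ordinary (fused) iterator of tokens, so standard
    // adapters such as `count`, `map`, and `filter` apply directly.
    Lexer::new(source).count()
}

fn main() {
    // Sample input; any source text accepted by the lexer would do.
    println!("{} tokens", count_tokens("a + b"));
}
```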