| author | Gustav Sörnäs <gustav@sornas.net> | 2021-03-10 18:44:59 +0100 |
|---|---|---|
| committer | Gustav Sörnäs <gustav@sornas.net> | 2021-03-10 18:44:59 +0100 |
| commit | 0275d67aba60706ed6c1311af615e97653c751dc (patch) | |
| tree | b0085d348b517b41aa8cfe2ec04c7cd27eaec611 | |
| parent | 21b76633d149f62dbfdf55702dbdf8c84bf14105 (diff) | |
| download | sylt-main.tar.gz | |
handle file not founds everywhere (main)
| -rw-r--r-- | src/error.rs | 10 |
| -rw-r--r-- | src/lib.rs | 2 |
| -rw-r--r-- | src/main.rs | 9 |
| -rw-r--r-- | src/sectionizer.rs | 31 |
| -rw-r--r-- | src/tokenizer.rs | 4 |
5 files changed, 44 insertions, 12 deletions
diff --git a/src/error.rs b/src/error.rs
index f967685..52bcc82 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -143,3 +143,13 @@ impl fmt::Display for Error {
     }
 }
 
+impl Error {
+    pub fn new_nowhere(kind: ErrorKind, message: Option<String>) -> Self {
+        Self {
+            kind,
+            message,
+            file: PathBuf::from("!compiler!"),
+            line: 0,
+        }
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -38,7 +38,7 @@ fn run(args: Args, functions: Vec<(String, RustFunction)>) -> Result<(), Vec<Err
             }]);
         }
     };
-    let sections = sectionizer::sectionize(&path);
+    let sections = sectionizer::sectionize(&path)?;
     match compiler::Compiler::new(sections).compile("/preamble", &path, &functions) {
         Ok(prog) => {
             let mut vm = vm::VM::new();
diff --git a/src/main.rs b/src/main.rs
index 23ababa..16446dd 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -2,16 +2,19 @@ use std::path::Path;
 
 use sylt::{run_file, Args};
 
-fn main() {
+fn main() -> Result<(), String> {
     let args = parse_args();
+    if args.file.is_none() {
+        return Err("No file to run".to_string());
+    }
     let errs = match run_file(args, sylt_macro::link!(extern_test as test)) {
         Err(it) => it,
-        _ => return,
+        _ => return Ok(()),
     };
     for err in errs.iter() {
         println!("{}", err);
     }
-    println!(" {} errors occured.", errs.len());
+    Err(format!("{} errors occured.", errs.len()))
 }
 
 fn parse_args() -> Args {
diff --git a/src/sectionizer.rs b/src/sectionizer.rs
index 8c5e238..b66f388 100644
--- a/src/sectionizer.rs
+++ b/src/sectionizer.rs
@@ -1,3 +1,4 @@
+use crate::error::{Error, ErrorKind};
 use crate::tokenizer::{PlacedToken, Token, file_to_tokens};
 use std::collections::HashSet;
 
@@ -19,12 +20,15 @@ impl Section {
     }
 }
 
-pub fn sectionize(path: &Path) -> Vec<Section> {
+pub fn sectionize(path: &Path) -> Result<Vec<Section>, Vec<Error>> {
     let mut read_files = HashSet::new();
     read_files.insert(path.to_path_buf());
-    let tokens = file_to_tokens(path);
+    let tokens = file_to_tokens(path).map_err(|_| vec![
+        Error::new_nowhere(ErrorKind::FileNotFound(path.to_path_buf()), None)
+    ])?;
     let mut all_tokens = vec![(path.to_path_buf(), tokens)];
     let mut sections = Vec::new();
+    let mut errors = Vec::new();
 
     let mut i = 0;
     while i < all_tokens.len() {
@@ -44,12 +48,23 @@ pub fn sectionize(path: &Path) -> Vec<Section> {
             (Some((Token::Use, _)),
              Some((Token::Identifier(use_file), _)),
-             Some((Token::Newline, _))) => {
+             Some((Token::Newline, line))) => {
                 let use_file: PathBuf = format!("{}.sy", use_file).into();
                 if !read_files.contains(&use_file) {
-                    let use_file_tokens = file_to_tokens(&use_file);
                     read_files.insert(use_file.clone());
-                    all_tokens.push((use_file, use_file_tokens))
+                    match file_to_tokens(&use_file) {
+                        Ok(tokens) => {
+                            all_tokens.push((use_file, tokens))
+                        }
+                        Err(_) => {
+                            errors.push(Error {
+                                kind: ErrorKind::FileNotFound(use_file),
+                                file: path.to_path_buf(),
+                                line: *line,
+                                message: None,
+                            });
+                        }
+                    }
                 }
                 true
             },
@@ -101,5 +116,9 @@ pub fn sectionize(path: &Path) -> Vec<Section> {
         }
         sections.push(Section::new(path.clone(), &tokens[last..curr]));
     }
-    sections
+    if errors.is_empty() {
+        Ok(sections)
+    } else {
+        Err(errors)
+    }
 }
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 8b06324..2203a6e 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -183,8 +183,8 @@ pub fn string_to_tokens(content: &str) -> TokenStream {
     lined_tokens
 }
 
-pub fn file_to_tokens(file: &Path) -> TokenStream {
-    string_to_tokens(&fs::read_to_string(file).unwrap())
+pub fn file_to_tokens(file: &Path) -> Result<TokenStream, std::io::Error> {
+    Ok(string_to_tokens(&fs::read_to_string(file)?))
 }
 
 #[cfg(test)]
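Taken together, the commit swaps the `unwrap()` in `file_to_tokens` for `Result` propagation, so a missing file becomes a reported `FileNotFound` error instead of a panic. Below is a minimal, self-contained sketch of that pattern; the `TokenStream`, `Error`, and `ErrorKind` definitions here are simplified stand-ins for illustration only, not the actual sylt types.

```rust
use std::fs;
use std::path::{Path, PathBuf};

// Simplified stand-ins for the real sylt types (illustration only).
type TokenStream = Vec<String>;

#[derive(Debug)]
enum ErrorKind {
    FileNotFound(PathBuf),
}

#[derive(Debug)]
struct Error {
    kind: ErrorKind,
    message: Option<String>,
}

// Reading the file now surfaces the io::Error instead of unwrapping.
fn file_to_tokens(file: &Path) -> Result<TokenStream, std::io::Error> {
    let content = fs::read_to_string(file)?;
    Ok(content.split_whitespace().map(|w| w.to_string()).collect())
}

// The caller maps the io::Error into the compiler's own error type,
// mirroring the map_err + FileNotFound pattern in sectionize().
fn sectionize(path: &Path) -> Result<TokenStream, Vec<Error>> {
    file_to_tokens(path).map_err(|_| {
        vec![Error {
            kind: ErrorKind::FileNotFound(path.to_path_buf()),
            message: None,
        }]
    })
}

fn main() {
    match sectionize(Path::new("does_not_exist.sy")) {
        Ok(tokens) => println!("read {} tokens", tokens.len()),
        Err(errs) => {
            // Errors are printed and reported rather than crashing the process.
            for err in errs {
                println!("{:?}", err);
            }
        }
    }
}
```

For files pulled in via `use`, the real `sectionize` keeps scanning and pushes each missing file into its `errors` vector, so every unresolved `use` is reported in one pass rather than stopping at the first failure.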
