From 474eb5d34a6803bf049c319f55e9f3d6407c5583 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Wed, 25 Sep 2024 20:36:03 -0700 Subject: [PATCH 01/21] fix: propagate errors This follows idiomatic rust [error handling] by using the [anyhow library]. [error handling]: https://doc.rust-lang.org/book/ch09-02-recoverable-errors-with-result.html [anyhow library]: https://crates.io/crates/anyhow --- Cargo.lock | 1 + cpp-linter/Cargo.toml | 1 + cpp-linter/src/clang_tools/clang_format.rs | 45 +- cpp-linter/src/clang_tools/clang_tidy.rs | 82 ++-- cpp-linter/src/clang_tools/mod.rs | 80 ++-- cpp-linter/src/cli/mod.rs | 45 +- cpp-linter/src/cli/structs.rs | 4 +- cpp-linter/src/common_fs/file_filter.rs | 22 +- cpp-linter/src/common_fs/mod.rs | 25 +- cpp-linter/src/git.rs | 9 +- cpp-linter/src/main.rs | 9 +- cpp-linter/src/rest_api/github/mod.rs | 144 +++--- .../src/rest_api/github/specific_api.rs | 420 ++++++++++-------- cpp-linter/src/rest_api/mod.rs | 237 +++++----- cpp-linter/src/run.rs | 96 ++-- cpp-linter/tests/comments.rs | 2 +- cpp-linter/tests/paginated_changed_files.rs | 7 +- cpp-linter/tests/reviews.rs | 2 +- cspell.config.yml | 2 + node-binding/src/lib.rs | 7 +- py-binding/src/lib.rs | 7 +- 21 files changed, 644 insertions(+), 603 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 280d638..731f5c9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -332,6 +332,7 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" name = "cpp-linter" version = "2.0.0-rc4" dependencies = [ + "anyhow", "chrono", "clap", "fast-glob", diff --git a/cpp-linter/Cargo.toml b/cpp-linter/Cargo.toml index fd07244..f0d6508 100644 --- a/cpp-linter/Cargo.toml +++ b/cpp-linter/Cargo.toml @@ -14,6 +14,7 @@ license.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +anyhow = "1.0.89" chrono = "0.4.38" clap = "4.5.17" fast-glob = "0.4.0" diff --git 
a/cpp-linter/src/clang_tools/clang_format.rs b/cpp-linter/src/clang_tools/clang_format.rs index ea87e0f..52b0480 100644 --- a/cpp-linter/src/clang_tools/clang_format.rs +++ b/cpp-linter/src/clang_tools/clang_format.rs @@ -6,6 +6,7 @@ use std::{ sync::{Arc, Mutex, MutexGuard}, }; +use anyhow::{Context, Result}; use log::Level; // non-std crates use serde::Deserialize; @@ -109,7 +110,7 @@ pub fn tally_format_advice(files: &[Arc>]) -> u64 { pub fn run_clang_format( file: &mut MutexGuard, clang_params: &ClangParams, -) -> Vec<(log::Level, String)> { +) -> Result> { let mut cmd = Command::new(clang_params.clang_format_command.as_ref().unwrap()); let mut logs = vec![]; cmd.args(["--style", &clang_params.style]); @@ -117,7 +118,8 @@ pub fn run_clang_format( for range in &ranges { cmd.arg(format!("--lines={}:{}", range.start(), range.end())); } - cmd.arg(file.name.to_string_lossy().as_ref()); + let file_name = file.name.to_string_lossy().to_string(); + cmd.arg(file.name.to_path_buf().as_os_str()); let mut patched = None; if clang_params.format_review { logs.push(( @@ -129,7 +131,7 @@ pub fn run_clang_format( .as_ref() .unwrap() .to_str() - .unwrap(), + .unwrap_or_default(), cmd.get_args() .map(|a| a.to_str().unwrap()) .collect::>() @@ -138,7 +140,7 @@ pub fn run_clang_format( )); patched = Some( cmd.output() - .expect("Failed to get fixes from clang-format") + .with_context(|| format!("Failed to get fixes from clang-format: {file_name}"))? 
.stdout, ); } @@ -147,33 +149,34 @@ pub fn run_clang_format( log::Level::Info, format!( "Running \"{} {}\"", - clang_params - .clang_format_command - .as_ref() - .unwrap() - .to_str() - .unwrap(), + cmd.get_program().to_string_lossy(), cmd.get_args() .map(|x| x.to_str().unwrap()) .collect::>() .join(" ") ), )); - let output = cmd.output().unwrap(); + let output = cmd + .output() + .with_context(|| format!("Failed to get replacements from clang-format: {file_name}"))?; if !output.stderr.is_empty() || !output.status.success() { - logs.push(( - log::Level::Debug, - format!( - "clang-format raised the follow errors:\n{}", - String::from_utf8(output.stderr).unwrap() - ), - )); + if let Ok(stderr) = String::from_utf8(output.stderr) { + logs.push(( + log::Level::Debug, + format!("clang-format raised the follow errors:\n{}", stderr), + )); + } else { + logs.push(( + log::Level::Error, + "stderr from clang-format was not UTF-8 encoded".to_string(), + )); + } } if output.stdout.is_empty() { - return logs; + return Ok(logs); } let xml = String::from_utf8(output.stdout) - .unwrap() + .with_context(|| format!("stdout from clang-format was not UTF-8 encoded: {file_name}"))? 
.lines() .collect::>() .join(""); @@ -208,7 +211,7 @@ pub fn run_clang_format( format_advice.replacements = filtered_replacements; } file.format_advice = Some(format_advice); - logs + Ok(logs) } #[cfg(test)] diff --git a/cpp-linter/src/clang_tools/clang_tidy.rs b/cpp-linter/src/clang_tools/clang_tidy.rs index f276e82..46bd6a6 100644 --- a/cpp-linter/src/clang_tools/clang_tidy.rs +++ b/cpp-linter/src/clang_tools/clang_tidy.rs @@ -9,6 +9,7 @@ use std::{ sync::{Arc, Mutex, MutexGuard}, }; +use anyhow::{Context, Result}; // non-std crates use regex::Regex; use serde::Deserialize; @@ -132,7 +133,7 @@ impl MakeSuggestions for TidyAdvice { fn parse_tidy_output( tidy_stdout: &[u8], database_json: &Option>, -) -> Option { +) -> Result> { let note_header = Regex::new(r"^(.+):(\d+):(\d+):\s(\w+):(.*)\[([a-zA-Z\d\-\.]+)\]$").unwrap(); let fixed_note = Regex::new(r"^.+:(\d+):\d+:\snote: FIX-IT applied suggested code changes$").unwrap(); @@ -178,14 +179,15 @@ fn parse_tidy_output( // likely not a member of the project's sources (ie /usr/include/stdio.h) filename = filename .strip_prefix(&cur_dir) - .expect("cannot determine filename by relative path.") + // we already checked above that filename.starts_with(current_directory) + .unwrap() .to_path_buf(); } notification = Some(TidyNotification { filename: filename.to_string_lossy().to_string().replace('\\', "/"), - line: captured[2].parse::().unwrap(), - cols: captured[3].parse::().unwrap(), + line: captured[2].parse()?, + cols: captured[3].parse()?, severity: String::from(&captured[4]), rationale: String::from(&captured[5]).trim().to_string(), diagnostic: String::from(&captured[6]), @@ -195,9 +197,7 @@ fn parse_tidy_output( // begin capturing subsequent lines as suggestions found_fix = false; } else if let Some(capture) = fixed_note.captures(line) { - let fixed_line = capture[1] - .parse() - .expect("Failed to parse fixed line number's string as integer"); + let fixed_line = capture[1].parse()?; if let Some(note) = &mut 
notification { if !note.fixed_lines.contains(&fixed_line) { note.fixed_lines.push(fixed_line); @@ -219,12 +219,12 @@ fn parse_tidy_output( result.push(note); } if result.is_empty() { - None + Ok(None) } else { - Some(TidyAdvice { + Ok(Some(TidyAdvice { notes: result, patched: None, - }) + })) } } @@ -249,7 +249,7 @@ pub fn tally_tidy_advice(files: &[Arc>]) -> u64 { pub fn run_clang_tidy( file: &mut MutexGuard, clang_params: &ClangParams, -) -> Vec<(log::Level, std::string::String)> { +) -> Result> { let mut cmd = Command::new(clang_params.clang_tidy_command.as_ref().unwrap()); let mut logs = vec![]; if !clang_params.tidy_checks.is_empty() { @@ -258,19 +258,15 @@ pub fn run_clang_tidy( if let Some(db) = &clang_params.database { cmd.args(["-p", &db.to_string_lossy()]); } - if let Some(extras) = &clang_params.extra_args { - for arg in extras { - cmd.args(["--extra-arg", format!("\"{}\"", arg).as_str()]); - } + for arg in &clang_params.extra_args { + cmd.args(["--extra-arg", format!("\"{}\"", arg).as_str()]); } + let file_name = file.name.to_string_lossy().to_string(); if clang_params.lines_changed_only != LinesChangedOnly::Off { let ranges = file.get_ranges(&clang_params.lines_changed_only); let filter = format!( "[{{\"name\":{:?},\"lines\":{:?}}}]", - &file - .name - .to_string_lossy() - .replace('/', if OS == "windows" { "\\" } else { "/" }), + &file_name.replace('/', if OS == "windows" { "\\" } else { "/" }), ranges .iter() .map(|r| [r.start(), r.end()]) @@ -281,10 +277,12 @@ pub fn run_clang_tidy( let mut original_content = None; if clang_params.tidy_review { cmd.arg("--fix-errors"); - original_content = - Some(fs::read_to_string(&file.name).expect( - "Failed to cache file's original content before applying clang-tidy changes.", - )); + original_content = Some(fs::read_to_string(&file.name).with_context(|| { + format!( + "Failed to cache file's original content before applying clang-tidy changes: {}", + file_name.clone() + ) + })?); } if 
!clang_params.style.is_empty() { cmd.args(["--format-style", clang_params.style.as_str()]); @@ -310,29 +308,32 @@ pub fn run_clang_tidy( ), )); if !output.stderr.is_empty() { - logs.push(( - log::Level::Debug, - format!( - "clang-tidy made the following summary:\n{}", - String::from_utf8(output.stderr).unwrap() - ), - )); + if let Ok(stderr) = String::from_utf8(output.stderr) { + logs.push(( + log::Level::Debug, + format!("clang-tidy made the following summary:\n{}", stderr), + )); + } else { + logs.push(( + log::Level::Error, + "clang-tidy stderr is not UTF-8 encoded".to_string(), + )); + } } - file.tidy_advice = parse_tidy_output(&output.stdout, &clang_params.database_json); + file.tidy_advice = parse_tidy_output(&output.stdout, &clang_params.database_json)?; if clang_params.tidy_review { - let file_name = &file.name.to_owned(); if let Some(tidy_advice) = &mut file.tidy_advice { // cache file changes in a buffer and restore the original contents for further analysis by clang-format tidy_advice.patched = - Some(fs::read(file_name).expect("Failed to read changes from clang-tidy")); + Some(fs::read(&file_name).with_context(|| { + format!("Failed to read changes from clang-tidy: {file_name}") + })?); } - fs::write( - file_name, - original_content.expect("original content of file was not cached"), - ) - .expect("failed to restore file's original content."); + // original_content is guaranteed to be Some() value at this point + fs::write(&file_name, original_content.unwrap()) + .with_context(|| format!("Failed to restore file's original content: {file_name}"))?; } - logs + Ok(logs) } #[cfg(test)] @@ -427,7 +428,7 @@ mod test { tidy_checks: "".to_string(), // use .clang-tidy config file lines_changed_only: LinesChangedOnly::Off, database: None, - extra_args: Some(extra_args.clone()), // <---- the reason for this test + extra_args: extra_args.clone(), // <---- the reason for this test database_json: None, format_filter: None, tidy_filter: None, @@ -438,6 +439,7 @@ mod 
test { }; let mut file_lock = arc_ref.lock().unwrap(); let logs = run_clang_tidy(&mut file_lock, &clang_params) + .unwrap() .into_iter() .filter_map(|(_lvl, msg)| { if msg.contains("Running ") { diff --git a/cpp-linter/src/clang_tools/mod.rs b/cpp-linter/src/clang_tools/mod.rs index 87efba9..0cf7fdd 100644 --- a/cpp-linter/src/clang_tools/mod.rs +++ b/cpp-linter/src/clang_tools/mod.rs @@ -9,6 +9,7 @@ use std::{ sync::{Arc, Mutex}, }; +use anyhow::{anyhow, Context, Result}; use git2::{DiffOptions, Patch}; // non-std crates use lenient_semver; @@ -41,14 +42,14 @@ use clang_tidy::{run_clang_tidy, CompilationUnit}; /// /// The only reason this function would return an error is if the specified tool is not /// installed or present on the system (nor in the `$PATH` environment variable). -pub fn get_clang_tool_exe(name: &str, version: &str) -> Result { +pub fn get_clang_tool_exe(name: &str, version: &str) -> Result { if version.is_empty() { // The default CLI value is an empty string. // Thus, we should use whatever is installed and added to $PATH. 
if let Ok(cmd) = which(name) { return Ok(cmd); } else { - return Err("Could not find clang tool by name"); + return Err(anyhow!("Could not find clang tool by name")); } } if let Ok(semver) = lenient_semver::parse_into::(version) { @@ -66,14 +67,14 @@ pub fn get_clang_tool_exe(name: &str, version: &str) -> Result Result>, clang_params: Arc, -) -> (PathBuf, Vec<(log::Level, String)>) { - let mut file = file.lock().unwrap(); +) -> Result<(PathBuf, Vec<(log::Level, String)>)> { + let mut file = file + .lock() + .map_err(|_| anyhow!("File mutex is already locked!"))?; let mut logs = vec![]; if clang_params.clang_tidy_command.is_some() { if clang_params @@ -99,7 +102,7 @@ fn analyze_single_file( .is_some_and(|f| f.is_source_or_ignored(file.name.as_path())) || clang_params.tidy_filter.is_none() { - let tidy_result = run_clang_tidy(&mut file, &clang_params); + let tidy_result = run_clang_tidy(&mut file, &clang_params)?; logs.extend(tidy_result); } else { logs.push(( @@ -108,7 +111,7 @@ fn analyze_single_file( "{} not scanned by clang-tidy due to `--ignore-tidy`", file.name.as_os_str().to_string_lossy() ), - )) + )); } } if clang_params.clang_format_command.is_some() { @@ -118,7 +121,7 @@ fn analyze_single_file( .is_some_and(|f| f.is_source_or_ignored(file.name.as_path())) || clang_params.format_filter.is_none() { - let format_result = run_clang_format(&mut file, &clang_params); + let format_result = run_clang_format(&mut file, &clang_params)?; logs.extend(format_result); } else { logs.push(( @@ -130,7 +133,7 @@ fn analyze_single_file( )); } } - (file.name.clone(), logs) + Ok((file.name.clone(), logs)) } /// Runs clang-tidy and/or clang-format and returns the parsed output from each. @@ -141,31 +144,27 @@ pub async fn capture_clang_tools_output( files: &mut Vec>>, version: &str, clang_params: &mut ClangParams, -) { +) -> Result<()> { // find the executable paths for clang-tidy and/or clang-format and show version // info as debugging output. 
if clang_params.tidy_checks != "-*" { clang_params.clang_tidy_command = { - let cmd = get_clang_tool_exe("clang-tidy", version).unwrap(); + let cmd = get_clang_tool_exe("clang-tidy", version)?; log::debug!( "{} --version\n{}", &cmd.to_string_lossy(), - String::from_utf8_lossy( - &Command::new(&cmd).arg("--version").output().unwrap().stdout - ) + String::from_utf8_lossy(&Command::new(&cmd).arg("--version").output()?.stdout) ); Some(cmd) } }; if !clang_params.style.is_empty() { clang_params.clang_format_command = { - let cmd = get_clang_tool_exe("clang-format", version).unwrap(); + let cmd = get_clang_tool_exe("clang-format", version)?; log::debug!( "{} --version\n{}", &cmd.to_string_lossy(), - String::from_utf8_lossy( - &Command::new(&cmd).arg("--version").output().unwrap().stdout - ) + String::from_utf8_lossy(&Command::new(&cmd).arg("--version").output()?.stdout) ); Some(cmd) } @@ -176,9 +175,10 @@ pub async fn capture_clang_tools_output( if let Ok(db_str) = fs::read(db_path.join("compile_commands.json")) { clang_params.database_json = Some( serde_json::from_str::>( + // A compilation database should be UTF-8 encoded. Its safe to `unwrap()` String::from_utf8(db_str).unwrap().as_str(), ) - .expect("Failed to parse compile_commands.json"), + .with_context(|| "Failed to parse compile_commands.json")?, ) } }; @@ -192,7 +192,7 @@ pub async fn capture_clang_tools_output( } while let Some(output) = executors.join_next().await { - if let Ok(out) = output { + if let Ok(out) = output? { let (file_name, logs) = out; start_log_group(format!("Analyzing {}", file_name.to_string_lossy())); for (level, msg) in logs { @@ -201,6 +201,7 @@ pub async fn capture_clang_tools_output( end_log_group(); } } + Ok(()) } /// A struct to describe a single suggestion in a pull_request review. 
@@ -298,7 +299,7 @@ pub fn make_patch<'buffer>( path: &Path, patched: &'buffer [u8], original_content: &'buffer [u8], -) -> Patch<'buffer> { +) -> Result> { let mut diff_opts = &mut DiffOptions::new(); diff_opts = diff_opts.indent_heuristic(true); diff_opts = diff_opts.context_lines(0); @@ -309,14 +310,13 @@ pub fn make_patch<'buffer>( Some(path), Some(diff_opts), ) - .unwrap_or_else(|_| { - panic!( + .with_context(|| { + format!( "Failed to create patch for file {}.", - path.to_str() - .expect("Failed to convert file's path to string.") + path.to_string_lossy() ) - }); - patch + })?; + Ok(patch) } pub trait MakeSuggestions { @@ -333,32 +333,33 @@ pub trait MakeSuggestions { file_obj: &FileObj, patch: &mut Patch, summary_only: bool, - ) { + ) -> Result<()> { let tool_name = self.get_tool_name(); let is_tidy_tool = tool_name == "clang-tidy"; let hunks_total = patch.num_hunks(); let mut hunks_in_patch = 0u32; let file_name = file_obj .name - .to_str() - .expect("Failed to convert file path to string") + .to_string_lossy() .replace("\\", "/") .trim_start_matches("./") .to_owned(); let patch_buf = &patch .to_buf() - .expect("Failed to convert patch to byte array") + .with_context(|| "Failed to convert patch to byte array")? .to_vec(); review_comments.full_patch[is_tidy_tool as usize].push_str( String::from_utf8(patch_buf.to_owned()) - .expect("Failed to convert patch buffer to string") + .with_context(|| format!("Failed to convert patch to string: {file_name}"))? 
.as_str(), ); if summary_only { - return; + return Ok(()); } for hunk_id in 0..hunks_total { - let (hunk, line_count) = patch.hunk(hunk_id).expect("Failed to get hunk from patch"); + let (hunk, line_count) = patch.hunk(hunk_id).with_context(|| { + format!("Failed to get hunk {hunk_id} from patch for {file_name}") + })?; hunks_in_patch += 1; let hunk_range = file_obj.is_hunk_in_diff(&hunk); if hunk_range.is_none() { @@ -371,16 +372,16 @@ pub trait MakeSuggestions { for line_index in 0..line_count { let diff_line = patch .line_in_hunk(hunk_id, line_index) - .expect("Failed to get line in a hunk"); + .with_context(|| format!("Failed to get line {line_index} in a hunk {hunk_id} of patch for {file_name}"))?; let line = String::from_utf8(diff_line.content().to_owned()) - .expect("Failed to convert line buffer to string"); + .with_context(|| format!("Failed to convert line {line_index} buffer to string in hunk {hunk_id} of patch for {file_name}"))?; if ['+', ' '].contains(&diff_line.origin()) { suggestion.push_str(line.as_str()); } else { removed.push( diff_line .old_lineno() - .expect("Removed line has no line number?!"), + .expect("Removed line should have a line number"), ); } } @@ -397,7 +398,7 @@ pub trait MakeSuggestions { .as_str(), ) } else { - suggestion = format!("```suggestion\n{suggestion}```",); + suggestion = format!("```suggestion\n{suggestion}```"); } let comment = Suggestion { line_start: start_line, @@ -410,6 +411,7 @@ pub trait MakeSuggestions { } } review_comments.tool_total[is_tidy_tool as usize] += hunks_in_patch; + Ok(()) } } diff --git a/cpp-linter/src/cli/mod.rs b/cpp-linter/src/cli/mod.rs index f2d4e87..dd04101 100644 --- a/cpp-linter/src/cli/mod.rs +++ b/cpp-linter/src/cli/mod.rs @@ -366,29 +366,21 @@ Further filtering can still be applied (see [Source options](#source-options))." /// ``` /// The cpp-linter-action (for Github CI workflows) can only use 1 `extra-arg` input option, so /// the value will be split at spaces. 
-pub fn convert_extra_arg_val(args: &ArgMatches) -> Option> { - let raw_val = args - .try_get_many::("extra-arg") - .expect("parser failed in set a default for `--extra-arg`"); - if let Some(mut val) = raw_val { - if val.len() == 1 { - // specified once; split and return result - return Some( - val.next() - .unwrap() - .trim_matches('\'') - .trim_matches('"') - .split(' ') - .map(|i| i.to_string()) - .collect(), - ); - } else { - // specified multiple times; just return - Some(val.map(|i| i.to_string()).collect()) - } +pub fn convert_extra_arg_val(args: &ArgMatches) -> Vec { + let mut val = args.get_many::("extra-arg").unwrap_or_default(); + if val.len() == 1 { + // specified once; split and return result + return val + .next() + .unwrap() + .trim_matches('\'') + .trim_matches('"') + .split(' ') + .map(|i| i.to_string()) + .collect(); } else { - // no value specified; just return - None + // specified multiple times; just return + val.map(|i| i.to_string()).collect() } } @@ -407,14 +399,13 @@ mod test { fn extra_arg_0() { let args = parser_args(vec!["cpp-linter"]); let extras = convert_extra_arg_val(&args); - assert!(extras.is_none()); + assert!(extras.is_empty()); } #[test] fn extra_arg_1() { let args = parser_args(vec!["cpp-linter", "--extra-arg='-std=c++17 -Wall'"]); - let extras = convert_extra_arg_val(&args); - let extra_args = extras.expect("extra-arg not parsed properly"); + let extra_args = convert_extra_arg_val(&args); assert_eq!(extra_args.len(), 2); assert_eq!(extra_args, ["-std=c++17", "-Wall"]) } @@ -426,9 +417,7 @@ mod test { "--extra-arg=-std=c++17", "--extra-arg=-Wall", ]); - let extras = convert_extra_arg_val(&args); - assert!(extras.is_some()); - let extra_args = extras.expect("extra-arg not parsed properly"); + let extra_args = convert_extra_arg_val(&args); assert_eq!(extra_args.len(), 2); assert_eq!(extra_args, ["-std=c++17", "-Wall"]) } diff --git a/cpp-linter/src/cli/structs.rs b/cpp-linter/src/cli/structs.rs index 650f0ca..5bc876b 100644 --- 
a/cpp-linter/src/cli/structs.rs +++ b/cpp-linter/src/cli/structs.rs @@ -51,7 +51,7 @@ pub struct Cli { pub ignore_tidy: Option>, pub tidy_checks: String, pub database: Option, - pub extra_arg: Option>, + pub extra_arg: Vec, pub thread_comments: ThreadComments, pub no_lgtm: bool, pub step_summary: bool, @@ -161,7 +161,7 @@ pub struct ClangParams { pub tidy_checks: String, pub lines_changed_only: LinesChangedOnly, pub database: Option, - pub extra_args: Option>, + pub extra_args: Vec, pub database_json: Option>, pub style: String, pub clang_tidy_command: Option, diff --git a/cpp-linter/src/common_fs/file_filter.rs b/cpp-linter/src/common_fs/file_filter.rs index b6eff2f..88a5521 100644 --- a/cpp-linter/src/common_fs/file_filter.rs +++ b/cpp-linter/src/common_fs/file_filter.rs @@ -1,10 +1,10 @@ +use anyhow::{anyhow, Context, Result}; +use fast_glob::glob_match; use std::{ fs, path::{Path, PathBuf}, }; -use fast_glob::glob_match; - use super::FileObj; #[derive(Debug, Clone)] @@ -150,21 +150,23 @@ impl FileFilter { /// - uses at least 1 of the given `extensions` /// - is not specified in the internal list of `ignored` paths /// - is specified in the internal list `not_ignored` paths (which supersedes `ignored` paths) - pub fn list_source_files(&self, root_path: &str) -> Vec { + pub fn list_source_files(&self, root_path: &str) -> Result> { let mut files: Vec = Vec::new(); - let entries = fs::read_dir(root_path).expect("repo root-path should exist"); + let entries = fs::read_dir(root_path) + .with_context(|| format!("Failed to read directory contents: {root_path}"))?; for entry in entries.filter_map(|p| p.ok()) { let path = entry.path(); if path.is_dir() { let mut is_hidden = false; - let parent = path.components().last().expect("parent not known"); + let parent = path + .components() + .last() + .ok_or(anyhow!("parent directory not known for {path:?}"))?; if parent.as_os_str().to_str().unwrap().starts_with('.') { is_hidden = true; } if !is_hidden { - files.extend( - 
self.list_source_files(&path.into_os_string().into_string().unwrap()), - ); + files.extend(self.list_source_files(&path.to_string_lossy())?); } } else { let is_valid_src = self.is_source_or_ignored(&path); @@ -175,7 +177,7 @@ impl FileFilter { } } } - files + Ok(files) } } @@ -255,7 +257,7 @@ mod tests { fn walk_dir_recursively() { let extensions = vec!["cpp".to_string(), "hpp".to_string()]; let file_filter = setup_ignore("target", extensions.clone()); - let files = file_filter.list_source_files("."); + let files = file_filter.list_source_files(".").unwrap(); assert!(!files.is_empty()); for file in files { assert!(extensions.contains( diff --git a/cpp-linter/src/common_fs/mod.rs b/cpp-linter/src/common_fs/mod.rs index 253d028..b35dcb2 100644 --- a/cpp-linter/src/common_fs/mod.rs +++ b/cpp-linter/src/common_fs/mod.rs @@ -6,6 +6,8 @@ use std::io::Read; use std::path::{Component, Path}; use std::{ops::RangeInclusive, path::PathBuf}; +use anyhow::{Context, Result}; + use crate::clang_tools::clang_format::FormatAdvice; use crate::clang_tools::clang_tidy::TidyAdvice; use crate::clang_tools::{make_patch, MakeSuggestions, ReviewComments, Suggestion}; @@ -127,30 +129,26 @@ impl FileObj { &self, review_comments: &mut ReviewComments, summary_only: bool, - ) { + ) -> Result<()> { let original_content = - fs::read(&self.name).expect("Failed to read original contents of file"); - let file_name = self - .name - .to_str() - .expect("Failed to convert file extension to string") - .replace("\\", "/"); + fs::read(&self.name).with_context(|| "Failed to read original contents of file")?; + let file_name = self.name.to_str().unwrap_or_default().replace("\\", "/"); let file_path = Path::new(&file_name); if let Some(advice) = &self.format_advice { if let Some(patched) = &advice.patched { - let mut patch = make_patch(file_path, patched, &original_content); - advice.get_suggestions(review_comments, self, &mut patch, summary_only); + let mut patch = make_patch(file_path, patched, 
&original_content)?; + advice.get_suggestions(review_comments, self, &mut patch, summary_only)?; } } if let Some(advice) = &self.tidy_advice { if let Some(patched) = &advice.patched { - let mut patch = make_patch(file_path, patched, &original_content); - advice.get_suggestions(review_comments, self, &mut patch, summary_only); + let mut patch = make_patch(file_path, patched, &original_content)?; + advice.get_suggestions(review_comments, self, &mut patch, summary_only)?; } if summary_only { - return; + return Ok(()); } // now check for clang-tidy warnings with no fixes applied @@ -159,7 +157,7 @@ impl FileObj { .extension() .unwrap_or_default() .to_str() - .expect("Failed to convert file extension to string"); + .unwrap_or_default(); for note in &advice.notes { if note.fixed_lines.is_empty() { // notification had no suggestion applied in `patched` @@ -200,6 +198,7 @@ impl FileObj { } } } + Ok(()) } } diff --git a/cpp-linter/src/git.rs b/cpp-linter/src/git.rs index e98486d..b3e0cf5 100644 --- a/cpp-linter/src/git.rs +++ b/cpp-linter/src/git.rs @@ -10,6 +10,7 @@ use std::{ops::RangeInclusive, path::PathBuf}; +use anyhow::{Context, Result}; // non-std crates use git2::{Diff, Error, Patch, Repository}; @@ -45,7 +46,7 @@ fn get_sha(repo: &Repository, depth: Option) -> Result, Er /// If there are files staged for a commit, then the resulting [`Diff`] will describe /// the staged changes. However, if there are no staged changes, then the last commit's /// [`Diff`] is returned. 
-pub fn get_diff(repo: &Repository) -> git2::Diff { +pub fn get_diff(repo: &Repository) -> Result { let head = get_sha(repo, None).unwrap().peel_to_tree().unwrap(); let mut has_staged_files = false; for entry in repo.statuses(None).unwrap().iter() { @@ -63,12 +64,12 @@ pub fn get_diff(repo: &Repository) -> git2::Diff { if has_staged_files { // get diff for staged files only repo.diff_tree_to_index(Some(&head), None, None) - .expect("Could not get diff for current changes in local repo index") + .with_context(|| "Could not get diff for current changes in local repo index") } else { // get diff for last commit only let base = get_sha(repo, Some(1)).unwrap().peel_to_tree().unwrap(); repo.diff_tree_to_tree(Some(&base), Some(&head), None) - .expect("could not get diff for last commit") + .with_context(|| "Could not get diff for last commit") } } @@ -407,8 +408,10 @@ mod test { set_current_dir(tmp).unwrap(); env::set_var("CI", "false"); // avoid use of REST API when testing in CI rest_api_client + .unwrap() .get_list_of_changed_files(&file_filter) .await + .unwrap() } #[tokio::test] diff --git a/cpp-linter/src/main.rs b/cpp-linter/src/main.rs index cd8ebe8..fcc4012 100644 --- a/cpp-linter/src/main.rs +++ b/cpp-linter/src/main.rs @@ -1,11 +1,14 @@ #![cfg(not(test))] /// This crate is the binary executable's entrypoint. -use std::env; +use std::{env, process::ExitCode}; use ::cpp_linter::run::run_main; +use anyhow::Result; /// This function simply forwards CLI args to [`run_main()`]. 
#[tokio::main] -pub async fn main() { - run_main(env::args().collect::>()).await; +pub async fn main() -> Result { + Ok(ExitCode::from( + run_main(env::args().collect::>()).await?, + )) } diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index 7d516e9..d40f08e 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -9,6 +9,7 @@ use std::io::Write; use std::sync::{Arc, Mutex}; // non-std crates +use anyhow::{anyhow, Context, Result}; use reqwest::{ header::{HeaderMap, HeaderValue, AUTHORIZATION}, Client, Method, Url, @@ -63,23 +64,23 @@ impl RestApiClient for GithubApiClient { tidy_checks_failed: Option, ) -> u64 { if let Ok(gh_out) = env::var("GITHUB_OUTPUT") { - let mut gh_out_file = OpenOptions::new() - .append(true) - .open(gh_out) - .expect("GITHUB_OUTPUT file could not be opened"); - for (prompt, value) in [ - ("checks-failed", Some(checks_failed)), - ("format-checks-failed", format_checks_failed), - ("tidy-checks-failed", tidy_checks_failed), - ] { - if let Err(e) = writeln!(gh_out_file, "{prompt}={}", value.unwrap_or(0),) { - log::error!("Could not write to GITHUB_OUTPUT file: {}", e); - break; + if let Ok(mut gh_out_file) = OpenOptions::new().append(true).open(gh_out) { + for (prompt, value) in [ + ("checks-failed", Some(checks_failed)), + ("format-checks-failed", format_checks_failed), + ("tidy-checks-failed", tidy_checks_failed), + ] { + if let Err(e) = writeln!(gh_out_file, "{prompt}={}", value.unwrap_or(0),) { + log::error!("Could not write to GITHUB_OUTPUT file: {}", e); + break; + } + } + if let Err(e) = gh_out_file.flush() { + log::debug!("Failed to flush buffer to GITHUB_OUTPUT file: {e:?}"); } + } else { + log::debug!("GITHUB_OUTPUT file could not be opened"); } - gh_out_file - .flush() - .expect("Failed to flush buffer to GITHUB_OUTPUT file"); } log::info!( "{} clang-format-checks-failed", @@ -93,24 +94,22 @@ impl RestApiClient for GithubApiClient { checks_failed } 
- fn make_headers() -> HeaderMap { + fn make_headers() -> Result> { let mut headers = HeaderMap::new(); headers.insert( "Accept", - HeaderValue::from_str("application/vnd.github.raw+json") - .expect("Failed to create a header value for the API return data type"), + HeaderValue::from_str("application/vnd.github.raw+json")?, ); // headers.insert("User-Agent", USER_AGENT.parse().unwrap()); if let Ok(token) = env::var("GITHUB_TOKEN") { - let mut val = HeaderValue::from_str(token.as_str()) - .expect("Failed to create a secure header value for the API token."); + let mut val = HeaderValue::from_str(token.as_str())?; val.set_sensitive(true); headers.insert(AUTHORIZATION, val); } - headers + Ok(headers) } - async fn get_list_of_changed_files(&self, file_filter: &FileFilter) -> Vec { + async fn get_list_of_changed_files(&self, file_filter: &FileFilter) -> Result> { if env::var("CI").is_ok_and(|val| val.as_str() == "true") && self.repo.is_some() && self.sha.is_some() @@ -121,39 +120,33 @@ impl RestApiClient for GithubApiClient { let sha = self.sha.clone().unwrap(); let url = self .api_url - .join("repos/") - .unwrap() - .join(format!("{}/", self.repo.as_ref().unwrap()).as_str()) - .unwrap() - .join(if is_pr { "pulls/" } else { "commits/" }) - .unwrap() - .join(if is_pr { pr.as_str() } else { sha.as_str() }) - .unwrap(); + .join("repos/")? + .join(format!("{}/", self.repo.as_ref().unwrap()).as_str())? + .join(if is_pr { "pulls/" } else { "commits/" })? 
+ .join(if is_pr { pr.as_str() } else { sha.as_str() })?; let mut diff_header = HeaderMap::new(); - diff_header.insert("Accept", "application/vnd.github.diff".parse().unwrap()); + diff_header.insert("Accept", "application/vnd.github.diff".parse()?); let request = Self::make_api_request( &self.client, url.as_str(), Method::GET, None, Some(diff_header), - ); - let response = Self::send_api_request( + )?; + match Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.to_owned(), 0, ) - .await; - match response { - Some(response) => { - if response.status.is_success() { - return parse_diff_from_buf(response.text.as_bytes(), file_filter); + .await + { + Ok(response) => { + if response.status().is_success() { + Ok(parse_diff_from_buf(&response.bytes().await?, file_filter)) } else { let endpoint = if is_pr { - Url::parse(format!("{}/files", url.as_str()).as_str()) - .expect("failed to parse URL endpoint") + Url::parse(format!("{}/files", url.as_str()).as_str())? } else { url }; @@ -161,16 +154,18 @@ impl RestApiClient for GithubApiClient { .await } } - None => { - panic!("Failed to connect with GitHub server to get list of changed files.") - } + Err(e) => Err(anyhow!( + "Failed to connect with GitHub server to get list of changed files." + ) + .context(e)), } } else { // get diff from libgit2 API - let repo = open_repo(".") - .expect("Please ensure the repository is checked out before running cpp-linter."); - let list = parse_diff(&get_diff(&repo), file_filter); - list + let repo = open_repo(".").with_context(|| { + "Please ensure the repository is checked out before running cpp-linter." 
+ })?; + let list = parse_diff(&get_diff(&repo)?, file_filter); + Ok(list) } } @@ -178,30 +173,32 @@ impl RestApiClient for GithubApiClient { &self, url: Url, file_filter: &FileFilter, - ) -> Vec { - let mut url = Some(Url::parse_with_params(url.as_str(), &[("page", "1")]).unwrap()); + ) -> Result> { + let mut url = Some(Url::parse_with_params(url.as_str(), &[("page", "1")])?); let mut files = vec![]; while let Some(ref endpoint) = url { let request = - Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None); + Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; let response = Self::send_api_request( self.client.clone(), request, - true, self.rate_limit_headers.clone(), 0, ) .await; - if let Some(response) = response { - url = Self::try_next_page(&response.headers); + if let Ok(response) = response { + url = Self::try_next_page(response.headers()); let files_list = if self.event_name != "pull_request" { - let json_value: PushEventFiles = serde_json::from_str(&response.text) - .expect("Failed to deserialize list of changed files from json response"); + let json_value: PushEventFiles = serde_json::from_str(&response.text().await?) + .with_context(|| { + "Failed to deserialize list of changed files from json response" + })?; json_value.files } else { - serde_json::from_str::>(&response.text).expect( - "Failed to deserialize list of file changes from Pull Request event.", - ) + serde_json::from_str::>(&response.text().await?) + .with_context(|| { + "Failed to deserialize list of file changes from Pull Request event." + })? 
}; for file in files_list { if let Some(patch) = file.patch { @@ -219,14 +216,14 @@ impl RestApiClient for GithubApiClient { } } } - files + Ok(files) } async fn post_feedback( &self, files: &[Arc>], feedback_inputs: FeedbackInput, - ) -> u64 { + ) -> Result { let tidy_checks_failed = tally_tidy_advice(files); let format_checks_failed = tally_format_advice(files); let mut comment = None; @@ -279,15 +276,15 @@ impl RestApiClient for GithubApiClient { format_checks_failed + tidy_checks_failed == 0, feedback_inputs.thread_comments == ThreadComments::Update, ) - .await; + .await?; } } if self.event_name == "pull_request" && (feedback_inputs.tidy_review || feedback_inputs.format_review) { - self.post_review(files, &feedback_inputs).await; + self.post_review(files, &feedback_inputs).await?; } - format_checks_failed + tidy_checks_failed + Ok(format_checks_failed + tidy_checks_failed) } } @@ -319,7 +316,7 @@ mod test { async fn create_comment(tidy_checks: &str, style: &str) -> (String, String) { let tmp_dir = tempdir().unwrap(); - let rest_api_client = GithubApiClient::default(); + let rest_api_client = GithubApiClient::new().unwrap(); if env::var("ACTIONS_STEP_DEBUG").is_ok_and(|var| var == "true") { assert!(rest_api_client.debug_enabled); } @@ -337,7 +334,8 @@ mod test { env::var("CLANG-VERSION").unwrap_or("".to_string()).as_str(), &mut clang_params, ) - .await; + .await + .unwrap(); let feedback_inputs = FeedbackInput { style: style.to_string(), step_summary: true, @@ -347,7 +345,10 @@ mod test { env::set_var("GITHUB_STEP_SUMMARY", step_summary_path.path()); let mut gh_out_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); env::set_var("GITHUB_OUTPUT", gh_out_path.path()); - rest_api_client.post_feedback(&files, feedback_inputs).await; + rest_api_client + .post_feedback(&files, feedback_inputs) + .await + .unwrap(); let mut step_summary_content = String::new(); step_summary_path .read_to_string(&mut step_summary_content) @@ -393,7 +394,7 @@ mod test { let mut server 
= Server::new_async().await; let url = Url::parse(server.url().as_str()).unwrap(); env::set_var("GITHUB_API_URL", server.url()); - let client = GithubApiClient::default(); + let client = GithubApiClient::new().unwrap(); let reset_timestamp = (Utc::now().timestamp() + 60).to_string(); let mock = server .mock("GET", "/") @@ -413,15 +414,16 @@ mod test { mock.create(); } let request = - GithubApiClient::make_api_request(&client.client, url, Method::GET, None, None); + GithubApiClient::make_api_request(&client.client, url, Method::GET, None, None) + .unwrap(); GithubApiClient::send_api_request( client.client.clone(), request, - true, client.rate_limit_headers.clone(), 0, ) - .await; + .await + .unwrap(); } #[tokio::test] diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 4d1abb0..11225c9 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -8,6 +8,7 @@ use std::{ sync::{Arc, Mutex}, }; +use anyhow::{Context, Result}; use reqwest::{Client, Method, Url}; use crate::{ @@ -22,49 +23,43 @@ use super::{ GithubApiClient, RestApiClient, }; -impl Default for GithubApiClient { - fn default() -> Self { - Self::new() - } -} - impl GithubApiClient { /// Instantiate a [`GithubApiClient`] object. - pub fn new() -> Self { + pub fn new() -> Result { let event_name = env::var("GITHUB_EVENT_NAME").unwrap_or(String::from("unknown")); let pull_request = { match event_name.as_str() { "pull_request" => { - let event_payload_path = env::var("GITHUB_EVENT_PATH") - .expect("GITHUB_EVENT_NAME is set to 'pull_request', but GITHUB_EVENT_PATH is not set"); + let event_payload_path = env::var("GITHUB_EVENT_PATH").with_context(|| { + "GITHUB_EVENT_NAME is set to 'pull_request', but GITHUB_EVENT_PATH is not set" + })?; let file_buf = &mut String::new(); OpenOptions::new() .read(true) - .open(event_payload_path) - .unwrap() + .open(event_payload_path.clone())? 
.read_to_string(file_buf) - .unwrap(); - let json = serde_json::from_str::>( - file_buf, - ) - .unwrap(); - json["number"].as_i64() + .with_context(|| { + format!("Failed to read event payload at {event_payload_path}") + })?; + let payload = + serde_json::from_str::>( + file_buf, + ) + .with_context(|| "Failed to deserialize event payload")?; + payload["number"].as_i64() } _ => None, } }; - let api_url = Url::parse( - env::var("GITHUB_API_URL") - .unwrap_or("https://api.github.com".to_string()) - .as_str(), - ) - .expect("Failed to parse URL from GITHUB_API_URL"); + let gh_api_url = env::var("GITHUB_API_URL").unwrap_or("https://api.github.com".to_string()); + let api_url = Url::parse(gh_api_url.clone().as_str()) + .with_context(|| format!("Failed to parse URL from GITHUB_API_URL: {}", gh_api_url))?; - GithubApiClient { + Ok(GithubApiClient { client: Client::builder() - .default_headers(Self::make_headers()) + .default_headers(Self::make_headers()?) .build() - .expect("Failed to create a session client for REST API calls"), + .with_context(|| "Failed to create a session client for REST API calls")?, pull_request, event_name, api_url, @@ -85,19 +80,18 @@ impl GithubApiClient { remaining: "x-ratelimit-remaining".to_string(), retry: "retry-after".to_string(), }, - } + }) } /// Append step summary to CI workflow's summary page. 
pub fn post_step_summary(&self, comment: &String) { if let Ok(gh_out) = env::var("GITHUB_STEP_SUMMARY") { - let mut gh_out_file = OpenOptions::new() - .append(true) - .open(gh_out) - .expect("GITHUB_STEP_SUMMARY file could not be opened"); - if let Err(e) = writeln!(gh_out_file, "\n{}\n", comment) { - log::error!("Could not write to GITHUB_STEP_SUMMARY file: {}", e); + if let Ok(mut gh_out_file) = OpenOptions::new().append(true).open(gh_out) { + if let Err(e) = writeln!(gh_out_file, "\n{}\n", comment) { + log::error!("Could not write to GITHUB_STEP_SUMMARY file: {}", e); + } } + log::error!("GITHUB_STEP_SUMMARY file could not be opened"); } } @@ -157,12 +151,11 @@ impl GithubApiClient { no_lgtm: bool, is_lgtm: bool, update_only: bool, - ) { + ) -> Result<()> { let comment_url = self .remove_bot_comments(&url, !update_only || (is_lgtm && no_lgtm)) - .await; - #[allow(clippy::nonminimal_bool)] // an inaccurate assessment - if (is_lgtm && !no_lgtm) || !is_lgtm { + .await?; + if !is_lgtm || !no_lgtm { let payload = HashMap::from([("body", comment.to_owned())]); // log::debug!("payload body:\n{:?}", payload); let req_meth = if comment_url.is_some() { @@ -180,160 +173,198 @@ impl GithubApiClient { req_meth, Some( serde_json::to_string(&payload) - .expect("Failed to serialize thread comment to json string"), + .with_context(|| "Failed to serialize thread comment to json string")?, ), None, - ); - Self::send_api_request( + )?; + match Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.to_owned(), 0, ) - .await; + .await + { + Ok(response) => { + if let Err(e) = response.error_for_status_ref() { + log::error!("Failed to post thread comment: {e:?}"); + if let Ok(text) = response.text().await { + log::error!("{text}"); + } + } + } + Err(e) => { + log::error!("Failed to post thread comment: {e:?}"); + } + } } + Ok(()) } /// Remove thread comments previously posted by cpp-linter. 
- async fn remove_bot_comments(&self, url: &Url, delete: bool) -> Option { + async fn remove_bot_comments(&self, url: &Url, delete: bool) -> Result> { let mut comment_url = None; let mut comments_url = Some( - Url::parse_with_params(url.as_str(), &[("page", "1")]) - .expect("Failed to parse invalid URL string"), + Url::parse_with_params(url.clone().as_str(), &[("page", "1")]).with_context(|| { + format!("Failed to parse URL to fetch existing comments: {url}") + })?, ); let repo = format!( "repos/{}/comments/", + // if we got here, then we know it is on a CI runner as self.repo should be known self.repo.as_ref().expect("Repo name unknown.") ); let base_comment_url = self.api_url.join(&repo).unwrap(); while let Some(ref endpoint) = comments_url { let request = - Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None); - let response = Self::send_api_request( + Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; + match Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.to_owned(), 0, ) - .await; - if response.is_none() || response.as_ref().is_some_and(|r| !r.status.is_success()) { - log::error!("Failed to get list of existing comments from {}", endpoint); - return comment_url; - } - comments_url = Self::try_next_page(&response.as_ref().unwrap().headers); - let payload: Vec = serde_json::from_str(&response.unwrap().text) - .expect("Failed to serialize response's text"); - for comment in payload { - if comment.body.starts_with(COMMENT_MARKER) { - log::debug!( - "comment id {} from user {} ({})", - comment.id, - comment.user.login, - comment.user.id, - ); - #[allow(clippy::nonminimal_bool)] // an inaccurate assessment - if delete || (!delete && comment_url.is_some()) { - // if not updating: remove all outdated comments - // if updating: remove all outdated comments except the last one - - // use last saved comment_url (if not None) or current comment url - let del_url = if let 
Some(last_url) = &comment_url { - last_url - } else { - let comment_id = comment.id.to_string(); - &base_comment_url - .join(&comment_id) - .expect("Failed to parse URL from JSON comment.url") - }; - let req = Self::make_api_request( - &self.client, - del_url.clone(), - Method::DELETE, - None, - None, - ); - Self::send_api_request( - self.client.clone(), - req, - false, - self.rate_limit_headers.to_owned(), - 0, - ) - .await; + .await + { + Ok(response) => { + if let Err(e) = response.error_for_status_ref() { + log::error!("Failed to get list of existing thread comments: {e:?}"); + if let Ok(text) = response.text().await { + log::error!("{text}"); + } + return Ok(comment_url); } - if !delete { - let comment_id = comment.id.to_string(); - comment_url = Some( - base_comment_url - .join(&comment_id) - .expect("Failed to parse URL from JSON comment.url"), - ) + comments_url = Self::try_next_page(response.headers()); + let payload: Vec = serde_json::from_str(&response.text().await?) + .with_context(|| { + "Failed to deserialize list of existing thread comments" + })?; + for comment in payload { + if comment.body.starts_with(COMMENT_MARKER) { + log::debug!( + "comment id {} from user {} ({})", + comment.id, + comment.user.login, + comment.user.id, + ); + let this_comment_url = base_comment_url + .join(&comment.id.to_string()) + .with_context(|| { + format!("Failed to parse URL for JSON comment.id: {}", comment.id) + })?; + if delete || comment_url.is_some() { + // if not updating: remove all outdated comments + // if updating: remove all outdated comments except the last one + + // use last saved comment_url (if not None) or current comment url + let del_url = if let Some(last_url) = &comment_url { + last_url + } else { + &this_comment_url + }; + let req = Self::make_api_request( + &self.client, + del_url.clone(), + Method::DELETE, + None, + None, + )?; + match Self::send_api_request( + self.client.clone(), + req, + self.rate_limit_headers.to_owned(), + 0, + ) + .await + 
{ + Ok(result) => { + if let Err(e) = result.error_for_status_ref() { + log::error!( + "Failed to delete old thread comment {e:?}" + ); + if let Ok(text) = result.text().await { + log::error!("{text}"); + } + } + } + Err(e) => { + log::error!("Failed to delete old thread comment: {e:?}") + } + } + } + if !delete { + comment_url = Some(this_comment_url) + } + } } } + Err(e) => { + log::error!("Failed to get list of existing thread comments: {e:?}"); + return Ok(comment_url); + } } } - comment_url + Ok(comment_url) } /// Post a PR review with code suggestions. /// /// Note: `--no-lgtm` is applied when nothing is suggested. - pub async fn post_review(&self, files: &[Arc>], feedback_input: &FeedbackInput) { + pub async fn post_review( + &self, + files: &[Arc>], + feedback_input: &FeedbackInput, + ) -> Result<()> { let url = self .api_url - .join("repos/") - .unwrap() - .join(format!("{}/", self.repo.as_ref().expect("Repo name unknown")).as_str()) - .unwrap() - .join("pulls/") - .unwrap() + .join("repos/")? + // if we got here, then we know it is on a CI runner and self.repo should be known + .join(format!("{}/", self.repo.as_ref().expect("Repo name unknown")).as_str())? + .join("pulls/")? .join( self.pull_request + // if we got here, then we know that it is a pull_request event .expect("pull request number unknown") .to_string() .as_str(), - ) - .unwrap(); - let request = Self::make_api_request(&self.client, url.as_str(), Method::GET, None, None); + )?; + let request = Self::make_api_request(&self.client, url.as_str(), Method::GET, None, None)?; let response = Self::send_api_request( self.client.clone(), request, - true, self.rate_limit_headers.clone(), 0, ) - .await; - let pr_info: PullRequestInfo = - serde_json::from_str(&response.expect("Failed to get PR info").text) - .expect("Failed to deserialize PR info"); + .await + .with_context(|| "Failed to get PR info")?; + let pr_info: PullRequestInfo = serde_json::from_str(&response.text().await?) 
+ .with_context(|| "Failed to deserialize PR info")?; - let url = Url::parse(format!("{}/", url.as_str()).as_str()) - .unwrap() + let url = Url::parse(format!("{}/", url.as_str()).as_str())? .join("reviews") - .expect("Failed to parse URL endpoint for PR reviews"); + .with_context(|| "Failed to parse URL endpoint for PR reviews")?; let dismissal = self.dismiss_outdated_reviews(&url); if pr_info.draft || pr_info.state != "open" { - dismissal.await; - return; + return dismissal.await; } - let summary_only = - env::var("CPP_LINTER_PR_REVIEW_SUMMARY_ONLY").unwrap_or("false".to_string()) == "true"; + let summary_only = ["true", "on", "1"].contains( + &env::var("CPP_LINTER_PR_REVIEW_SUMMARY_ONLY") + .unwrap_or("false".to_string()) + .as_str(), + ); let mut review_comments = ReviewComments::default(); for file in files { let file = file.lock().unwrap(); - file.make_suggestions_from_patch(&mut review_comments, summary_only); + file.make_suggestions_from_patch(&mut review_comments, summary_only)?; } let has_no_changes = review_comments.full_patch[0].is_empty() && review_comments.full_patch[1].is_empty(); if has_no_changes && feedback_input.no_lgtm { log::debug!("Not posting an approved review because `no-lgtm` is true"); - dismissal.await; - return; + return dismissal.await; } let mut payload = FullReview { event: if feedback_input.passive_reviews { @@ -356,93 +387,128 @@ impl GithubApiClient { comments }; } - dismissal.await; // free up the `url` variable + dismissal.await?; // free up the `url` variable let request = Self::make_api_request( &self.client, url, Method::POST, Some( serde_json::to_string(&payload) - .expect("Failed to serialize PR review to json string"), + .with_context(|| "Failed to serialize PR review to json string")?, ), None, - ); - let response = Self::send_api_request( + )?; + match Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.clone(), 0, ) - .await; - if response.is_none() || response.is_some_and(|r| 
!r.status.is_success()) { - log::error!("Failed to post a new PR review"); + .await + { + Ok(response) => { + if let Err(e) = response.error_for_status_ref() { + log::error!("Failed to post a new PR review: {e:?}"); + if let Ok(text) = response.text().await { + log::error!("{text}"); + } + } + } + Err(e) => { + log::error!("Failed to post a new PR review: {e:?}"); + } } + Ok(()) } /// Dismiss any outdated reviews generated by cpp-linter. - async fn dismiss_outdated_reviews(&self, url: &Url) { + async fn dismiss_outdated_reviews(&self, url: &Url) -> Result<()> { let mut url_ = Some( - Url::parse_with_params(url.as_str(), [("page", "1")]) - .expect("Failed to parse endpoint for getting existing PR reviews"), + Url::parse_with_params(url.as_str(), [("page", "1")]).with_context(|| { + format!( + "Failed to parse endpoint for getting existing PR reviews: {}", + url.as_str() + ) + })?, ); while let Some(ref endpoint) = url_ { let request = - Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None); + Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; let response = Self::send_api_request( self.client.clone(), request, - false, self.rate_limit_headers.clone(), 0, ) .await; - if response.is_none() || response.as_ref().is_some_and(|r| !r.status.is_success()) { + if response.is_err() || response.as_ref().is_ok_and(|r| !r.status().is_success()) { log::error!("Failed to get a list of existing PR reviews"); - return; + return Ok(()); } let response = response.unwrap(); - url_ = Self::try_next_page(&response.headers); - let payload: Vec = serde_json::from_str(&response.text) - .expect("Unable to deserialize malformed JSON about review comments"); - for review in payload { - if let Some(body) = &review.body { - if body.starts_with(COMMENT_MARKER) - && !(["PENDING", "DISMISSED"].contains(&review.state.as_str())) - { - // dismiss outdated review - let req = Self::make_api_request( - &self.client, - url.join("reviews/") - 
.unwrap() - .join(review.id.to_string().as_str()) - .expect("Failed to parse URL for dismissing outdated review."), - Method::PUT, - Some( - serde_json::json!( - { - "message": "outdated suggestion", - "event": "DISMISS" + url_ = Self::try_next_page(response.headers()); + match serde_json::from_str::>(&response.text().await?) { + Ok(payload) => { + for review in payload { + if let Some(body) = &review.body { + if body.starts_with(COMMENT_MARKER) + && !(["PENDING", "DISMISSED"].contains(&review.state.as_str())) + { + // dismiss outdated review + match url.join("reviews/")?.join(review.id.to_string().as_str()) { + Ok(dismiss_url) => { + if let Ok(req) = Self::make_api_request( + &self.client, + dismiss_url, + Method::PUT, + Some( + serde_json::json!( + { + "message": "outdated suggestion", + "event": "DISMISS" + } + ) + .to_string(), + ), + None, + ) { + match Self::send_api_request( + self.client.clone(), + req, + self.rate_limit_headers.clone(), + 0, + ) + .await + { + Ok(result) => { + if let Err(e) = result.error_for_status_ref() { + log::error!("Failed to dismiss outdated review: {e:?}"); + if let Ok(text) = result.text().await { + log::error!("{text}"); + } + } + } + Err(e) => { + log::error!( + "Failed to dismiss outdated review: {e:}" + ); + } + } + } } - ) - .to_string(), - ), - None, - ); - let result = Self::send_api_request( - self.client.clone(), - req, - false, - self.rate_limit_headers.clone(), - 0, - ) - .await; - if result.is_none() || result.is_some_and(|r| !r.status.is_success()) { - log::error!("Failed to dismiss outdated review"); + Err(e) => { + log::error!("Failed to parse URL for dismissing outdated review: {e:?}"); + } + } + } } } } + Err(e) => { + log::error!("Unable to deserialize malformed JSON about review comments: {e:?}") + } } } + Ok(()) } } diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index b0059cd..700674a 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -3,16 +3,18 
@@ //! //! Currently, only Github is supported. +use std::fmt::Debug; use std::future::Future; use std::path::PathBuf; use std::sync::{Arc, Mutex}; use std::time::Duration; // non-std crates +use anyhow::{anyhow, Context, Error, Result}; use chrono::DateTime; use futures::future::{BoxFuture, FutureExt}; use reqwest::header::{HeaderMap, HeaderValue}; -use reqwest::{Client, IntoUrl, Method, Request, Response, StatusCode, Url}; +use reqwest::{Client, IntoUrl, Method, Request, Response, Url}; // project specific modules pub mod github; @@ -34,28 +36,6 @@ pub struct RestApiRateLimitHeaders { pub retry: String, } -pub struct CachedResponse { - pub text: String, - pub headers: HeaderMap, - pub status: StatusCode, -} - -impl CachedResponse { - async fn from(response: Response) -> Self { - let headers = response.headers().to_owned(); - let status = response.status(); - let text = response - .text() - .await - .expect("Failed to decode response body to text."); - Self { - text, - headers, - status, - } - } -} - /// A custom trait that templates necessary functionality with a Git server's REST API. pub trait RestApiClient { /// A way to set output variables specific to cpp_linter executions in CI. @@ -70,7 +50,7 @@ pub trait RestApiClient { /// /// If an authentication token is provided (via environment variable), /// this method shall include the relative information. - fn make_headers() -> HeaderMap; + fn make_headers() -> Result>; /// Construct a HTTP request to be sent. 
/// @@ -100,7 +80,8 @@ pub trait RestApiClient { method: Method, data: Option, headers: Option, - ) -> Request { + ) -> Result { + let url_str = url.as_str().to_string(); let mut req = client.request(method, url); if let Some(h) = headers { req = req.headers(h); @@ -108,7 +89,8 @@ pub trait RestApiClient { if let Some(d) = data { req = req.body(d); } - req.build().expect("Failed to create a HTTP request") + req.build() + .with_context(|| format!("Failed to build request to {url_str}")) } /// A convenience function to send HTTP requests and respect a REST API rate limits. @@ -116,109 +98,93 @@ pub trait RestApiClient { /// This method must own all the data passed to it because asynchronous recursion is used. /// Recursion is needed when a secondary rate limit is hit. The server tells the client that /// it should back off and retry after a specified time interval. - /// - /// Setting the `strict` parameter to true will panic when the HTTP request fails to send or - /// the HTTP response's status code does not describe success. This should only be used for - /// requests that are vital to the app operation. - /// With `strict` as true, the returned type is guaranteed to be a [`Some`] value. - /// If `strict` is false, then a failure to send the request is returned as a [`None`] value, - /// and a [`Some`] value could also indicate if the server failed to process the request. fn send_api_request( client: Client, request: Request, - strict: bool, rate_limit_headers: RestApiRateLimitHeaders, retries: u64, - ) -> BoxFuture<'static, Option> { + ) -> BoxFuture<'static, Result> { async move { - match client - .execute( - request - .try_clone() - .expect("Could not clone HTTP request object"), - ) - .await - { - Ok(response) => { - if [403u16, 429u16].contains(&response.status().as_u16()) { - // rate limit exceeded - - // check if primary rate limit was violated; panic if so. 
- let remaining = response - .headers() - .get(&rate_limit_headers.remaining) - .expect("Response headers do not include remaining API usage count") - .to_str() - .expect("Failed to extract remaining attempts about rate limit") - .parse::() - .expect("Failed to parse i64 from remaining attempts about rate limit"); - if remaining <= 0 { - let reset = DateTime::from_timestamp( - response - .headers() - .get(&rate_limit_headers.reset) - .expect("response headers does not include a reset timestamp") - .to_str() - .expect("Failed to extract reset info about rate limit") - .parse::() - .expect("Failed to parse i64 from reset time about rate limit"), - 0, - ) - .expect("rate limit reset UTC timestamp is an invalid"); - panic!("REST API rate limit exceeded! Resets at {}", reset); - } + let result = client + .execute(request.try_clone().ok_or(anyhow!( + "Failed to clone request object for recursive behavior" + ))?) + .await; + if let Ok(response) = &result { + if [403u16, 429u16].contains(&response.status().as_u16()) { + // rate limit exceeded - // check if secondary rate limit is violated; backoff and try again. - if retries > 4 { - panic!("REST API secondary rate limit exceeded"); - } - if let Some(retry) = response.headers().get(&rate_limit_headers.retry) { - let interval = Duration::from_secs( - retry - .to_str() - .expect("Failed to extract retry interval about rate limit") - .parse::() - .expect( - "Failed to parse u64 from retry interval about rate limit", - ) - + retries.pow(2), - ); - tokio::time::sleep(interval).await; - return Self::send_api_request( - client, - request, - strict, - rate_limit_headers, - retries + 1, - ) - .await; + // check if primary rate limit was violated; panic if so. 
+ let mut requests_remaining = None; + if let Some(remaining) = response.headers().get(&rate_limit_headers.remaining) { + if let Ok(count) = remaining.to_str() { + if let Ok(value) = count.parse::() { + requests_remaining = Some(value); + } else { + log::debug!( + "Failed to parse i64 from remaining attempts about rate limit: {count}" + ); + } + } else { + log::debug!("Failed to extract remaining attempts about rate limit: {remaining:?}"); } + } else { + log::debug!("Response headers do not include remaining API usage count"); } - let cached_response = CachedResponse::from(response).await; - if !cached_response.status.is_success() { - let summary = format!( - "Got {} response from {} request to {}:\n{}", - cached_response.status.as_u16(), - request.method().as_str(), - request.url().as_str(), - cached_response.text, - ); - if strict { - panic!("{summary}"); + if requests_remaining.is_some_and(|v| v <= 0) { + if let Some(reset_value) = response.headers().get(&rate_limit_headers.reset) + { + if let Ok(epoch) = reset_value.to_str() { + if let Ok(value) = epoch.parse::() { + if let Some(reset) = DateTime::from_timestamp(value, 0) { + return Err(anyhow!( + "REST API rate limit exceeded! Resets at {}", + reset + )); + } else { + log::debug!("Rate limit reset UTC timestamp is an invalid: {value}"); + } + } else { + log::debug!( + "Failed to parse i64 from reset time about rate limit: {epoch}" + ); + } + } else { + log::debug!("Failed to extract reset info about rate limit: {reset_value:?}"); + } } else { - log::error!("{summary}"); + log::debug!("Response headers does not include a reset timestamp"); } } - Some(cached_response) - } - Err(e) => { - if strict { - panic!("Failed to complete the HTTP request.\n{}", e); - } else { - None + + // check if secondary rate limit is violated; backoff and try again. 
+ if retries > 4 { + return Err(anyhow!("REST API secondary rate limit exceeded")); + } + if let Some(retry_value) = response.headers().get(&rate_limit_headers.retry) { + if let Ok(retry_str) = retry_value.to_str() { + if let Ok(retry) = retry_str.parse::() { + let interval = Duration::from_secs(retry + retries.pow(2)); + tokio::time::sleep(interval).await; + } else { + log::debug!( + "Failed to parse u64 from retry interval about rate limit: {retry_str}" + ); + } + } else { + log::debug!("Failed to extract retry interval about rate limit: {retry_value:?}"); + } + return Self::send_api_request( + client, + request, + rate_limit_headers, + retries + 1, + ) + .await; } } } + result.map_err(Error::from) } .boxed() } @@ -231,7 +197,7 @@ pub trait RestApiClient { fn get_list_of_changed_files( &self, file_filter: &FileFilter, - ) -> impl Future>; + ) -> impl Future>>; /// A way to get the list of changed files using REST API calls that employ a paginated response. /// @@ -241,7 +207,7 @@ pub trait RestApiClient { &self, url: Url, file_filter: &FileFilter, - ) -> impl Future>; + ) -> impl Future>>; /// Makes a comment in MarkDown syntax based on the concerns in `format_advice` and /// `tidy_advice` about the given set of `files`. @@ -303,30 +269,31 @@ pub trait RestApiClient { &self, files: &[Arc>], user_inputs: FeedbackInput, - ) -> impl Future; + ) -> impl Future>; /// Gets the URL for the next page in a paginated response. /// /// Returns [`None`] if current response is the last page. 
fn try_next_page(headers: &HeaderMap) -> Option { - if headers.contains_key("link") { - let pages = headers["link"] - .to_str() - .expect("Failed to convert header value of links to a str") - .split(", "); - for page in pages { - if page.ends_with("; rel=\"next\"") { - let url = page - .split_once(">;") - .expect("header link for pagination is malformed") - .0 - .trim_start_matches("<") - .to_string(); - return Some( - Url::parse(&url) - .expect("Failed to parse next page link from response header"), - ); + if let Some(links) = headers.get("link") { + if let Ok(pg_str) = links.to_str() { + let pages = pg_str.split(", "); + for page in pages { + if page.ends_with("; rel=\"next\"") { + if let Some(link) = page.split_once(">;") { + let url = link.0.trim_start_matches("<").to_string(); + if let Ok(next) = Url::parse(&url) { + return Some(next); + } else { + log::debug!("Failed to parse next page link from response header"); + } + } else { + log::debug!("Response header link for pagination is malformed"); + } + } } + } else { + log::debug!("Failed to convert header value of links to a str"); } } None @@ -390,7 +357,7 @@ fn make_tidy_comment( rationale = tidy_note.rationale, concerned_code = if tidy_note.suggestion.is_empty() {String::from("")} else { format!("\n ```{ext}\n {suggestion}\n ```\n", - ext = file_path.extension().expect("file extension was not determined").to_string_lossy(), + ext = file_path.extension().unwrap_or_default().to_string_lossy(), suggestion = tidy_note.suggestion.join("\n "), ).to_string() }, diff --git a/cpp-linter/src/run.rs b/cpp-linter/src/run.rs index c2bcb0c..3167015 100644 --- a/cpp-linter/src/run.rs +++ b/cpp-linter/src/run.rs @@ -8,6 +8,7 @@ use std::path::Path; use std::sync::{Arc, Mutex}; // non-std crates +use anyhow::Result; use log::{set_max_level, LevelFilter}; #[cfg(feature = "openssl-vendored")] use openssl_probe; @@ -21,14 +22,11 @@ use crate::rest_api::{github::GithubApiClient, RestApiClient}; const VERSION: &str = 
env!("CARGO_PKG_VERSION"); -#[cfg(feature = "openssl-vendored")] fn probe_ssl_certs() { + #[cfg(feature = "openssl-vendored")] openssl_probe::init_ssl_cert_env_vars(); } -#[cfg(not(feature = "openssl-vendored"))] -fn probe_ssl_certs() {} - /// This is the backend entry point for console applications. /// /// The idea here is that all functionality is implemented in Rust. However, passing @@ -43,7 +41,7 @@ fn probe_ssl_certs() {} /// is used instead of python's `sys.argv`, then the list of strings includes the entry point /// alias ("path/to/cpp-linter.exe"). Thus, the parser in [`crate::cli`] will halt on an error /// because it is not configured to handle positional arguments. -pub async fn run_main(args: Vec) -> i32 { +pub async fn run_main(args: Vec) -> Result { probe_ssl_certs(); let arg_parser = get_arg_parser(); @@ -52,7 +50,7 @@ pub async fn run_main(args: Vec) -> i32 { if args.subcommand_matches("version").is_some() { println!("cpp-linter v{}", VERSION); - return 0; + return Ok(0); } logger::init().unwrap(); @@ -60,7 +58,7 @@ pub async fn run_main(args: Vec) -> i32 { if cli.version == "NO-VERSION" { log::error!("The `--version` arg is used to specify which version of clang to use."); log::error!("To get the cpp-linter version, use `cpp-linter version` sub-command."); - return 1; + return Ok(1); } if cli.repo_root != "." { @@ -68,7 +66,7 @@ pub async fn run_main(args: Vec) -> i32 { .unwrap_or_else(|_| panic!("'{}' is inaccessible or does not exist", cli.repo_root)); } - let rest_api_client = GithubApiClient::new(); + let rest_api_client = GithubApiClient::new()?; set_max_level(if cli.verbosity || rest_api_client.debug_enabled { LevelFilter::Debug } else { @@ -100,14 +98,14 @@ pub async fn run_main(args: Vec) -> i32 { // parse_diff(github_rest_api_payload) rest_api_client .get_list_of_changed_files(&file_filter) - .await + .await? } else { // walk the folder and look for files with specified extensions according to ignore values. 
- let mut all_files = file_filter.list_source_files("."); + let mut all_files = file_filter.list_source_files(".")?; if rest_api_client.event_name == "pull_request" && (cli.tidy_review || cli.format_review) { let changed_files = rest_api_client .get_list_of_changed_files(&file_filter) - .await; + .await?; for changed_file in changed_files { for file in &mut all_files { if changed_file.name == file.name { @@ -130,14 +128,16 @@ pub async fn run_main(args: Vec) -> i32 { let mut clang_params = ClangParams::from(&cli); let user_inputs = FeedbackInput::from(&cli); - capture_clang_tools_output(&mut arc_files, cli.version.as_str(), &mut clang_params).await; + capture_clang_tools_output(&mut arc_files, cli.version.as_str(), &mut clang_params).await?; start_log_group(String::from("Posting feedback")); - let checks_failed = rest_api_client.post_feedback(&arc_files, user_inputs).await; + let checks_failed = rest_api_client + .post_feedback(&arc_files, user_inputs) + .await?; end_log_group(); if env::var("PRE_COMMIT").is_ok_and(|v| v == "1") { - return (checks_failed > 1) as i32; + return Ok((checks_failed > 1) as u8); } - 0 + Ok(0) } #[cfg(test)] @@ -148,57 +148,49 @@ mod test { #[tokio::test] async fn run() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec![ - "cpp-linter".to_string(), - "-l".to_string(), - "false".to_string(), - "--repo-root".to_string(), - "tests".to_string(), - "demo/demo.cpp".to_string(), - ]) - .await, - 0 - ); + let result = run_main(vec![ + "cpp-linter".to_string(), + "-l".to_string(), + "false".to_string(), + "--repo-root".to_string(), + "tests".to_string(), + "demo/demo.cpp".to_string(), + ]) + .await; + assert!(result.is_ok_and(|v| v == 0)); } #[tokio::test] async fn run_version_command() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec!["cpp-linter".to_string(), "version".to_string()]).await, - 0 - ); + let result 
= run_main(vec!["cpp-linter".to_string(), "version".to_string()]).await; + assert!(result.is_ok_and(|v| v == 0)); } #[tokio::test] async fn run_force_debug_output() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec![ - "cpp-linter".to_string(), - "-l".to_string(), - "false".to_string(), - "-v".to_string(), - "debug".to_string(), - ]) - .await, - 0 - ); + let result = run_main(vec![ + "cpp-linter".to_string(), + "-l".to_string(), + "false".to_string(), + "-v".to_string(), + "debug".to_string(), + ]) + .await; + assert!(result.is_ok_and(|v| v == 0)); } #[tokio::test] async fn run_bad_version_input() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests - assert_eq!( - run_main(vec![ - "cpp-linter".to_string(), - "-l".to_string(), - "false".to_string(), - "-V".to_string() - ]) - .await, - 1 - ); + let result = run_main(vec![ + "cpp-linter".to_string(), + "-l".to_string(), + "false".to_string(), + "-V".to_string(), + ]) + .await; + assert!(result.is_ok_and(|v| v == 1)); } } diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index e4856b9..305a3c8 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -223,7 +223,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { args.push("-e=c".to_string()); } let result = run_main(args).await; - assert_eq!(result, 0); + assert!(result.is_ok_and(|v| v == 0)); for mock in mocks { mock.assert(); } diff --git a/cpp-linter/tests/paginated_changed_files.rs b/cpp-linter/tests/paginated_changed_files.rs index 32d8abd..d1f388a 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -55,7 +55,7 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { let mut server = mock_server().await; env::set_var("GITHUB_API_URL", server.url()); env::set_current_dir(tmp.path()).unwrap(); - let gh_client = 
GithubApiClient::new(); + let gh_client = GithubApiClient::new().unwrap(); let mut mocks = vec![]; let diff_end_point = format!( @@ -109,7 +109,10 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { } let file_filter = FileFilter::new(&[], vec!["cpp".to_string(), "hpp".to_string()]); - let files = gh_client.get_list_of_changed_files(&file_filter).await; + let files = gh_client + .get_list_of_changed_files(&file_filter) + .await + .unwrap(); assert_eq!(files.len(), 2); for file in files { assert!(["src/demo.cpp", "src/demo.hpp"].contains( diff --git a/cpp-linter/tests/reviews.rs b/cpp-linter/tests/reviews.rs index 9d8a2fe..d227983 100644 --- a/cpp-linter/tests/reviews.rs +++ b/cpp-linter/tests/reviews.rs @@ -203,7 +203,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { args.push("-e=c".to_string()); } let result = run_main(args).await; - assert_eq!(result, 0); + assert!(result.is_ok_and(|v| v == 0)); for mock in mocks { mock.assert(); } diff --git a/cspell.config.yml b/cspell.config.yml index d5a1fc0..16da1ff 100644 --- a/cspell.config.yml +++ b/cspell.config.yml @@ -1,6 +1,7 @@ version: "0.2" language: en words: + - bindgen - binstall - bndy - Boolish @@ -10,6 +11,7 @@ words: - consts - cppcoreguidelines - cstdio + - cstdlib - Doherty - endfor - endgroup diff --git a/node-binding/src/lib.rs b/node-binding/src/lib.rs index 3cb83c0..51832ea 100644 --- a/node-binding/src/lib.rs +++ b/node-binding/src/lib.rs @@ -1,8 +1,11 @@ #[macro_use] extern crate napi_derive; use ::cpp_linter::run::run_main; +use napi::bindgen_prelude::*; #[napi] -pub async fn main(args: Vec) -> i32 { - run_main(args).await +pub async fn main(args: Vec) -> Result { + run_main(args) + .await + .map_err(|e| Error::new(Status::GenericFailure, e.to_string())) } diff --git a/py-binding/src/lib.rs b/py-binding/src/lib.rs index dbe62a0..479345f 100644 --- a/py-binding/src/lib.rs +++ b/py-binding/src/lib.rs @@ -1,16 +1,17 @@ -use pyo3::prelude::*; +use 
pyo3::{exceptions::PyOSError, prelude::*}; use tokio::runtime::Builder; use ::cpp_linter::run::run_main; /// A wrapper for the ``::cpp_linter::run::run_main()``` #[pyfunction] -fn main(args: Vec) -> PyResult { +fn main(args: Vec) -> PyResult { Builder::new_multi_thread() .enable_all() .build() .unwrap() - .block_on(async { Ok(run_main(args).await) }) + .block_on(async { run_main(args).await }) + .map_err(|e| PyOSError::new_err(e.to_string())) } /// The python binding for the cpp_linter package. It only exposes a ``main()`` function From fcb343f9b43cc6ae738289e1b31f6d94a97449a7 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Thu, 26 Sep 2024 05:28:43 -0700 Subject: [PATCH 02/21] setup non-system python in CI's sdist build job --- .github/workflows/python-packaging.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/python-packaging.yml b/.github/workflows/python-packaging.yml index 7a501d0..297693c 100644 --- a/.github/workflows/python-packaging.yml +++ b/.github/workflows/python-packaging.yml @@ -155,6 +155,9 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: 3.x - name: Build sdist uses: PyO3/maturin-action@v1 with: From 9e8f3ded92ea7df70e86079f3467fdbba044cab5 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Thu, 26 Sep 2024 13:27:06 -0700 Subject: [PATCH 03/21] improve log output for silent errors --- cpp-linter/src/rest_api/github/mod.rs | 3 + .../src/rest_api/github/specific_api.rs | 60 +++++++++---------- cpp-linter/src/rest_api/mod.rs | 11 ++++ cpp-linter/tests/paginated_changed_files.rs | 3 + 4 files changed, 45 insertions(+), 32 deletions(-) diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index d40f08e..96c1b56 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -126,6 +126,7 @@ impl RestApiClient for GithubApiClient { .join(if is_pr { 
pr.as_str() } else { sha.as_str() })?; let mut diff_header = HeaderMap::new(); diff_header.insert("Accept", "application/vnd.github.diff".parse()?); + log::debug!("Getting file changes from {}", url.as_str()); let request = Self::make_api_request( &self.client, url.as_str(), @@ -150,6 +151,8 @@ impl RestApiClient for GithubApiClient { } else { url }; + Self::log_response(response, "Failed to get full diff for event").await; + log::debug!("Trying paginated request to {}", endpoint.as_str()); self.get_changed_files_paginated(endpoint, file_filter) .await } diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 11225c9..df3d86d 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -186,12 +186,7 @@ impl GithubApiClient { .await { Ok(response) => { - if let Err(e) = response.error_for_status_ref() { - log::error!("Failed to post thread comment: {e:?}"); - if let Ok(text) = response.text().await { - log::error!("{text}"); - } - } + Self::log_response(response, "Failed to post thread comment").await; } Err(e) => { log::error!("Failed to post thread comment: {e:?}"); @@ -227,11 +222,12 @@ impl GithubApiClient { .await { Ok(response) => { - if let Err(e) = response.error_for_status_ref() { - log::error!("Failed to get list of existing thread comments: {e:?}"); - if let Ok(text) = response.text().await { - log::error!("{text}"); - } + if !response.status().is_success() { + Self::log_response( + response, + "Failed to get list of existing thread comments", + ) + .await; return Ok(comment_url); } comments_url = Self::try_next_page(response.headers()); @@ -264,7 +260,7 @@ impl GithubApiClient { }; let req = Self::make_api_request( &self.client, - del_url.clone(), + del_url.as_str(), Method::DELETE, None, None, @@ -278,13 +274,12 @@ impl GithubApiClient { .await { Ok(result) => { - if let Err(e) = result.error_for_status_ref() { - log::error!( - "Failed to 
delete old thread comment {e:?}" - ); - if let Ok(text) = result.text().await { - log::error!("{text}"); - } + if !result.status().is_success() { + Self::log_response( + result, + "Failed to delete old thread comment", + ) + .await; } } Err(e) => { @@ -336,7 +331,7 @@ impl GithubApiClient { 0, ) .await - .with_context(|| "Failed to get PR info")?; + .with_context(|| format!("Failed to get PR info from {}", url.as_str()))?; let pr_info: PullRequestInfo = serde_json::from_str(&response.text().await?) .with_context(|| "Failed to deserialize PR info")?; @@ -390,7 +385,7 @@ impl GithubApiClient { dismissal.await?; // free up the `url` variable let request = Self::make_api_request( &self.client, - url, + url.clone(), Method::POST, Some( serde_json::to_string(&payload) @@ -407,11 +402,8 @@ impl GithubApiClient { .await { Ok(response) => { - if let Err(e) = response.error_for_status_ref() { - log::error!("Failed to post a new PR review: {e:?}"); - if let Ok(text) = response.text().await { - log::error!("{text}"); - } + if !response.status().is_success() { + Self::log_response(response, "Failed to post a new PR review").await; } } Err(e) => { @@ -442,7 +434,10 @@ impl GithubApiClient { ) .await; if response.is_err() || response.as_ref().is_ok_and(|r| !r.status().is_success()) { - log::error!("Failed to get a list of existing PR reviews"); + log::error!( + "Failed to get a list of existing PR reviews: {}", + endpoint.as_str() + ); return Ok(()); } let response = response.unwrap(); @@ -481,11 +476,12 @@ impl GithubApiClient { .await { Ok(result) => { - if let Err(e) = result.error_for_status_ref() { - log::error!("Failed to dismiss outdated review: {e:?}"); - if let Ok(text) = result.text().await { - log::error!("{text}"); - } + if !result.status().is_success() { + Self::log_response( + result, + "Failed to dismiss outdated review", + ) + .await; } } Err(e) => { diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index 700674a..60f14fd 100644 --- 
a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -298,6 +298,17 @@ pub trait RestApiClient { } None } + + fn log_response(response: Response, context: &str) -> impl Future + Send { + async move { + if let Err(e) = response.error_for_status_ref() { + log::error!("{}: {e:?}", context.to_owned()); + if let Ok(text) = response.text().await { + log::error!("{text}"); + } + } + } + } } fn make_format_comment( diff --git a/cpp-linter/tests/paginated_changed_files.rs b/cpp-linter/tests/paginated_changed_files.rs index d1f388a..74ef364 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -6,6 +6,7 @@ use tempfile::{NamedTempFile, TempDir}; use cpp_linter::{ common_fs::FileFilter, + logger, rest_api::{github::GithubApiClient, RestApiClient}, }; use std::{env, io::Write, path::Path}; @@ -56,6 +57,8 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { env::set_var("GITHUB_API_URL", server.url()); env::set_current_dir(tmp.path()).unwrap(); let gh_client = GithubApiClient::new().unwrap(); + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); let mut mocks = vec![]; let diff_end_point = format!( From 5835363a6e887ef9f353942aea5ef7bf0bc573a3 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Thu, 26 Sep 2024 14:21:57 -0700 Subject: [PATCH 04/21] remove trailing '/' in endpoint url --- cpp-linter/src/rest_api/github/mod.rs | 15 +++++---------- cpp-linter/src/rest_api/github/specific_api.rs | 15 +++++++++------ cpp-linter/tests/comments.rs | 6 +++--- 3 files changed, 17 insertions(+), 19 deletions(-) diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index 96c1b56..6f9f469 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -261,16 +261,11 @@ impl RestApiClient for GithubApiClient { let sha = self.sha.clone().unwrap() + "/"; let comments_url = self .api_url - 
.join("repos/") - .unwrap() - .join(format!("{}/", repo).as_str()) - .unwrap() - .join(if is_pr { "issues/" } else { "commits/" }) - .unwrap() - .join(if is_pr { pr.as_str() } else { sha.as_str() }) - .unwrap() - .join("comments/") - .unwrap(); + .join("repos/")? + .join(format!("{}/", repo).as_str())? + .join(if is_pr { "issues/" } else { "commits/" })? + .join(if is_pr { pr.as_str() } else { sha.as_str() })? + .join("comments")?; self.update_comment( comments_url, diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index df3d86d..7492763 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -205,7 +205,7 @@ impl GithubApiClient { })?, ); let repo = format!( - "repos/{}/comments/", + "repos/{}/comments", // if we got here, then we know it is on a CI runner as self.repo should be known self.repo.as_ref().expect("Repo name unknown.") ); @@ -243,11 +243,14 @@ impl GithubApiClient { comment.user.login, comment.user.id, ); - let this_comment_url = base_comment_url - .join(&comment.id.to_string()) - .with_context(|| { - format!("Failed to parse URL for JSON comment.id: {}", comment.id) - })?; + let this_comment_url = + Url::parse(format!("{base_comment_url}/{}", &comment.id).as_str()) + .with_context(|| { + format!( + "Failed to parse URL for JSON comment.id: {}", + comment.id + ) + })?; if delete || comment_url.is_some() { // if not updating: remove all outdated comments // if updating: remove all outdated comments except the last one diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index 305a3c8..08504a4 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -106,7 +106,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock( "GET", - format!("/repos/{REPO}/commits/{SHA}/comments/").as_str(), + format!("/repos/{REPO}/commits/{SHA}/comments").as_str(), ) .match_header("Accept", 
"application/vnd.github.raw+json") .match_header("Authorization", TOKEN) @@ -123,7 +123,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { .create(), ); } else { - let pr_endpoint = format!("/repos/{REPO}/issues/{PR}/comments/"); + let pr_endpoint = format!("/repos/{REPO}/issues/{PR}/comments"); for pg in ["1", "2"] { let link = if pg == "1" { format!("<{}{pr_endpoint}?page=2>; rel=\"next\"", server.url()) @@ -190,7 +190,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { .mock( "POST", format!( - "/repos/{REPO}/{}/comments/", + "/repos/{REPO}/{}/comments", if test_params.event_t == EventType::PullRequest { format!("issues/{PR}") } else { From b569b68fd0bc08da87d7156742e6347396854298 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Thu, 26 Sep 2024 18:00:11 -0700 Subject: [PATCH 05/21] add user-agent header; adjust auth haeader value adjust tests accordingly --- cpp-linter/src/rest_api/github/mod.rs | 4 ++-- cpp-linter/src/rest_api/github/specific_api.rs | 3 ++- cpp-linter/src/rest_api/mod.rs | 6 +++++- cpp-linter/tests/comments.rs | 9 ++++++--- cpp-linter/tests/paginated_changed_files.rs | 4 ++-- cpp-linter/tests/reviews.rs | 8 +++++--- 6 files changed, 22 insertions(+), 12 deletions(-) diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index 6f9f469..58d29b8 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -100,9 +100,9 @@ impl RestApiClient for GithubApiClient { "Accept", HeaderValue::from_str("application/vnd.github.raw+json")?, ); - // headers.insert("User-Agent", USER_AGENT.parse().unwrap()); if let Ok(token) = env::var("GITHUB_TOKEN") { - let mut val = HeaderValue::from_str(token.as_str())?; + log::debug!("Using auth token from GITHUB_TOKEN environment variable"); + let mut val = HeaderValue::from_str(format!("token {token}").as_str())?; val.set_sensitive(true); headers.insert(AUTHORIZATION, val); } diff --git 
a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 7492763..7a18abd 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -15,7 +15,7 @@ use crate::{ clang_tools::{clang_format::summarize_style, ReviewComments}, cli::FeedbackInput, common_fs::FileObj, - rest_api::{RestApiRateLimitHeaders, COMMENT_MARKER}, + rest_api::{RestApiRateLimitHeaders, COMMENT_MARKER, USER_AGENT}, }; use super::{ @@ -58,6 +58,7 @@ impl GithubApiClient { Ok(GithubApiClient { client: Client::builder() .default_headers(Self::make_headers()?) + .user_agent(USER_AGENT) .build() .with_context(|| "Failed to create a session client for REST API calls")?, pull_request, diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index 60f14fd..2e0b59d 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -22,7 +22,11 @@ use crate::cli::FeedbackInput; use crate::common_fs::{FileFilter, FileObj}; pub static COMMENT_MARKER: &str = "\n"; -pub static USER_OUTREACH: &str = "\n\nHave any feedback or feature suggestions? [Share it here.](https://github.com/cpp-linter/cpp-linter-action/issues)"; +pub static USER_OUTREACH: &str = concat!( + "\n\nHave any feedback or feature suggestions? [Share it here.]", + "(https://github.com/cpp-linter/cpp-linter-action/issues)" +); +pub static USER_AGENT: &str = concat!("cpp-linter/", env!("CARGO_PKG_VERSION")); /// A structure to contain the different forms of headers that /// describe a REST API's rate limit status. 
diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index 08504a4..7653ccd 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -94,7 +94,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", format!("/repos/{REPO}/{diff_end_point}").as_str()) .match_header("Accept", "application/vnd.github.diff") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_body_from_file(format!("{asset_path}patch.diff")) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -109,7 +109,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { format!("/repos/{REPO}/commits/{SHA}/comments").as_str(), ) .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .match_body(Matcher::Any) .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) .with_body_from_file(format!("{asset_path}push_comments_{SHA}.json")) @@ -134,7 +134,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", pr_endpoint.as_str()) .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .match_body(Matcher::Any) .match_query(Matcher::UrlEncoded("page".to_string(), pg.to_string())) .with_body_from_file(format!("{asset_path}pr_comments_pg{pg}.json")) @@ -153,6 +153,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("DELETE", comment_url.as_str()) .match_body(Matcher::Any) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_status(if test_params.fail_dismissal { 403 } else { 200 }) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -177,6 +178,7 @@ 
async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("PATCH", comment_url.as_str()) .match_body(new_comment_match.clone()) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_status(if test_params.fail_posting { 403 } else { 200 }) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -200,6 +202,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { .as_str(), ) .match_body(new_comment_match) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_status(if test_params.fail_posting { 403 } else { 200 }) diff --git a/cpp-linter/tests/paginated_changed_files.rs b/cpp-linter/tests/paginated_changed_files.rs index 74ef364..e151685 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -73,7 +73,7 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { server .mock("GET", diff_end_point.as_str()) .match_header("Accept", "application/vnd.github.diff") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_status(403) @@ -94,7 +94,7 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { server .mock("GET", pg_end_point.as_str()) .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .match_query(Matcher::UrlEncoded("page".to_string(), pg.to_string())) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) diff --git a/cpp-linter/tests/reviews.rs b/cpp-linter/tests/reviews.rs index d227983..a310aed 100644 
--- a/cpp-linter/tests/reviews.rs +++ b/cpp-linter/tests/reviews.rs @@ -84,7 +84,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", pr_endpoint.as_str()) .match_header("Accept", "application/vnd.github.diff") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_body_from_file(format!("{asset_path}pr_{PR}.diff")) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) @@ -94,7 +94,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", pr_endpoint.as_str()) .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_body( json!({"state": test_params.pr_state, "draft": test_params.pr_draft}).to_string(), ) @@ -109,7 +109,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("GET", reviews_endpoint.as_str()) .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", TOKEN) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .match_body(Matcher::Any) .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) .with_body_from_file(format!("{asset_path}pr_reviews.json")) @@ -127,6 +127,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("PUT", format!("{reviews_endpoint}/1807607546").as_str()) .match_body(r#"{"event":"DISMISS","message":"outdated suggestion"}"#) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_status(if test_params.fail_dismissal { 403 } else { 200 }) @@ -170,6 +171,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { server .mock("POST", reviews_endpoint.as_str()) 
.match_body(Matcher::Regex(expected_review_payload)) + .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_status(if test_params.fail_posting { 403 } else { 200 }) From 52e0244913bddeab0384b520fe1abe7503bfca35 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Fri, 27 Sep 2024 01:54:46 -0700 Subject: [PATCH 06/21] skip serializing `ReviewComment::start_line` if it is null --- cpp-linter/src/rest_api/github/serde_structs.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/cpp-linter/src/rest_api/github/serde_structs.rs b/cpp-linter/src/rest_api/github/serde_structs.rs index df03443..3f45cef 100644 --- a/cpp-linter/src/rest_api/github/serde_structs.rs +++ b/cpp-linter/src/rest_api/github/serde_structs.rs @@ -16,6 +16,7 @@ pub struct FullReview { pub struct ReviewDiffComment { pub body: String, pub line: i64, + #[serde(skip_serializing_if = "Option::is_none")] pub start_line: Option, pub path: String, } From e7490618b8c5a2f6873d560562959ca7aa8b2dd3 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Fri, 27 Sep 2024 02:23:21 -0700 Subject: [PATCH 07/21] add tests; review error handling --- cpp-linter/src/clang_tools/clang_format.rs | 18 +- cpp-linter/src/clang_tools/clang_tidy.rs | 20 +- cpp-linter/src/rest_api/github/mod.rs | 87 +++--- .../src/rest_api/github/specific_api.rs | 14 +- cpp-linter/src/rest_api/mod.rs | 275 +++++++++++++++++- cpp-linter/src/run.rs | 21 +- cpp-linter/tests/paginated_changed_files.rs | 90 +++--- 7 files changed, 389 insertions(+), 136 deletions(-) diff --git a/cpp-linter/src/clang_tools/clang_format.rs b/cpp-linter/src/clang_tools/clang_format.rs index 52b0480..7b31c13 100644 --- a/cpp-linter/src/clang_tools/clang_format.rs +++ b/cpp-linter/src/clang_tools/clang_format.rs @@ -160,17 +160,13 @@ pub fn run_clang_format( .output() .with_context(|| format!("Failed to get replacements from 
clang-format: {file_name}"))?; if !output.stderr.is_empty() || !output.status.success() { - if let Ok(stderr) = String::from_utf8(output.stderr) { - logs.push(( - log::Level::Debug, - format!("clang-format raised the follow errors:\n{}", stderr), - )); - } else { - logs.push(( - log::Level::Error, - "stderr from clang-format was not UTF-8 encoded".to_string(), - )); - } + logs.push(( + log::Level::Debug, + format!( + "clang-format raised the follow errors:\n{}", + String::from_utf8_lossy(&output.stderr) + ), + )); } if output.stdout.is_empty() { return Ok(logs); diff --git a/cpp-linter/src/clang_tools/clang_tidy.rs b/cpp-linter/src/clang_tools/clang_tidy.rs index 46bd6a6..52d5640 100644 --- a/cpp-linter/src/clang_tools/clang_tidy.rs +++ b/cpp-linter/src/clang_tools/clang_tidy.rs @@ -304,21 +304,17 @@ pub fn run_clang_tidy( log::Level::Debug, format!( "Output from clang-tidy:\n{}", - String::from_utf8(output.stdout.to_vec()).unwrap() + String::from_utf8_lossy(&output.stdout) ), )); if !output.stderr.is_empty() { - if let Ok(stderr) = String::from_utf8(output.stderr) { - logs.push(( - log::Level::Debug, - format!("clang-tidy made the following summary:\n{}", stderr), - )); - } else { - logs.push(( - log::Level::Error, - "clang-tidy stderr is not UTF-8 encoded".to_string(), - )); - } + logs.push(( + log::Level::Debug, + format!( + "clang-tidy made the following summary:\n{}", + String::from_utf8_lossy(&output.stderr) + ), + )); } file.tidy_advice = parse_tidy_output(&output.stdout, &clang_params.database_json)?; if clang_params.tidy_review { diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index 58d29b8..21b3cdf 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -292,31 +292,31 @@ mod test { default::Default, env, io::Read, - path::PathBuf, + path::{Path, PathBuf}, sync::{Arc, Mutex}, }; - use chrono::Utc; - use mockito::{Matcher, Server}; use regex::Regex; - use reqwest::{Method, 
Url}; use tempfile::{tempdir, NamedTempFile}; use super::GithubApiClient; use crate::{ clang_tools::capture_clang_tools_output, cli::{ClangParams, FeedbackInput, LinesChangedOnly}, - common_fs::FileObj, + common_fs::{FileFilter, FileObj}, + logger, rest_api::{RestApiClient, USER_OUTREACH}, }; // ************************* tests for step-summary and output variables - async fn create_comment(tidy_checks: &str, style: &str) -> (String, String) { + async fn create_comment(tidy_checks: &str, style: &str, fail_gh_out: bool) -> (String, String) { let tmp_dir = tempdir().unwrap(); let rest_api_client = GithubApiClient::new().unwrap(); + logger::init().unwrap(); if env::var("ACTIONS_STEP_DEBUG").is_ok_and(|var| var == "true") { assert!(rest_api_client.debug_enabled); + log::set_max_level(log::LevelFilter::Debug); } let mut files = vec![Arc::new(Mutex::new(FileObj::new(PathBuf::from( "tests/demo/demo.cpp", @@ -342,7 +342,14 @@ mod test { let mut step_summary_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); env::set_var("GITHUB_STEP_SUMMARY", step_summary_path.path()); let mut gh_out_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); - env::set_var("GITHUB_OUTPUT", gh_out_path.path()); + env::set_var( + "GITHUB_OUTPUT", + if fail_gh_out { + Path::new("not-a-file.txt") + } else { + gh_out_path.path() + }, + ); rest_api_client .post_feedback(&files, feedback_inputs) .await @@ -354,13 +361,15 @@ mod test { assert!(&step_summary_content.contains(USER_OUTREACH)); let mut gh_out_content = String::new(); gh_out_path.read_to_string(&mut gh_out_content).unwrap(); - assert!(gh_out_content.starts_with("checks-failed=")); + if !fail_gh_out { + assert!(gh_out_content.starts_with("checks-failed=")); + } (step_summary_content, gh_out_content) } #[tokio::test] async fn check_comment_concerns() { - let (comment, gh_out) = create_comment("readability-*", "file").await; + let (comment, gh_out) = create_comment("readability-*", "file", false).await; 
assert!(&comment.contains(":warning:\nSome files did not pass the configured checks!\n")); let fmt_pattern = Regex::new(r"format-checks-failed=(\d+)\n").unwrap(); let tidy_pattern = Regex::new(r"tidy-checks-failed=(\d+)\n").unwrap(); @@ -380,7 +389,7 @@ mod test { #[tokio::test] async fn check_comment_lgtm() { env::set_var("ACTIONS_STEP_DEBUG", "true"); - let (comment, gh_out) = create_comment("-*", "").await; + let (comment, gh_out) = create_comment("-*", "", false).await; assert!(&comment.contains(":heavy_check_mark:\nNo problems need attention.")); assert_eq!( &gh_out, @@ -388,53 +397,23 @@ mod test { ); } - async fn simulate_rate_limit(secondary: bool) { - let mut server = Server::new_async().await; - let url = Url::parse(server.url().as_str()).unwrap(); - env::set_var("GITHUB_API_URL", server.url()); - let client = GithubApiClient::new().unwrap(); - let reset_timestamp = (Utc::now().timestamp() + 60).to_string(); - let mock = server - .mock("GET", "/") - .match_body(Matcher::Any) - .expect_at_least(1) - .expect_at_most(5) - .with_status(429) - .with_header( - &client.rate_limit_headers.remaining, - if secondary { "1" } else { "0" }, - ) - .with_header(&client.rate_limit_headers.reset, &reset_timestamp); - if secondary { - mock.with_header(&client.rate_limit_headers.retry, "0") - .create(); - } else { - mock.create(); - } - let request = - GithubApiClient::make_api_request(&client.client, url, Method::GET, None, None) - .unwrap(); - GithubApiClient::send_api_request( - client.client.clone(), - request, - client.rate_limit_headers.clone(), - 0, - ) - .await - .unwrap(); - } - #[tokio::test] - #[ignore] - #[should_panic(expected = "REST API secondary rate limit exceeded")] - async fn secondary_rate_limit() { - simulate_rate_limit(true).await; + async fn fail_gh_output() { + env::set_var("ACTIONS_STEP_DEBUG", "true"); + let (comment, gh_out) = create_comment("-*", "", true).await; + assert!(&comment.contains(":heavy_check_mark:\nNo problems need attention.")); + 
assert_eq!(&gh_out, ""); } #[tokio::test] - #[ignore] - #[should_panic(expected = "REST API rate limit exceeded!")] - async fn primary_rate_limit() { - simulate_rate_limit(false).await; + async fn fail_get_local_diff() { + env::set_var("CI", "false"); + let tmp_dir = tempdir().unwrap(); + env::set_current_dir(tmp_dir.path()).unwrap(); + let rest_client = GithubApiClient::new().unwrap(); + let files = rest_client + .get_list_of_changed_files(&FileFilter::new(&[], vec![])) + .await; + assert!(files.is_err()) } } diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 7a18abd..12216e6 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -30,9 +30,9 @@ impl GithubApiClient { let pull_request = { match event_name.as_str() { "pull_request" => { - let event_payload_path = env::var("GITHUB_EVENT_PATH").with_context(|| { - "GITHUB_EVENT_NAME is set to 'pull_request', but GITHUB_EVENT_PATH is not set" - })?; + // GITHUB_*** env vars cannot be overwritten in CI runners on GitHub. + let event_payload_path = env::var("GITHUB_EVENT_PATH")?; + // event payload JSON file can be overwritten/removed in CI runners let file_buf = &mut String::new(); OpenOptions::new() .read(true) @@ -51,16 +51,15 @@ impl GithubApiClient { _ => None, } }; + // GITHUB_*** env vars cannot be overwritten in CI runners on GitHub. let gh_api_url = env::var("GITHUB_API_URL").unwrap_or("https://api.github.com".to_string()); - let api_url = Url::parse(gh_api_url.clone().as_str()) - .with_context(|| format!("Failed to parse URL from GITHUB_API_URL: {}", gh_api_url))?; + let api_url = Url::parse(gh_api_url.as_str())?; Ok(GithubApiClient { client: Client::builder() .default_headers(Self::make_headers()?) 
.user_agent(USER_AGENT) - .build() - .with_context(|| "Failed to create a session client for REST API calls")?, + .build()?, pull_request, event_name, api_url, @@ -87,6 +86,7 @@ impl GithubApiClient { /// Append step summary to CI workflow's summary page. pub fn post_step_summary(&self, comment: &String) { if let Ok(gh_out) = env::var("GITHUB_STEP_SUMMARY") { + // step summary MD file can be overwritten/removed in CI runners if let Ok(mut gh_out_file) = OpenOptions::new().append(true).open(gh_out) { if let Err(e) = writeln!(gh_out_file, "\n{}\n", comment) { log::error!("Could not write to GITHUB_STEP_SUMMARY file: {}", e); diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index 2e0b59d..9c4de1b 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -129,10 +129,10 @@ pub trait RestApiClient { "Failed to parse i64 from remaining attempts about rate limit: {count}" ); } - } else { - log::debug!("Failed to extract remaining attempts about rate limit: {remaining:?}"); } } else { + // NOTE: I guess it is sometimes valid for a request to + // not include remaining rate limit attempts log::debug!("Response headers do not include remaining API usage count"); } if requests_remaining.is_some_and(|v| v <= 0) { @@ -145,20 +145,17 @@ pub trait RestApiClient { "REST API rate limit exceeded! Resets at {}", reset )); - } else { - log::debug!("Rate limit reset UTC timestamp is an invalid: {value}"); } } else { log::debug!( "Failed to parse i64 from reset time about rate limit: {epoch}" ); } - } else { - log::debug!("Failed to extract reset info about rate limit: {reset_value:?}"); } } else { log::debug!("Response headers does not include a reset timestamp"); } + return Err(anyhow!("REST API rate limit exceeded!")); } // check if secondary rate limit is violated; backoff and try again. 
@@ -175,8 +172,6 @@ pub trait RestApiClient { "Failed to parse u64 from retry interval about rate limit: {retry_str}" ); } - } else { - log::debug!("Failed to extract retry interval about rate limit: {retry_value:?}"); } return Self::send_api_request( client, @@ -296,8 +291,6 @@ pub trait RestApiClient { } } } - } else { - log::debug!("Failed to convert header value of links to a str"); } } None @@ -390,3 +383,265 @@ fn make_tidy_comment( comment.push_str(&tidy_comment); comment.push_str(&closer); } + +/// This module tests the silent errors' debug logs +/// from `try_next_page()` and `send_api_request()` functions. +#[cfg(test)] +mod test { + use std::sync::{Arc, Mutex}; + + use anyhow::{anyhow, Result}; + use chrono::Utc; + use mockito::{Matcher, Server}; + use reqwest::{ + header::{HeaderMap, HeaderValue}, + Client, + }; + use reqwest::{Method, Url}; + + use crate::{ + cli::FeedbackInput, + common_fs::{FileFilter, FileObj}, + logger, + }; + + use super::{RestApiClient, RestApiRateLimitHeaders}; + + /// A dummy struct to impl RestApiClient + #[derive(Default)] + struct TestClient {} + + impl RestApiClient for TestClient { + fn set_exit_code( + &self, + _checks_failed: u64, + _format_checks_failed: Option, + _tidy_checks_failed: Option, + ) -> u64 { + 0 + } + + fn make_headers() -> Result> { + Err(anyhow!("Not implemented")) + } + + async fn get_list_of_changed_files( + &self, + _file_filter: &FileFilter, + ) -> Result> { + Err(anyhow!("Not implemented")) + } + + async fn get_changed_files_paginated( + &self, + _url: reqwest::Url, + _file_filter: &FileFilter, + ) -> Result> { + Err(anyhow!("Not implemented")) + } + + async fn post_feedback( + &self, + _files: &[Arc>], + _user_inputs: FeedbackInput, + ) -> Result { + Err(anyhow!("Not implemented")) + } + } + + #[tokio::test] + async fn dummy_coverage() { + assert!(TestClient::make_headers().is_err()); + let dummy = TestClient::default(); + assert_eq!(dummy.set_exit_code(1, None, None), 0); + assert!(dummy + 
.get_list_of_changed_files(&FileFilter::new(&[], vec![])) + .await + .is_err()); + assert!(dummy + .get_changed_files_paginated( + Url::parse("https://example.net").unwrap(), + &FileFilter::new(&[], vec![]) + ) + .await + .is_err()); + assert!(dummy + .post_feedback(&[], FeedbackInput::default()) + .await + .is_err()) + } + + // ************************************************* try_next_page() tests + + #[test] + fn bad_link_header() { + let mut headers = HeaderMap::with_capacity(1); + assert!(headers + .insert("link", HeaderValue::from_str("; rel=\"next\"").unwrap()) + .is_none()); + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); + let result = TestClient::try_next_page(&headers); + assert!(result.is_none()); + } + + #[test] + fn bad_link_domain() { + let mut headers = HeaderMap::with_capacity(1); + assert!(headers + .insert( + "link", + HeaderValue::from_str("; rel=\"next\"").unwrap() + ) + .is_none()); + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); + let result = TestClient::try_next_page(&headers); + assert!(result.is_none()); + } + + // ************************************************* Rate Limit Tests + + #[derive(Default)] + struct RateLimitTestParams { + secondary: bool, + has_remaining_count: bool, + bad_remaining_count: bool, + has_reset_timestamp: bool, + bad_reset_timestamp: bool, + has_retry_interval: bool, + bad_retry_interval: bool, + } + + async fn simulate_rate_limit(test_params: &RateLimitTestParams) { + let rate_limit_headers = RestApiRateLimitHeaders { + reset: "reset".to_string(), + remaining: "remaining".to_string(), + retry: "retry".to_string(), + }; + logger::init().unwrap(); + log::set_max_level(log::LevelFilter::Debug); + + let mut server = Server::new_async().await; + let client = Client::new(); + let reset_timestamp = (Utc::now().timestamp() + 60).to_string(); + let mut mock = server + .mock("GET", "/") + .match_body(Matcher::Any) + .expect_at_least(1) + .expect_at_most(5) + 
.with_status(429); + if test_params.has_remaining_count { + mock = mock.with_header( + &rate_limit_headers.remaining, + if test_params.secondary { + "1" + } else if test_params.bad_remaining_count { + "X" + } else { + "0" + }, + ); + } + if test_params.has_reset_timestamp { + mock = mock.with_header( + &rate_limit_headers.reset, + if test_params.bad_reset_timestamp { + "X" + } else { + &reset_timestamp + }, + ); + } + if test_params.secondary && test_params.has_retry_interval { + mock.with_header( + &rate_limit_headers.retry, + if test_params.bad_retry_interval { + "X" + } else { + "0" + }, + ) + .create(); + } else { + mock.create(); + } + let request = + TestClient::make_api_request(&client, server.url(), Method::GET, None, None).unwrap(); + TestClient::send_api_request(client.clone(), request, rate_limit_headers.clone(), 0) + .await + .unwrap(); + } + + #[tokio::test] + #[ignore] + #[should_panic(expected = "REST API secondary rate limit exceeded")] + async fn rate_limit_secondary() { + simulate_rate_limit(&RateLimitTestParams { + secondary: true, + has_retry_interval: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[ignore] + #[should_panic(expected = "REST API secondary rate limit exceeded")] + async fn rate_limit_bad_retry() { + simulate_rate_limit(&RateLimitTestParams { + secondary: true, + has_retry_interval: true, + bad_retry_interval: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[ignore] + #[should_panic(expected = "REST API rate limit exceeded!")] + async fn rate_limit_primary() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + has_reset_timestamp: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[ignore] + #[should_panic(expected = "REST API rate limit exceeded!")] + async fn rate_limit_no_reset() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[ignore] + 
#[should_panic(expected = "REST API rate limit exceeded!")] + async fn rate_limit_bad_reset() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + has_reset_timestamp: true, + bad_reset_timestamp: true, + ..Default::default() + }) + .await; + } + + #[tokio::test] + #[ignore] + async fn rate_limit_bad_count() { + simulate_rate_limit(&RateLimitTestParams { + has_remaining_count: true, + bad_remaining_count: true, + ..Default::default() + }) + .await; + } +} diff --git a/cpp-linter/src/run.rs b/cpp-linter/src/run.rs index 3167015..c16b7e8 100644 --- a/cpp-linter/src/run.rs +++ b/cpp-linter/src/run.rs @@ -146,7 +146,7 @@ mod test { use std::env; #[tokio::test] - async fn run() { + async fn normal() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests let result = run_main(vec![ "cpp-linter".to_string(), @@ -161,14 +161,14 @@ mod test { } #[tokio::test] - async fn run_version_command() { + async fn version_command() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests let result = run_main(vec!["cpp-linter".to_string(), "version".to_string()]).await; assert!(result.is_ok_and(|v| v == 0)); } #[tokio::test] - async fn run_force_debug_output() { + async fn force_debug_output() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests let result = run_main(vec![ "cpp-linter".to_string(), @@ -182,7 +182,7 @@ mod test { } #[tokio::test] - async fn run_bad_version_input() { + async fn bad_version_input() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests let result = run_main(vec![ "cpp-linter".to_string(), @@ -193,4 +193,17 @@ mod test { .await; assert!(result.is_ok_and(|v| v == 1)); } + + #[tokio::test] + async fn pre_commit_env() { + env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests + env::set_var("PRE_COMMIT", "1"); + let result = run_main(vec![ + "cpp-linter".to_string(), 
+ "-l".to_string(), + "false".to_string(), + ]) + .await; + assert!(result.is_ok_and(|v| v == 1)); + } } diff --git a/cpp-linter/tests/paginated_changed_files.rs b/cpp-linter/tests/paginated_changed_files.rs index e151685..ede4843 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -22,8 +22,9 @@ const SHA: &str = "DEADBEEF"; const TOKEN: &str = "123456"; const RESET_RATE_LIMIT_HEADER: &str = "x-ratelimit-reset"; const REMAINING_RATE_LIMIT_HEADER: &str = "x-ratelimit-remaining"; +const MALFORMED_RESPONSE_PAYLOAD: &str = "{\"message\":\"Resource not accessible by integration\"}"; -async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { +async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_serialization: bool) { env::set_var("GITHUB_REPOSITORY", REPO); env::set_var("GITHUB_SHA", SHA); env::set_var("GITHUB_TOKEN", TOKEN); @@ -84,68 +85,81 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType) { } else { format!("{diff_end_point}/files") }; - for pg in 1..=2 { + let pg_count = if fail_serialization { 1 } else { 2 }; + for pg in 1..=pg_count { let link = if pg == 1 { format!("<{}{pg_end_point}?page=2>; rel=\"next\"", server.url()) } else { "".to_string() }; - mocks.push( - server - .mock("GET", pg_end_point.as_str()) - .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", format!("token {TOKEN}").as_str()) - .match_query(Matcher::UrlEncoded("page".to_string(), pg.to_string())) - .with_header(REMAINING_RATE_LIMIT_HEADER, "50") - .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) - .with_body_from_file(format!( - "{asset_path}/{}_files_pg{pg}.json", - if event_type == EventType::Push { - "push" - } else { - "pull_request" - } - )) - .with_header("link", link.as_str()) - .create(), - ); + let mut mock = server + .mock("GET", pg_end_point.as_str()) + .match_header("Accept", "application/vnd.github.raw+json") 
+ .match_header("Authorization", format!("token {TOKEN}").as_str()) + .match_query(Matcher::UrlEncoded("page".to_string(), pg.to_string())) + .with_header(REMAINING_RATE_LIMIT_HEADER, "50") + .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) + .with_header("link", link.as_str()); + if fail_serialization { + mock = mock.with_body(MALFORMED_RESPONSE_PAYLOAD); + } else { + mock = mock.with_body_from_file(format!( + "{asset_path}/{}_files_pg{pg}.json", + if event_type == EventType::Push { + "push" + } else { + "pull_request" + } + )); + } + mocks.push(mock.create()); } let file_filter = FileFilter::new(&[], vec!["cpp".to_string(), "hpp".to_string()]); - let files = gh_client - .get_list_of_changed_files(&file_filter) - .await - .unwrap(); - assert_eq!(files.len(), 2); - for file in files { - assert!(["src/demo.cpp", "src/demo.hpp"].contains( - &file - .name - .as_path() - .to_str() - .expect("Failed to get file name from path") - )); + let files = gh_client.get_list_of_changed_files(&file_filter).await; + if let Ok(files) = files { + // if !fail_serialization + assert_eq!(files.len(), 2); + for file in files { + assert!(["src/demo.cpp", "src/demo.hpp"].contains( + &file + .name + .as_path() + .to_str() + .expect("Failed to get file name from path") + )); + } } for mock in mocks { mock.assert(); } } -async fn test_get_changes(event_type: EventType) { +async fn test_get_changes(event_type: EventType, fail_serialization: bool) { let tmp_dir = create_test_space(false); let lib_root = env::current_dir().unwrap(); env::set_current_dir(tmp_dir.path()).unwrap(); - get_paginated_changes(&lib_root, event_type).await; + get_paginated_changes(&lib_root, event_type, fail_serialization).await; env::set_current_dir(lib_root.as_path()).unwrap(); drop(tmp_dir); } #[tokio::test] async fn get_push_files_paginated() { - test_get_changes(EventType::Push).await + test_get_changes(EventType::Push, false).await } #[tokio::test] async fn get_pr_files_paginated() { - 
test_get_changes(EventType::PullRequest(42)).await + test_get_changes(EventType::PullRequest(42), false).await +} + +#[tokio::test] +async fn fail_push_files_paginated() { + test_get_changes(EventType::Push, true).await +} + +#[tokio::test] +async fn fail_pr_files_paginated() { + test_get_changes(EventType::PullRequest(42), true).await } From d18d282668161c6a19dc89375d76b92a1f615514 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Fri, 27 Sep 2024 03:41:23 -0700 Subject: [PATCH 08/21] fix review dismissal and thread comment updating --- .../src/rest_api/github/serde_structs.rs | 3 + .../src/rest_api/github/specific_api.rs | 85 +++++++++---------- cpp-linter/tests/comments.rs | 9 +- cpp-linter/tests/reviews.rs | 5 +- 4 files changed, 56 insertions(+), 46 deletions(-) diff --git a/cpp-linter/src/rest_api/github/serde_structs.rs b/cpp-linter/src/rest_api/github/serde_structs.rs index 3f45cef..c23eb40 100644 --- a/cpp-linter/src/rest_api/github/serde_structs.rs +++ b/cpp-linter/src/rest_api/github/serde_structs.rs @@ -36,6 +36,9 @@ impl From for ReviewDiffComment { } } +/// A constant string used as a payload to dismiss PR reviews. +pub const REVIEW_DISMISSAL: &str = r#"{"event":"DISMISS","message":"outdated suggestion"}"#; + /// A structure for deserializing a single changed file in a CI event. 
#[derive(Debug, Deserialize, PartialEq, Clone)] pub struct GithubChangedFile { diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 12216e6..e059c58 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -19,7 +19,10 @@ use crate::{ }; use super::{ - serde_structs::{FullReview, PullRequestInfo, ReviewComment, ReviewDiffComment, ThreadComment}, + serde_structs::{ + FullReview, PullRequestInfo, ReviewComment, ReviewDiffComment, ThreadComment, + REVIEW_DISMISSAL, + }, GithubApiClient, RestApiClient, }; @@ -206,9 +209,14 @@ impl GithubApiClient { })?, ); let repo = format!( - "repos/{}/comments", + "repos/{}{}/comments", // if we got here, then we know it is on a CI runner as self.repo should be known - self.repo.as_ref().expect("Repo name unknown.") + self.repo.as_ref().expect("Repo name unknown."), + if self.event_name == "pull_request" { + "/issues" + } else { + "" + }, ); let base_comment_url = self.api_url.join(&repo).unwrap(); while let Some(ref endpoint) = comments_url { @@ -454,51 +462,40 @@ impl GithubApiClient { && !(["PENDING", "DISMISSED"].contains(&review.state.as_str())) { // dismiss outdated review - match url.join("reviews/")?.join(review.id.to_string().as_str()) { - Ok(dismiss_url) => { - if let Ok(req) = Self::make_api_request( - &self.client, - dismiss_url, - Method::PUT, - Some( - serde_json::json!( - { - "message": "outdated suggestion", - "event": "DISMISS" - } - ) - .to_string(), - ), - None, - ) { - match Self::send_api_request( - self.client.clone(), - req, - self.rate_limit_headers.clone(), - 0, - ) - .await - { - Ok(result) => { - if !result.status().is_success() { - Self::log_response( - result, - "Failed to dismiss outdated review", - ) - .await; - } - } - Err(e) => { - log::error!( - "Failed to dismiss outdated review: {e:}" - ); + if let Ok(dismiss_url) = + url.join(format!("reviews/{}/dismissals", 
review.id).as_str()) + { + if let Ok(req) = Self::make_api_request( + &self.client, + dismiss_url, + Method::PUT, + Some(REVIEW_DISMISSAL.to_string()), + None, + ) { + match Self::send_api_request( + self.client.clone(), + req, + self.rate_limit_headers.clone(), + 0, + ) + .await + { + Ok(result) => { + if !result.status().is_success() { + Self::log_response( + result, + "Failed to dismiss outdated review", + ) + .await; } } + Err(e) => { + log::error!( + "Failed to dismiss outdated review: {e:}" + ); + } } } - Err(e) => { - log::error!("Failed to parse URL for dismissing outdated review: {e:?}"); - } } } } diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index 7653ccd..a0ed328 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -146,7 +146,14 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { ); } } - let comment_url = format!("/repos/{REPO}/comments/76453652"); + let comment_url = format!( + "/repos/{REPO}{}/comments/76453652", + if test_params.event_t == EventType::PullRequest { + "/issues" + } else { + "" + } + ); if !test_params.fail_get_existing_comments { mocks.push( diff --git a/cpp-linter/tests/reviews.rs b/cpp-linter/tests/reviews.rs index a310aed..4779378 100644 --- a/cpp-linter/tests/reviews.rs +++ b/cpp-linter/tests/reviews.rs @@ -125,7 +125,10 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { if !test_params.fail_get_existing_reviews { mocks.push( server - .mock("PUT", format!("{reviews_endpoint}/1807607546").as_str()) + .mock( + "PUT", + format!("{reviews_endpoint}/1807607546/dismissals").as_str(), + ) .match_body(r#"{"event":"DISMISS","message":"outdated suggestion"}"#) .match_header("Authorization", format!("token {TOKEN}").as_str()) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") From 5691ec7d1901d78368d0bf3180b315d64daba4f9 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Fri, 27 Sep 2024 20:47:49 -0700 Subject: [PATCH 09/21] use test profiles - organize 
config files into .config/ folder only those that do not require repo root location - minor test improvements --- .../.readthedocs.yaml | 0 cliff.toml => .config/cliff.toml | 0 .config/nextest.toml | 31 ++++++ .github/workflows/bump-n-release.yml | 2 +- .github/workflows/bump_version.py | 11 +- .github/workflows/run-dev-tests.yml | 24 ++-- cpp-linter/src/clang_tools/clang_format.rs | 27 +++-- cpp-linter/src/git.rs | 3 - cpp-linter/src/rest_api/github/mod.rs | 104 +++++++++++++----- .../src/rest_api/github/specific_api.rs | 9 +- cpp-linter/src/rest_api/mod.rs | 14 +-- justfile | 5 +- 12 files changed, 156 insertions(+), 74 deletions(-) rename .readthedocs.yaml => .config/.readthedocs.yaml (100%) rename cliff.toml => .config/cliff.toml (100%) create mode 100644 .config/nextest.toml diff --git a/.readthedocs.yaml b/.config/.readthedocs.yaml similarity index 100% rename from .readthedocs.yaml rename to .config/.readthedocs.yaml diff --git a/cliff.toml b/.config/cliff.toml similarity index 100% rename from cliff.toml rename to .config/cliff.toml diff --git a/.config/nextest.toml b/.config/nextest.toml new file mode 100644 index 0000000..00d072c --- /dev/null +++ b/.config/nextest.toml @@ -0,0 +1,31 @@ +# required minimum nextest version +nextest-version = "0.9.77" + +[profile.default] +# A profile to run most tests, except tests that run longer than 10 seconds +default-filter = "not test(#*rate_limit_secondary) - test(#git::test::with_*)" + +# This will flag any test that runs longer than 10 seconds. Useful when writing new tests. +slow-timeout = "10s" + +[profile.ci] +# A profile to run only tests that use clang-tidy and/or clang-format +# NOTE: This profile is intended to keep CI runtime low. Locally, use default or all profiles + +# This is all tests in tests/ folder + unit test for --extra-args. 
+default-filter = "kind(test) + test(#*use_extra_args)"
+
+# show which tests were skipped
+status-level = "skip"
+
+# show log output from each test
+success-output = "final"
+failure-output = "immediate-final"
+
+[profile.all]
+# A profile to run all tests (including tests that run longer than 10 seconds)
+default-filter = "all()"
+
+# Revert slow-timeout to nextest default value.
+# Otherwise, default profile value (10s) is inherited.
+slow-timeout = "60s"
diff --git a/.github/workflows/bump-n-release.yml b/.github/workflows/bump-n-release.yml
index 12c5fd0..f764aa5 100644
--- a/.github/workflows/bump-n-release.yml
+++ b/.github/workflows/bump-n-release.yml
@@ -54,7 +54,7 @@ jobs:
         uses: orhun/git-cliff-action@v4
         id: git-cliff
         with:
-          config: cliff.toml
+          config: .config/cliff.toml
           args: --unreleased
         env:
           OUTPUT: ${{ runner.temp }}/changes.md
diff --git a/.github/workflows/bump_version.py b/.github/workflows/bump_version.py
index 523cae8..ec616c2 100644
--- a/.github/workflows/bump_version.py
+++ b/.github/workflows/bump_version.py
@@ -120,7 +120,16 @@ def main():
         subprocess.run(["napi", "version"], cwd="node-binding", check=True)
         print("Updated version in node-binding/**package.json")
 
-    subprocess.run(["git", "cliff", "--tag", Updater.new_version], check=True)
+    subprocess.run(
+        [
+            "git-cliff",
+            "--config",
+            ".config/cliff.toml",
+            "--tag",
+            Updater.new_version,
+        ],
+        check=True,
+    )
     print("Updated CHANGELOG.md")
 
     subprocess.run(["git", "add", "--all"], check=True)
diff --git a/.github/workflows/run-dev-tests.yml b/.github/workflows/run-dev-tests.yml
index 9a0e5ca..6c6ed06 100644
--- a/.github/workflows/run-dev-tests.yml
+++ b/.github/workflows/run-dev-tests.yml
@@ -88,7 +88,7 @@ jobs:
         if: runner.os == 'Linux'
         env:
           CLANG_VERSION: '7'
-        run: just test
+        run: just test ci
 
       - name: Install clang v8
         if: runner.os == 'Linux'
@@ -100,7 +100,7 @@ jobs:
         if: runner.os == 'Linux'
         env:
           CLANG_VERSION: '8'
-        run: just test
+        run: just test ci
 
       - name: Install clang v9
uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -110,7 +110,7 @@ jobs: - name: Collect Coverage for clang v9 env: CLANG_VERSION: '9' - run: just test + run: just test ci - name: Install clang v10 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -120,7 +120,7 @@ jobs: - name: Collect Coverage for clang v10 env: CLANG_VERSION: '10' - run: just test + run: just test ci - name: Install clang 11 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -130,7 +130,7 @@ jobs: - name: Collect Coverage for clang v11 env: CLANG_VERSION: '11' - run: just test + run: just test ci - name: Install clang 12 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -140,7 +140,7 @@ jobs: - name: Collect Coverage for clang v12 env: CLANG_VERSION: '12' - run: just test + run: just test ci - name: Install clang 13 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -150,7 +150,7 @@ jobs: - name: Collect Coverage for clang v13 env: CLANG_VERSION: '13' - run: just test + run: just test ci - name: Install clang 14 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -160,7 +160,7 @@ jobs: - name: Collect Coverage for clang v14 env: CLANG_VERSION: '14' - run: just test + run: just test ci - name: Install clang 15 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -170,7 +170,7 @@ jobs: - name: Collect Coverage for clang v15 env: CLANG_VERSION: '15' - run: just test + run: just test ci - name: Install clang 16 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -180,7 +180,7 @@ jobs: - name: Collect Coverage for clang v16 env: CLANG_VERSION: '16' - run: just test + run: just test ci - name: Install clang 17 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -190,7 +190,7 @@ jobs: - name: Collect Coverage for clang v17 env: CLANG_VERSION: '17' - run: just test + run: just test ci - name: Install clang 18 uses: cpp-linter/cpp_linter_rs/install-clang-action@main @@ -200,7 +200,7 @@ jobs: - name: Collect Coverage for clang 
v18 env: CLANG_VERSION: '18' - run: just test --run-ignored=all + run: just test all - name: Generate Coverage HTML report run: just pretty-cov diff --git a/cpp-linter/src/clang_tools/clang_format.rs b/cpp-linter/src/clang_tools/clang_format.rs index 7b31c13..a3c82d3 100644 --- a/cpp-linter/src/clang_tools/clang_format.rs +++ b/cpp-linter/src/clang_tools/clang_format.rs @@ -57,13 +57,15 @@ pub struct Replacement { /// /// This value is not provided by the XML output, but we calculate it after /// deserialization. - pub line: Option, + #[serde(default)] + pub line: usize, /// The column number on the line described by the [`Replacement::offset`]. /// /// This value is not provided by the XML output, but we calculate it after /// deserialization. - pub cols: Option, + #[serde(default)] + pub cols: usize, } impl Clone for Replacement { @@ -188,11 +190,12 @@ pub fn run_clang_format( }); format_advice.patched = patched; if !format_advice.replacements.is_empty() { + // get line and column numbers from format_advice.offset let mut filtered_replacements = Vec::new(); for replacement in &mut format_advice.replacements { let (line_number, columns) = get_line_cols_from_offset(&file.name, replacement.offset); - replacement.line = Some(line_number); - replacement.cols = Some(columns); + replacement.line = line_number; + replacement.cols = columns; for range in &ranges { if range.contains(&line_number.try_into().unwrap_or(0)) { filtered_replacements.push(replacement.clone()); @@ -233,29 +236,29 @@ mod tests { offset: 113, length: 5, value: Some(String::from("\n ")), - line: None, - cols: None, + line: 0, + cols: 0, }, Replacement { offset: 147, length: 0, value: Some(String::from(" ")), - line: None, - cols: None, + line: 0, + cols: 0, }, Replacement { offset: 161, length: 0, value: None, - line: None, - cols: None, + line: 0, + cols: 0, }, Replacement { offset: 165, length: 19, value: Some(String::from("\n\n")), - line: None, - cols: None, + line: 0, + cols: 0, }, ], patched: 
None, diff --git a/cpp-linter/src/git.rs b/cpp-linter/src/git.rs index b3e0cf5..030ed65 100644 --- a/cpp-linter/src/git.rs +++ b/cpp-linter/src/git.rs @@ -415,7 +415,6 @@ mod test { } #[tokio::test] - #[ignore] async fn with_no_changed_sources() { // commit with no modified C/C++ sources let sha = "0c236809891000b16952576dc34de082d7a40bf3"; @@ -430,7 +429,6 @@ mod test { } #[tokio::test] - #[ignore] async fn with_changed_sources() { // commit with modified C/C++ sources let sha = "950ff0b690e1903797c303c5fc8d9f3b52f1d3c5"; @@ -450,7 +448,6 @@ mod test { } #[tokio::test] - #[ignore] async fn with_staged_changed_sources() { // commit with no modified C/C++ sources let sha = "0c236809891000b16952576dc34de082d7a40bf3"; diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index 21b3cdf..bb35da9 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -301,8 +301,11 @@ mod test { use super::GithubApiClient; use crate::{ - clang_tools::capture_clang_tools_output, - cli::{ClangParams, FeedbackInput, LinesChangedOnly}, + clang_tools::{ + clang_format::{FormatAdvice, Replacement}, + clang_tidy::{TidyAdvice, TidyNotification}, + }, + cli::FeedbackInput, common_fs::{FileFilter, FileObj}, logger, rest_api::{RestApiClient, USER_OUTREACH}, @@ -310,7 +313,11 @@ mod test { // ************************* tests for step-summary and output variables - async fn create_comment(tidy_checks: &str, style: &str, fail_gh_out: bool) -> (String, String) { + async fn create_comment( + is_lgtm: bool, + fail_gh_out: bool, + fail_summary: bool, + ) -> (String, String) { let tmp_dir = tempdir().unwrap(); let rest_api_client = GithubApiClient::new().unwrap(); logger::init().unwrap(); @@ -318,29 +325,57 @@ mod test { assert!(rest_api_client.debug_enabled); log::set_max_level(log::LevelFilter::Debug); } - let mut files = vec![Arc::new(Mutex::new(FileObj::new(PathBuf::from( - "tests/demo/demo.cpp", - ))))]; - let mut 
clang_params = ClangParams { - tidy_checks: tidy_checks.to_string(), - lines_changed_only: LinesChangedOnly::Off, - style: style.to_string(), - ..Default::default() - }; - capture_clang_tools_output( - &mut files, - env::var("CLANG-VERSION").unwrap_or("".to_string()).as_str(), - &mut clang_params, - ) - .await - .unwrap(); + let mut files = vec![]; + if !is_lgtm { + for _i in 0..65535 { + let filename = String::from("tests/demo/demo.cpp"); + let mut file = FileObj::new(PathBuf::from(&filename)); + let notes = vec![TidyNotification { + filename, + line: 0, + cols: 0, + severity: String::from("note"), + rationale: String::from("A test dummy rationale"), + diagnostic: String::from("clang-diagnostic-warning"), + suggestion: vec![], + fixed_lines: vec![], + }]; + file.tidy_advice = Some(TidyAdvice { + notes, + patched: None, + }); + let replacements = vec![Replacement { + offset: 0, + length: 0, + value: Some(String::new()), + line: 1, + cols: 1, + }]; + file.format_advice = Some(FormatAdvice { + replacements, + patched: None, + }); + files.push(Arc::new(Mutex::new(file))); + } + } let feedback_inputs = FeedbackInput { - style: style.to_string(), + style: if is_lgtm { + String::new() + } else { + String::from("file") + }, step_summary: true, ..Default::default() }; let mut step_summary_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); - env::set_var("GITHUB_STEP_SUMMARY", step_summary_path.path()); + env::set_var( + "GITHUB_STEP_SUMMARY", + if fail_summary { + Path::new("not-a-file.txt") + } else { + step_summary_path.path() + }, + ); let mut gh_out_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); env::set_var( "GITHUB_OUTPUT", @@ -358,7 +393,9 @@ mod test { step_summary_path .read_to_string(&mut step_summary_content) .unwrap(); - assert!(&step_summary_content.contains(USER_OUTREACH)); + if !fail_summary { + assert!(&step_summary_content.contains(USER_OUTREACH)); + } let mut gh_out_content = String::new(); gh_out_path.read_to_string(&mut 
gh_out_content).unwrap(); if !fail_gh_out { @@ -369,7 +406,7 @@ mod test { #[tokio::test] async fn check_comment_concerns() { - let (comment, gh_out) = create_comment("readability-*", "file", false).await; + let (comment, gh_out) = create_comment(false, false, false).await; assert!(&comment.contains(":warning:\nSome files did not pass the configured checks!\n")); let fmt_pattern = Regex::new(r"format-checks-failed=(\d+)\n").unwrap(); let tidy_pattern = Regex::new(r"tidy-checks-failed=(\d+)\n").unwrap(); @@ -389,10 +426,10 @@ mod test { #[tokio::test] async fn check_comment_lgtm() { env::set_var("ACTIONS_STEP_DEBUG", "true"); - let (comment, gh_out) = create_comment("-*", "", false).await; - assert!(&comment.contains(":heavy_check_mark:\nNo problems need attention.")); + let (comment, gh_out) = create_comment(true, false, false).await; + assert!(comment.contains(":heavy_check_mark:\nNo problems need attention.")); assert_eq!( - &gh_out, + gh_out, "checks-failed=0\nformat-checks-failed=0\ntidy-checks-failed=0\n" ); } @@ -400,9 +437,20 @@ mod test { #[tokio::test] async fn fail_gh_output() { env::set_var("ACTIONS_STEP_DEBUG", "true"); - let (comment, gh_out) = create_comment("-*", "", true).await; + let (comment, gh_out) = create_comment(true, true, false).await; assert!(&comment.contains(":heavy_check_mark:\nNo problems need attention.")); - assert_eq!(&gh_out, ""); + assert!(gh_out.is_empty()); + } + + #[tokio::test] + async fn fail_gh_summary() { + env::set_var("ACTIONS_STEP_DEBUG", "true"); + let (comment, gh_out) = create_comment(true, false, true).await; + assert!(comment.is_empty()); + assert_eq!( + gh_out, + "checks-failed=0\nformat-checks-failed=0\ntidy-checks-failed=0\n" + ); } #[tokio::test] diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index e059c58..eab4220 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -94,8 +94,9 @@ impl 
GithubApiClient { if let Err(e) = writeln!(gh_out_file, "\n{}\n", comment) { log::error!("Could not write to GITHUB_STEP_SUMMARY file: {}", e); } + } else { + log::error!("GITHUB_STEP_SUMMARY file could not be opened"); } - log::error!("GITHUB_STEP_SUMMARY file could not be opened"); } } @@ -110,10 +111,8 @@ impl GithubApiClient { // assemble a list of line numbers let mut lines: Vec = Vec::new(); for replacement in &format_advice.replacements { - if let Some(line_int) = replacement.line { - if !lines.contains(&line_int) { - lines.push(line_int); - } + if !lines.contains(&replacement.line) { + lines.push(replacement.line); } } // post annotation if any applicable lines were formatted diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index 9c4de1b..217d30b 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -10,7 +10,7 @@ use std::sync::{Arc, Mutex}; use std::time::Duration; // non-std crates -use anyhow::{anyhow, Context, Error, Result}; +use anyhow::{anyhow, Error, Result}; use chrono::DateTime; use futures::future::{BoxFuture, FutureExt}; use reqwest::header::{HeaderMap, HeaderValue}; @@ -85,7 +85,6 @@ pub trait RestApiClient { data: Option, headers: Option, ) -> Result { - let url_str = url.as_str().to_string(); let mut req = client.request(method, url); if let Some(h) = headers { req = req.headers(h); @@ -93,8 +92,9 @@ pub trait RestApiClient { if let Some(d) = data { req = req.body(d); } - req.build() - .with_context(|| format!("Failed to build request to {url_str}")) + // RequestBuilder only fails to `build()` if there is a malformed `url`. We + // should be safe here because of this function's `url` parameter type. + req.build().map_err(Error::from) } /// A convenience function to send HTTP requests and respect a REST API rate limits. 
@@ -574,7 +574,6 @@ mod test { } #[tokio::test] - #[ignore] #[should_panic(expected = "REST API secondary rate limit exceeded")] async fn rate_limit_secondary() { simulate_rate_limit(&RateLimitTestParams { @@ -586,7 +585,6 @@ mod test { } #[tokio::test] - #[ignore] #[should_panic(expected = "REST API secondary rate limit exceeded")] async fn rate_limit_bad_retry() { simulate_rate_limit(&RateLimitTestParams { @@ -599,7 +597,6 @@ mod test { } #[tokio::test] - #[ignore] #[should_panic(expected = "REST API rate limit exceeded!")] async fn rate_limit_primary() { simulate_rate_limit(&RateLimitTestParams { @@ -611,7 +608,6 @@ mod test { } #[tokio::test] - #[ignore] #[should_panic(expected = "REST API rate limit exceeded!")] async fn rate_limit_no_reset() { simulate_rate_limit(&RateLimitTestParams { @@ -622,7 +618,6 @@ mod test { } #[tokio::test] - #[ignore] #[should_panic(expected = "REST API rate limit exceeded!")] async fn rate_limit_bad_reset() { simulate_rate_limit(&RateLimitTestParams { @@ -635,7 +630,6 @@ mod test { } #[tokio::test] - #[ignore] async fn rate_limit_bad_count() { simulate_rate_limit(&RateLimitTestParams { has_remaining_count: true, diff --git a/justfile b/justfile index 1d0cfef..000d796 100644 --- a/justfile +++ b/justfile @@ -1,11 +1,12 @@ set windows-shell := ["powershell.exe", "-NoLogo", "-Command"] + # run the test suite [group("code coverage")] -test arg='': +test profile='default': cargo llvm-cov --no-report \ nextest --manifest-path cpp-linter/Cargo.toml \ - --lib --tests --color always {{ arg }} + --lib --tests --color always --profile {{ profile }} # Clear previous test build artifacts [group("code coverage")] From e16ff13eb39599923971a1f1049fc520b5b2cab8 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 06:34:52 -0700 Subject: [PATCH 10/21] add more tests third round of review --- cpp-linter/src/rest_api/github/mod.rs | 44 ++-- .../src/rest_api/github/specific_api.rs | 248 +++++++++--------- 
cpp-linter/src/rest_api/mod.rs | 8 +- cpp-linter/tests/comments.rs | 61 +++-- cpp-linter/tests/paginated_changed_files.rs | 108 ++++++-- cpp-linter/tests/reviews.rs | 78 ++++-- cspell.config.yml | 1 + 7 files changed, 324 insertions(+), 224 deletions(-) diff --git a/cpp-linter/src/rest_api/github/mod.rs b/cpp-linter/src/rest_api/github/mod.rs index bb35da9..0fa66c3 100644 --- a/cpp-linter/src/rest_api/github/mod.rs +++ b/cpp-linter/src/rest_api/github/mod.rs @@ -9,7 +9,7 @@ use std::io::Write; use std::sync::{Arc, Mutex}; // non-std crates -use anyhow::{anyhow, Context, Result}; +use anyhow::{Context, Result}; use reqwest::{ header::{HeaderMap, HeaderValue, AUTHORIZATION}, Client, Method, Url, @@ -35,7 +35,7 @@ pub struct GithubApiClient { client: Client, /// The CI run's event payload from the webhook that triggered the workflow. - pull_request: Option, + pull_request: i64, /// The name of the event that was triggered when running cpp_linter. pub event_name: String, @@ -52,6 +52,7 @@ pub struct GithubApiClient { /// The value of the `ACTIONS_STEP_DEBUG` environment variable. pub debug_enabled: bool, + /// The response header names that describe the rate limit status. 
rate_limit_headers: RestApiRateLimitHeaders, } @@ -116,7 +117,7 @@ impl RestApiClient for GithubApiClient { { // get diff from Github REST API let is_pr = self.event_name == "pull_request"; - let pr = self.pull_request.unwrap_or(-1).to_string(); + let pr = self.pull_request.to_string(); let sha = self.sha.clone().unwrap(); let url = self .api_url @@ -134,33 +135,26 @@ impl RestApiClient for GithubApiClient { None, Some(diff_header), )?; - match Self::send_api_request( + let response = Self::send_api_request( self.client.clone(), request, self.rate_limit_headers.to_owned(), 0, ) .await - { - Ok(response) => { - if response.status().is_success() { - Ok(parse_diff_from_buf(&response.bytes().await?, file_filter)) - } else { - let endpoint = if is_pr { - Url::parse(format!("{}/files", url.as_str()).as_str())? - } else { - url - }; - Self::log_response(response, "Failed to get full diff for event").await; - log::debug!("Trying paginated request to {}", endpoint.as_str()); - self.get_changed_files_paginated(endpoint, file_filter) - .await - } - } - Err(e) => Err(anyhow!( - "Failed to connect with GitHub server to get list of changed files." - ) - .context(e)), + .with_context(|| "Failed to get list of changed files from GitHub server.")?; + if response.status().is_success() { + Ok(parse_diff_from_buf(&response.bytes().await?, file_filter)) + } else { + let endpoint = if is_pr { + Url::parse(format!("{}/files", url.as_str()).as_str())? 
+ } else { + url + }; + Self::log_response(response, "Failed to get full diff for event").await; + log::debug!("Trying paginated request to {}", endpoint.as_str()); + self.get_changed_files_paginated(endpoint, file_filter) + .await } } else { // get diff from libgit2 API @@ -257,7 +251,7 @@ impl RestApiClient for GithubApiClient { } if let Some(repo) = &self.repo { let is_pr = self.event_name == "pull_request"; - let pr = self.pull_request.unwrap_or(-1).to_string() + "/"; + let pr = self.pull_request.to_string() + "/"; let sha = self.sha.clone().unwrap() + "/"; let comments_url = self .api_url diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index eab4220..821e81c 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -1,14 +1,13 @@ //! This submodule implements functionality exclusively specific to Github's REST API. use std::{ - collections::HashMap, env, fs::OpenOptions, io::{Read, Write}, sync::{Arc, Mutex}, }; -use anyhow::{Context, Result}; +use anyhow::{anyhow, Context, Result}; use reqwest::{Client, Method, Url}; use crate::{ @@ -49,9 +48,9 @@ impl GithubApiClient { file_buf, ) .with_context(|| "Failed to deserialize event payload")?; - payload["number"].as_i64() + payload["number"].as_i64().unwrap_or(-1) } - _ => None, + _ => -1, } }; // GITHUB_*** env vars cannot be overwritten in CI runners on GitHub. 
@@ -66,18 +65,9 @@ impl GithubApiClient { pull_request, event_name, api_url, - repo: match env::var("GITHUB_REPOSITORY") { - Ok(val) => Some(val), - Err(_) => None, - }, - sha: match env::var("GITHUB_SHA") { - Ok(val) => Some(val), - Err(_) => None, - }, - debug_enabled: match env::var("ACTIONS_STEP_DEBUG") { - Ok(val) => val == "true", - Err(_) => false, - }, + repo: env::var("GITHUB_REPOSITORY").ok(), + sha: env::var("GITHUB_SHA").ok(), + debug_enabled: env::var("ACTIONS_STEP_DEBUG").is_ok_and(|val| &val == "true"), rate_limit_headers: RestApiRateLimitHeaders { reset: "x-ratelimit-reset".to_string(), remaining: "x-ratelimit-remaining".to_string(), @@ -159,8 +149,7 @@ impl GithubApiClient { .remove_bot_comments(&url, !update_only || (is_lgtm && no_lgtm)) .await?; if !is_lgtm || !no_lgtm { - let payload = HashMap::from([("body", comment.to_owned())]); - // log::debug!("payload body:\n{:?}", payload); + // log::debug!("payload body:\n{:?}", comment); let req_meth = if comment_url.is_some() { Method::PATCH } else { @@ -168,16 +157,9 @@ impl GithubApiClient { }; let request = Self::make_api_request( &self.client, - if let Some(url_) = comment_url { - url_ - } else { - url - }, + comment_url.unwrap_or(url), req_meth, - Some( - serde_json::to_string(&payload) - .with_context(|| "Failed to serialize thread comment to json string")?, - ), + Some(format!(r#"{{"body":"{comment}"}}"#)), None, )?; match Self::send_api_request( @@ -202,11 +184,7 @@ impl GithubApiClient { /// Remove thread comments previously posted by cpp-linter. 
async fn remove_bot_comments(&self, url: &Url, delete: bool) -> Result> { let mut comment_url = None; - let mut comments_url = Some( - Url::parse_with_params(url.clone().as_str(), &[("page", "1")]).with_context(|| { - format!("Failed to parse URL to fetch existing comments: {url}") - })?, - ); + let mut comments_url = Some(Url::parse_with_params(url.as_str(), &[("page", "1")])?); let repo = format!( "repos/{}{}/comments", // if we got here, then we know it is on a CI runner as self.repo should be known @@ -221,14 +199,18 @@ impl GithubApiClient { while let Some(ref endpoint) = comments_url { let request = Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; - match Self::send_api_request( + let result = Self::send_api_request( self.client.clone(), request, self.rate_limit_headers.to_owned(), 0, ) - .await - { + .await; + match result { + Err(e) => { + log::error!("Failed to get list of existing thread comments: {e:?}"); + return Ok(comment_url); + } Ok(response) => { if !response.status().is_success() { Self::log_response( @@ -239,26 +221,24 @@ impl GithubApiClient { return Ok(comment_url); } comments_url = Self::try_next_page(response.headers()); - let payload: Vec = serde_json::from_str(&response.text().await?) 
- .with_context(|| { - "Failed to deserialize list of existing thread comments" - })?; - for comment in payload { + let payload = + serde_json::from_str::>(&response.text().await?); + if let Err(e) = payload { + log::error!( + "Failed to deserialize list of existing thread comments: {e:?}" + ); + continue; + } + for comment in payload.unwrap() { if comment.body.starts_with(COMMENT_MARKER) { log::debug!( - "comment id {} from user {} ({})", + "Found cpp-linter comment id {} from user {} ({})", comment.id, comment.user.login, comment.user.id, ); let this_comment_url = - Url::parse(format!("{base_comment_url}/{}", &comment.id).as_str()) - .with_context(|| { - format!( - "Failed to parse URL for JSON comment.id: {}", - comment.id - ) - })?; + Url::parse(format!("{base_comment_url}/{}", comment.id).as_str())?; if delete || comment_url.is_some() { // if not updating: remove all outdated comments // if updating: remove all outdated comments except the last one @@ -304,10 +284,6 @@ impl GithubApiClient { } } } - Err(e) => { - log::error!("Failed to get list of existing thread comments: {e:?}"); - return Ok(comment_url); - } } } Ok(comment_url) @@ -324,35 +300,45 @@ impl GithubApiClient { let url = self .api_url .join("repos/")? - // if we got here, then we know it is on a CI runner and self.repo should be known - .join(format!("{}/", self.repo.as_ref().expect("Repo name unknown")).as_str())? - .join("pulls/")? .join( - self.pull_request - // if we got here, then we know that it is a pull_request event - .expect("pull request number unknown") - .to_string() - .as_str(), - )?; + format!( + "{}/", + // if we got here, then we know self.repo should be known + self.repo.as_ref().ok_or(anyhow!("Repo name unknown"))? + ) + .as_str(), + )? + .join("pulls/")? 
+ // if we got here, then we know that it is a self.pull_request is a valid value + .join(self.pull_request.to_string().as_str())?; let request = Self::make_api_request(&self.client, url.as_str(), Method::GET, None, None)?; let response = Self::send_api_request( self.client.clone(), request, self.rate_limit_headers.clone(), 0, - ) - .await - .with_context(|| format!("Failed to get PR info from {}", url.as_str()))?; - let pr_info: PullRequestInfo = serde_json::from_str(&response.text().await?) - .with_context(|| "Failed to deserialize PR info")?; + ); - let url = Url::parse(format!("{}/", url.as_str()).as_str())? - .join("reviews") - .with_context(|| "Failed to parse URL endpoint for PR reviews")?; + let url = Url::parse(format!("{}/", url).as_str())?.join("reviews")?; let dismissal = self.dismiss_outdated_reviews(&url); - - if pr_info.draft || pr_info.state != "open" { - return dismissal.await; + match response.await { + Ok(response) => { + match serde_json::from_str::(&response.text().await?) { + Err(e) => { + log::error!("Failed to deserialize PR info: {e:?}"); + return dismissal.await; + } + Ok(pr_info) => { + if pr_info.draft || pr_info.state != "open" { + return dismissal.await; + } + } + } + } + Err(e) => { + log::error!("Failed to get PR info from {e:?}"); + return dismissal.await; + } } let summary_only = ["true", "on", "1"].contains( @@ -426,73 +412,76 @@ impl GithubApiClient { /// Dismiss any outdated reviews generated by cpp-linter. 
async fn dismiss_outdated_reviews(&self, url: &Url) -> Result<()> { - let mut url_ = Some( - Url::parse_with_params(url.as_str(), [("page", "1")]).with_context(|| { - format!( - "Failed to parse endpoint for getting existing PR reviews: {}", - url.as_str() - ) - })?, - ); + let mut url_ = Some(Url::parse_with_params(url.as_str(), [("page", "1")])?); while let Some(ref endpoint) = url_ { let request = Self::make_api_request(&self.client, endpoint.as_str(), Method::GET, None, None)?; - let response = Self::send_api_request( + let result = Self::send_api_request( self.client.clone(), request, self.rate_limit_headers.clone(), 0, ) .await; - if response.is_err() || response.as_ref().is_ok_and(|r| !r.status().is_success()) { - log::error!( - "Failed to get a list of existing PR reviews: {}", - endpoint.as_str() - ); - return Ok(()); - } - let response = response.unwrap(); - url_ = Self::try_next_page(response.headers()); - match serde_json::from_str::>(&response.text().await?) { - Ok(payload) => { - for review in payload { - if let Some(body) = &review.body { - if body.starts_with(COMMENT_MARKER) - && !(["PENDING", "DISMISSED"].contains(&review.state.as_str())) - { - // dismiss outdated review - if let Ok(dismiss_url) = - url.join(format!("reviews/{}/dismissals", review.id).as_str()) - { - if let Ok(req) = Self::make_api_request( - &self.client, - dismiss_url, - Method::PUT, - Some(REVIEW_DISMISSAL.to_string()), - None, - ) { - match Self::send_api_request( - self.client.clone(), - req, - self.rate_limit_headers.clone(), - 0, - ) - .await - { - Ok(result) => { - if !result.status().is_success() { - Self::log_response( - result, - "Failed to dismiss outdated review", - ) - .await; + match result { + Err(e) => { + log::error!("Failed to get a list of existing PR reviews: {e:?}"); + return Ok(()); + } + Ok(response) => { + if !response.status().is_success() { + Self::log_response(response, "Failed to get a list of existing PR reviews") + .await; + return Ok(()); + } + url_ 
= Self::try_next_page(response.headers()); + match serde_json::from_str::>(&response.text().await?) { + Err(e) => { + log::error!("Unable to serialize JSON about review comments: {e:?}"); + return Ok(()); + } + Ok(payload) => { + for review in payload { + if let Some(body) = &review.body { + if body.starts_with(COMMENT_MARKER) + && !(["PENDING", "DISMISSED"] + .contains(&review.state.as_str())) + { + // dismiss outdated review + if let Ok(dismiss_url) = url.join( + format!("reviews/{}/dismissals", review.id).as_str(), + ) { + if let Ok(req) = Self::make_api_request( + &self.client, + dismiss_url, + Method::PUT, + Some(REVIEW_DISMISSAL.to_string()), + None, + ) { + match Self::send_api_request( + self.client.clone(), + req, + self.rate_limit_headers.clone(), + 0, + ) + .await + { + Ok(result) => { + if !result.status().is_success() { + Self::log_response( + result, + "Failed to dismiss outdated review", + ) + .await; + } + } + Err(e) => { + log::error!( + "Failed to dismiss outdated review: {e:}" + ); + } } } - Err(e) => { - log::error!( - "Failed to dismiss outdated review: {e:}" - ); - } } } } @@ -500,9 +489,6 @@ impl GithubApiClient { } } } - Err(e) => { - log::error!("Unable to deserialize malformed JSON about review comments: {e:?}") - } } } Ok(()) diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index 217d30b..76a84e5 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -70,8 +70,8 @@ pub trait RestApiClient { /// let response = Self::send_api_request( /// self.client.clone(), /// request, - /// false, // false means don't panic - /// 0, // start recursion count at 0 + /// self.rest_api_headers.clone(), + /// 0, // start recursion count at 0 (max iterations is 4) /// ); /// match response.await { /// Some(res) => {/* handle response */} @@ -102,6 +102,10 @@ pub trait RestApiClient { /// This method must own all the data passed to it because asynchronous recursion is used. 
/// Recursion is needed when a secondary rate limit is hit. The server tells the client that /// it should back off and retry after a specified time interval. + /// + /// Note, pass `0` to the `retries` parameter, which is used to count recursive iterations. + /// This function will recur a maximum of 4 times, and this only happens when the response's + /// headers includes a retry interval. fn send_api_request( client: Client, request: Request, diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index a0ed328..ff209b5 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -42,6 +42,7 @@ struct TestParams { pub fail_get_existing_comments: bool, pub fail_dismissal: bool, pub fail_posting: bool, + pub bad_existing_comments: bool, } impl Default for TestParams { @@ -55,6 +56,7 @@ impl Default for TestParams { fail_get_existing_comments: false, fail_dismissal: false, fail_posting: false, + bad_existing_comments: false, } } } @@ -102,26 +104,30 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { ); } if test_params.event_t == EventType::Push { - mocks.push( - server - .mock( - "GET", - format!("/repos/{REPO}/commits/{SHA}/comments").as_str(), - ) - .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", format!("token {TOKEN}").as_str()) - .match_body(Matcher::Any) - .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) + let mut mock = server + .mock( + "GET", + format!("/repos/{REPO}/commits/{SHA}/comments").as_str(), + ) + .match_header("Accept", "application/vnd.github.raw+json") + .match_header("Authorization", format!("token {TOKEN}").as_str()) + .match_body(Matcher::Any) + .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) + .with_header(REMAINING_RATE_LIMIT_HEADER, "50") + .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) + .with_status(if test_params.fail_get_existing_comments { + 403 + } else { + 200 + }); + if 
test_params.bad_existing_comments { + mock = mock.with_body(String::new()).create(); + } else { + mock = mock .with_body_from_file(format!("{asset_path}push_comments_{SHA}.json")) - .with_header(REMAINING_RATE_LIMIT_HEADER, "50") - .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) - .with_status(if test_params.fail_get_existing_comments { - 403 - } else { - 200 - }) - .create(), - ); + .create(); + } + mocks.push(mock); } else { let pr_endpoint = format!("/repos/{REPO}/issues/{PR}/comments"); for pg in ["1", "2"] { @@ -155,7 +161,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { } ); - if !test_params.fail_get_existing_comments { + if !test_params.fail_get_existing_comments && !test_params.bad_existing_comments { mocks.push( server .mock("DELETE", comment_url.as_str()) @@ -193,7 +199,10 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { ); } - if test_params.thread_comments == ThreadComments::On && lgtm_allowed { + if test_params.thread_comments == ThreadComments::On + && lgtm_allowed + && !test_params.bad_existing_comments + { mocks.push( server .mock( @@ -405,3 +414,13 @@ async fn fail_posting() { }) .await; } + +#[tokio::test] +async fn bad_existing_comments() { + test_comment(&TestParams { + bad_existing_comments: true, + force_lgtm: true, + ..Default::default() + }) + .await; +} diff --git a/cpp-linter/tests/paginated_changed_files.rs b/cpp-linter/tests/paginated_changed_files.rs index ede4843..c6349c6 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -14,24 +14,40 @@ use std::{env, io::Write, path::Path}; #[derive(PartialEq)] enum EventType { Push, - PullRequest(u64), + PullRequest, +} + +impl Default for EventType { + fn default() -> Self { + Self::Push + } +} + +#[derive(Default)] +struct TestParams { + event_t: EventType, + fail_serde_diff: bool, + fail_serde_event_payload: bool, + no_event_payload: bool, } const REPO: &str = 
"cpp-linter/test-cpp-linter-action"; const SHA: &str = "DEADBEEF"; +const PR: u8 = 42; const TOKEN: &str = "123456"; +const EVENT_PAYLOAD: &str = r#"{"number": 42}"#; const RESET_RATE_LIMIT_HEADER: &str = "x-ratelimit-reset"; const REMAINING_RATE_LIMIT_HEADER: &str = "x-ratelimit-remaining"; const MALFORMED_RESPONSE_PAYLOAD: &str = "{\"message\":\"Resource not accessible by integration\"}"; -async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_serialization: bool) { +async fn get_paginated_changes(lib_root: &Path, test_params: &TestParams) { env::set_var("GITHUB_REPOSITORY", REPO); env::set_var("GITHUB_SHA", SHA); env::set_var("GITHUB_TOKEN", TOKEN); env::set_var("CI", "true"); env::set_var( "GITHUB_EVENT_NAME", - if event_type == EventType::Push { + if test_params.event_t == EventType::Push { "push" } else { "pull_request" @@ -40,14 +56,20 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_seri let tmp = TempDir::new().expect("Failed to create a temp dir for test"); let mut event_payload = NamedTempFile::new_in(tmp.path()) .expect("Failed to spawn a tmp file for test event payload"); - env::set_var("GITHUB_EVENT_PATH", event_payload.path()); - if let EventType::PullRequest(pr_number) = event_type { + env::set_var( + "GITHUB_EVENT_PATH", + if test_params.no_event_payload { + Path::new("no a file.txt") + } else { + event_payload.path() + }, + ); + if EventType::PullRequest == test_params.event_t + && !test_params.fail_serde_event_payload + && !test_params.no_event_payload + { event_payload - .write_all( - serde_json::json!({"number": pr_number}) - .to_string() - .as_bytes(), - ) + .write_all(EVENT_PAYLOAD.as_bytes()) .expect("Failed to write data to test event payload file") } @@ -57,15 +79,20 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_seri let mut server = mock_server().await; env::set_var("GITHUB_API_URL", server.url()); env::set_current_dir(tmp.path()).unwrap(); - let gh_client 
= GithubApiClient::new().unwrap(); logger::init().unwrap(); log::set_max_level(log::LevelFilter::Debug); + let gh_client = GithubApiClient::new(); + if test_params.fail_serde_event_payload || test_params.no_event_payload { + assert!(gh_client.is_err()); + return; + } + let client = gh_client.unwrap(); let mut mocks = vec![]; let diff_end_point = format!( "/repos/{REPO}/{}", - if let EventType::PullRequest(pr) = event_type { - format!("pulls/{pr}") + if EventType::PullRequest == test_params.event_t { + format!("pulls/{PR}") } else { format!("commits/{SHA}") } @@ -80,12 +107,12 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_seri .with_status(403) .create(), ); - let pg_end_point = if event_type == EventType::Push { + let pg_end_point = if test_params.event_t == EventType::Push { diff_end_point.clone() } else { format!("{diff_end_point}/files") }; - let pg_count = if fail_serialization { 1 } else { 2 }; + let pg_count = if test_params.fail_serde_diff { 1 } else { 2 }; for pg in 1..=pg_count { let link = if pg == 1 { format!("<{}{pg_end_point}?page=2>; rel=\"next\"", server.url()) @@ -100,12 +127,12 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_seri .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .with_header("link", link.as_str()); - if fail_serialization { + if test_params.fail_serde_diff { mock = mock.with_body(MALFORMED_RESPONSE_PAYLOAD); } else { mock = mock.with_body_from_file(format!( "{asset_path}/{}_files_pg{pg}.json", - if event_type == EventType::Push { + if test_params.event_t == EventType::Push { "push" } else { "pull_request" @@ -116,7 +143,7 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_seri } let file_filter = FileFilter::new(&[], vec!["cpp".to_string(), "hpp".to_string()]); - let files = gh_client.get_list_of_changed_files(&file_filter).await; + let files = 
client.get_list_of_changed_files(&file_filter).await; if let Ok(files) = files { // if !fail_serialization assert_eq!(files.len(), 2); @@ -135,31 +162,64 @@ async fn get_paginated_changes(lib_root: &Path, event_type: EventType, fail_seri } } -async fn test_get_changes(event_type: EventType, fail_serialization: bool) { +async fn test_get_changes(test_params: &TestParams) { let tmp_dir = create_test_space(false); let lib_root = env::current_dir().unwrap(); env::set_current_dir(tmp_dir.path()).unwrap(); - get_paginated_changes(&lib_root, event_type, fail_serialization).await; + get_paginated_changes(&lib_root, test_params).await; env::set_current_dir(lib_root.as_path()).unwrap(); drop(tmp_dir); } #[tokio::test] async fn get_push_files_paginated() { - test_get_changes(EventType::Push, false).await + test_get_changes(&TestParams::default()).await } #[tokio::test] async fn get_pr_files_paginated() { - test_get_changes(EventType::PullRequest(42), false).await + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + ..Default::default() + }) + .await } #[tokio::test] async fn fail_push_files_paginated() { - test_get_changes(EventType::Push, true).await + test_get_changes(&TestParams { + fail_serde_diff: true, + ..Default::default() + }) + .await } #[tokio::test] async fn fail_pr_files_paginated() { - test_get_changes(EventType::PullRequest(42), true).await + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + fail_serde_diff: true, + ..Default::default() + }) + .await +} + +#[tokio::test] +async fn fail_event_payload() { + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + fail_serde_event_payload: true, + ..Default::default() + }) + .await +} + +#[tokio::test] +async fn no_event_payload() { + test_get_changes(&TestParams { + event_t: EventType::PullRequest, + no_event_payload: true, + ..Default::default() + }) + .await } diff --git a/cpp-linter/tests/reviews.rs b/cpp-linter/tests/reviews.rs index 4779378..0c673d6 100644 --- 
a/cpp-linter/tests/reviews.rs +++ b/cpp-linter/tests/reviews.rs @@ -35,6 +35,8 @@ struct TestParams { pub fail_dismissal: bool, pub fail_get_existing_reviews: bool, pub fail_posting: bool, + pub bad_pr_info: bool, + pub bad_existing_reviews: bool, } impl Default for TestParams { @@ -52,6 +54,8 @@ impl Default for TestParams { fail_dismissal: false, fail_get_existing_reviews: false, fail_posting: false, + bad_pr_info: false, + bad_existing_reviews: false, } } } @@ -95,9 +99,11 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { .mock("GET", pr_endpoint.as_str()) .match_header("Accept", "application/vnd.github.raw+json") .match_header("Authorization", format!("token {TOKEN}").as_str()) - .with_body( - json!({"state": test_params.pr_state, "draft": test_params.pr_draft}).to_string(), - ) + .with_body(if test_params.bad_pr_info { + String::new() + } else { + json!({"state": test_params.pr_state, "draft": test_params.pr_draft}).to_string() + }) .with_header(REMAINING_RATE_LIMIT_HEADER, "50") .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) .create(), @@ -105,24 +111,28 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { let reviews_endpoint = format!("/repos/{REPO}/pulls/{PR}/reviews"); - mocks.push( - server - .mock("GET", reviews_endpoint.as_str()) - .match_header("Accept", "application/vnd.github.raw+json") - .match_header("Authorization", format!("token {TOKEN}").as_str()) - .match_body(Matcher::Any) - .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) + let mut mock = server + .mock("GET", reviews_endpoint.as_str()) + .match_header("Accept", "application/vnd.github.raw+json") + .match_header("Authorization", format!("token {TOKEN}").as_str()) + .match_body(Matcher::Any) + .match_query(Matcher::UrlEncoded("page".to_string(), "1".to_string())) + .with_header(REMAINING_RATE_LIMIT_HEADER, "50") + .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) + .with_status(if 
test_params.fail_get_existing_reviews { + 403 + } else { + 200 + }); + if test_params.bad_existing_reviews { + mock = mock.with_body(String::new()).create(); + } else { + mock = mock .with_body_from_file(format!("{asset_path}pr_reviews.json")) - .with_header(REMAINING_RATE_LIMIT_HEADER, "50") - .with_header(RESET_RATE_LIMIT_HEADER, reset_timestamp.as_str()) - .with_status(if test_params.fail_get_existing_reviews { - 403 - } else { - 200 - }) - .create(), - ); - if !test_params.fail_get_existing_reviews { + .create() + } + mocks.push(mock); + if !test_params.fail_get_existing_reviews && !test_params.bad_existing_reviews { mocks.push( server .mock( @@ -139,7 +149,11 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { } let lgtm_allowed = !test_params.force_lgtm || !test_params.no_lgtm; - if lgtm_allowed && !test_params.pr_draft && test_params.pr_state == "open" { + if lgtm_allowed + && !test_params.pr_draft + && test_params.pr_state == "open" + && !test_params.bad_pr_info + { let review_reaction = if test_params.passive_reviews { "COMMENT" } else if test_params.force_lgtm { @@ -346,3 +360,25 @@ async fn fail_get_existing_reviews() { }) .await; } + +#[tokio::test] +async fn bad_existing_reviews() { + test_review(&TestParams { + lines_changed_only: LinesChangedOnly::Off, + force_lgtm: true, + bad_existing_reviews: true, + ..Default::default() + }) + .await; +} + +#[tokio::test] +async fn bad_pr_info() { + test_review(&TestParams { + lines_changed_only: LinesChangedOnly::Off, + force_lgtm: true, + bad_pr_info: true, + ..Default::default() + }) + .await; +} diff --git a/cspell.config.yml b/cspell.config.yml index 16da1ff..800dd06 100644 --- a/cspell.config.yml +++ b/cspell.config.yml @@ -20,6 +20,7 @@ words: - Falsey - gitmodules - iomanip + - libc - libgit - markdownlint - maturin From f765aa65a5847b1ff8cd5a2157c40eeec346fd7b Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 07:17:30 -0700 Subject: [PATCH 11/21] revert comment 
serialization tweak --- cpp-linter/src/rest_api/github/specific_api.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 821e81c..37fc750 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -1,6 +1,7 @@ //! This submodule implements functionality exclusively specific to Github's REST API. use std::{ + collections::HashMap, env, fs::OpenOptions, io::{Read, Write}, @@ -149,7 +150,8 @@ impl GithubApiClient { .remove_bot_comments(&url, !update_only || (is_lgtm && no_lgtm)) .await?; if !is_lgtm || !no_lgtm { - // log::debug!("payload body:\n{:?}", comment); + let payload = HashMap::from([("body", comment)]); + // log::debug!("payload body:\n{:?}", payload); let req_meth = if comment_url.is_some() { Method::PATCH } else { @@ -159,7 +161,7 @@ impl GithubApiClient { &self.client, comment_url.unwrap_or(url), req_meth, - Some(format!(r#"{{"body":"{comment}"}}"#)), + Some(serde_json::json!(&payload).to_string()), None, )?; match Self::send_api_request( From 056d54f51916e2ee28ae20fb476d16dd81db51a4 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 11:14:18 -0700 Subject: [PATCH 12/21] upload coverage reports from windows runners to codecov.io --- .github/workflows/run-dev-tests.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/run-dev-tests.yml b/.github/workflows/run-dev-tests.yml index 6c6ed06..5c06a0a 100644 --- a/.github/workflows/run-dev-tests.yml +++ b/.github/workflows/run-dev-tests.yml @@ -212,13 +212,11 @@ jobs: path: target/llvm-cov-pretty - name: Generate Coverage lcov report - if: runner.os == 'Linux' run: | rm coverage.json just lcov - uses: codecov/codecov-action@v4 - if: runner.os == 'Linux' with: token: ${{secrets.CODECOV_TOKEN}} files: lcov.info From 2bfb8c4a712c7726f313276bf0efa1cd89b3a0c2 Mon Sep 17 00:00:00 2001 From: 
Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 11:20:34 -0700 Subject: [PATCH 13/21] adjust errors emitted by clang-tidy --- cpp-linter/src/clang_tools/mod.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/cpp-linter/src/clang_tools/mod.rs b/cpp-linter/src/clang_tools/mod.rs index 0cf7fdd..68063ae 100644 --- a/cpp-linter/src/clang_tools/mod.rs +++ b/cpp-linter/src/clang_tools/mod.rs @@ -93,7 +93,7 @@ fn analyze_single_file( ) -> Result<(PathBuf, Vec<(log::Level, String)>)> { let mut file = file .lock() - .map_err(|_| anyhow!("File mutex is already locked!"))?; + .map_err(|_| anyhow!("Failed to lock file mutex"))?; let mut logs = vec![]; if clang_params.clang_tidy_command.is_some() { if clang_params @@ -174,11 +174,9 @@ pub async fn capture_clang_tools_output( if let Some(db_path) = &clang_params.database { if let Ok(db_str) = fs::read(db_path.join("compile_commands.json")) { clang_params.database_json = Some( - serde_json::from_str::>( - // A compilation database should be UTF-8 encoded. Its safe to `unwrap()` - String::from_utf8(db_str).unwrap().as_str(), - ) - .with_context(|| "Failed to parse compile_commands.json")?, + // A compilation database should be UTF-8 encoded, but file paths are not; use lossy conversion. 
+ serde_json::from_str::>(&String::from_utf8_lossy(&db_str)) + .with_context(|| "Failed to parse compile_commands.json")?, ) } }; From da725189d14e77d90c44ef2b842b5b51fef7f37a Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 11:39:57 -0700 Subject: [PATCH 14/21] switch to match block instead of if block --- .../src/rest_api/github/specific_api.rs | 109 ++++++++++-------- 1 file changed, 58 insertions(+), 51 deletions(-) diff --git a/cpp-linter/src/rest_api/github/specific_api.rs b/cpp-linter/src/rest_api/github/specific_api.rs index 37fc750..0ecb25a 100644 --- a/cpp-linter/src/rest_api/github/specific_api.rs +++ b/cpp-linter/src/rest_api/github/specific_api.rs @@ -225,64 +225,71 @@ impl GithubApiClient { comments_url = Self::try_next_page(response.headers()); let payload = serde_json::from_str::>(&response.text().await?); - if let Err(e) = payload { - log::error!( - "Failed to deserialize list of existing thread comments: {e:?}" - ); - continue; - } - for comment in payload.unwrap() { - if comment.body.starts_with(COMMENT_MARKER) { - log::debug!( - "Found cpp-linter comment id {} from user {} ({})", - comment.id, - comment.user.login, - comment.user.id, + match payload { + Err(e) => { + log::error!( + "Failed to deserialize list of existing thread comments: {e:?}" ); - let this_comment_url = - Url::parse(format!("{base_comment_url}/{}", comment.id).as_str())?; - if delete || comment_url.is_some() { - // if not updating: remove all outdated comments - // if updating: remove all outdated comments except the last one + continue; + } + Ok(payload) => { + for comment in payload { + if comment.body.starts_with(COMMENT_MARKER) { + log::debug!( + "Found cpp-linter comment id {} from user {} ({})", + comment.id, + comment.user.login, + comment.user.id, + ); + let this_comment_url = Url::parse( + format!("{base_comment_url}/{}", comment.id).as_str(), + )?; + if delete || comment_url.is_some() { + // if not updating: remove all outdated 
comments + // if updating: remove all outdated comments except the last one - // use last saved comment_url (if not None) or current comment url - let del_url = if let Some(last_url) = &comment_url { - last_url - } else { - &this_comment_url - }; - let req = Self::make_api_request( - &self.client, - del_url.as_str(), - Method::DELETE, - None, - None, - )?; - match Self::send_api_request( - self.client.clone(), - req, - self.rate_limit_headers.to_owned(), - 0, - ) - .await - { - Ok(result) => { - if !result.status().is_success() { - Self::log_response( - result, - "Failed to delete old thread comment", - ) - .await; + // use last saved comment_url (if not None) or current comment url + let del_url = if let Some(last_url) = &comment_url { + last_url + } else { + &this_comment_url + }; + let req = Self::make_api_request( + &self.client, + del_url.as_str(), + Method::DELETE, + None, + None, + )?; + match Self::send_api_request( + self.client.clone(), + req, + self.rate_limit_headers.to_owned(), + 0, + ) + .await + { + Ok(result) => { + if !result.status().is_success() { + Self::log_response( + result, + "Failed to delete old thread comment", + ) + .await; + } + } + Err(e) => { + log::error!( + "Failed to delete old thread comment: {e:?}" + ) + } } } - Err(e) => { - log::error!("Failed to delete old thread comment: {e:?}") + if !delete { + comment_url = Some(this_comment_url) } } } - if !delete { - comment_url = Some(this_comment_url) - } } } } From a2253d79d504c7266effb1968bdc0cffbe9640fb Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 11:49:20 -0700 Subject: [PATCH 15/21] iteration instead of recursion --- cpp-linter/src/rest_api/mod.rs | 124 +++++++++++++++++---------------- 1 file changed, 63 insertions(+), 61 deletions(-) diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index 76a84e5..a1e3a34 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -12,7 +12,6 @@ use 
std::time::Duration; // non-std crates use anyhow::{anyhow, Error, Result}; use chrono::DateTime; -use futures::future::{BoxFuture, FutureExt}; use reqwest::header::{HeaderMap, HeaderValue}; use reqwest::{Client, IntoUrl, Method, Request, Response, Url}; @@ -111,85 +110,88 @@ pub trait RestApiClient { request: Request, rate_limit_headers: RestApiRateLimitHeaders, retries: u64, - ) -> BoxFuture<'static, Result> { + ) -> impl Future> + Send { async move { - let result = client - .execute(request.try_clone().ok_or(anyhow!( - "Failed to clone request object for recursive behavior" - ))?) - .await; - if let Ok(response) = &result { - if [403u16, 429u16].contains(&response.status().as_u16()) { - // rate limit exceeded - - // check if primary rate limit was violated; panic if so. - let mut requests_remaining = None; - if let Some(remaining) = response.headers().get(&rate_limit_headers.remaining) { - if let Ok(count) = remaining.to_str() { - if let Ok(value) = count.parse::() { - requests_remaining = Some(value); - } else { - log::debug!( + for i in retries..5 { + let result = client + .execute(request.try_clone().ok_or(anyhow!( + "Failed to clone request object for recursive behavior" + ))?) + .await; + if let Ok(response) = &result { + if [403u16, 429u16].contains(&response.status().as_u16()) { + // rate limit may have been exceeded + + // check if primary rate limit was violated; panic if so. 
+ let mut requests_remaining = None; + if let Some(remaining) = + response.headers().get(&rate_limit_headers.remaining) + { + if let Ok(count) = remaining.to_str() { + if let Ok(value) = count.parse::() { + requests_remaining = Some(value); + } else { + log::debug!( "Failed to parse i64 from remaining attempts about rate limit: {count}" ); + } } + } else { + // NOTE: I guess it is sometimes valid for a request to + // not include remaining rate limit attempts + log::debug!( + "Response headers do not include remaining API usage count" + ); } - } else { - // NOTE: I guess it is sometimes valid for a request to - // not include remaining rate limit attempts - log::debug!("Response headers do not include remaining API usage count"); - } - if requests_remaining.is_some_and(|v| v <= 0) { - if let Some(reset_value) = response.headers().get(&rate_limit_headers.reset) - { - if let Ok(epoch) = reset_value.to_str() { - if let Ok(value) = epoch.parse::() { - if let Some(reset) = DateTime::from_timestamp(value, 0) { - return Err(anyhow!( - "REST API rate limit exceeded! Resets at {}", - reset - )); - } - } else { - log::debug!( + if requests_remaining.is_some_and(|v| v <= 0) { + if let Some(reset_value) = + response.headers().get(&rate_limit_headers.reset) + { + if let Ok(epoch) = reset_value.to_str() { + if let Ok(value) = epoch.parse::() { + if let Some(reset) = DateTime::from_timestamp(value, 0) { + return Err(anyhow!( + "REST API rate limit exceeded! Resets at {}", + reset + )); + } + } else { + log::debug!( "Failed to parse i64 from reset time about rate limit: {epoch}" ); + } } + } else { + log::debug!("Response headers does not include a reset timestamp"); } - } else { - log::debug!("Response headers does not include a reset timestamp"); + return Err(anyhow!("REST API rate limit exceeded!")); } - return Err(anyhow!("REST API rate limit exceeded!")); - } - // check if secondary rate limit is violated; backoff and try again. 
- if retries > 4 { - return Err(anyhow!("REST API secondary rate limit exceeded")); - } - if let Some(retry_value) = response.headers().get(&rate_limit_headers.retry) { - if let Ok(retry_str) = retry_value.to_str() { - if let Ok(retry) = retry_str.parse::() { - let interval = Duration::from_secs(retry + retries.pow(2)); - tokio::time::sleep(interval).await; - } else { - log::debug!( + // check if secondary rate limit is violated; backoff and try again. + if i >= 4 { + break; + } + if let Some(retry_value) = response.headers().get(&rate_limit_headers.retry) + { + if let Ok(retry_str) = retry_value.to_str() { + if let Ok(retry) = retry_str.parse::() { + let interval = Duration::from_secs(retry + i.pow(2)); + tokio::time::sleep(interval).await; + } else { + log::debug!( "Failed to parse u64 from retry interval about rate limit: {retry_str}" ); + } } + continue; } - return Self::send_api_request( - client, - request, - rate_limit_headers, - retries + 1, - ) - .await; } + return result.map_err(Error::from); } + return result.map_err(Error::from); } - result.map_err(Error::from) + Err(anyhow!("REST API secondary rate limit exceeded")) } - .boxed() } /// A way to get the list of changed files using REST API calls. It is this method's From cb0bd4c25b0d1d4eff756c5520dee68f96c02eaf Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 12:00:13 -0700 Subject: [PATCH 16/21] make main return unit, not u8 --- cpp-linter/src/main.rs | 8 +++----- cpp-linter/src/run.rs | 26 +++++++++++++++----------- cpp-linter/tests/comments.rs | 2 +- cpp-linter/tests/reviews.rs | 2 +- node-binding/src/lib.rs | 2 +- py-binding/src/lib.rs | 2 +- 6 files changed, 22 insertions(+), 20 deletions(-) diff --git a/cpp-linter/src/main.rs b/cpp-linter/src/main.rs index fcc4012..27c079a 100644 --- a/cpp-linter/src/main.rs +++ b/cpp-linter/src/main.rs @@ -1,14 +1,12 @@ #![cfg(not(test))] /// This crate is the binary executable's entrypoint. 
-use std::{env, process::ExitCode}; +use std::env; use ::cpp_linter::run::run_main; use anyhow::Result; /// This function simply forwards CLI args to [`run_main()`]. #[tokio::main] -pub async fn main() -> Result { - Ok(ExitCode::from( - run_main(env::args().collect::>()).await?, - )) +pub async fn main() -> Result<()> { + run_main(env::args().collect::>()).await } diff --git a/cpp-linter/src/run.rs b/cpp-linter/src/run.rs index c16b7e8..42cc050 100644 --- a/cpp-linter/src/run.rs +++ b/cpp-linter/src/run.rs @@ -8,7 +8,7 @@ use std::path::Path; use std::sync::{Arc, Mutex}; // non-std crates -use anyhow::Result; +use anyhow::{anyhow, Result}; use log::{set_max_level, LevelFilter}; #[cfg(feature = "openssl-vendored")] use openssl_probe; @@ -41,7 +41,7 @@ fn probe_ssl_certs() { /// is used instead of python's `sys.argv`, then the list of strings includes the entry point /// alias ("path/to/cpp-linter.exe"). Thus, the parser in [`crate::cli`] will halt on an error /// because it is not configured to handle positional arguments. -pub async fn run_main(args: Vec) -> Result { +pub async fn run_main(args: Vec) -> Result<()> { probe_ssl_certs(); let arg_parser = get_arg_parser(); @@ -50,7 +50,7 @@ pub async fn run_main(args: Vec) -> Result { if args.subcommand_matches("version").is_some() { println!("cpp-linter v{}", VERSION); - return Ok(0); + return Ok(()); } logger::init().unwrap(); @@ -58,7 +58,7 @@ pub async fn run_main(args: Vec) -> Result { if cli.version == "NO-VERSION" { log::error!("The `--version` arg is used to specify which version of clang to use."); log::error!("To get the cpp-linter version, use `cpp-linter version` sub-command."); - return Ok(1); + return Err(anyhow!("Clang version not specified.")); } if cli.repo_root != "." 
{ @@ -135,9 +135,13 @@ pub async fn run_main(args: Vec) -> Result { .await?; end_log_group(); if env::var("PRE_COMMIT").is_ok_and(|v| v == "1") { - return Ok((checks_failed > 1) as u8); + if checks_failed > 1 { + return Err(anyhow!("Some checks did not pass")); + } else { + return Ok(()); + } } - Ok(0) + Ok(()) } #[cfg(test)] @@ -157,14 +161,14 @@ mod test { "demo/demo.cpp".to_string(), ]) .await; - assert!(result.is_ok_and(|v| v == 0)); + assert!(result.is_ok()); } #[tokio::test] async fn version_command() { env::remove_var("GITHUB_OUTPUT"); // avoid writing to GH_OUT in parallel-running tests let result = run_main(vec!["cpp-linter".to_string(), "version".to_string()]).await; - assert!(result.is_ok_and(|v| v == 0)); + assert!(result.is_ok()); } #[tokio::test] @@ -178,7 +182,7 @@ mod test { "debug".to_string(), ]) .await; - assert!(result.is_ok_and(|v| v == 0)); + assert!(result.is_ok()); } #[tokio::test] @@ -191,7 +195,7 @@ mod test { "-V".to_string(), ]) .await; - assert!(result.is_ok_and(|v| v == 1)); + assert!(result.is_err()); } #[tokio::test] @@ -204,6 +208,6 @@ mod test { "false".to_string(), ]) .await; - assert!(result.is_ok_and(|v| v == 1)); + assert!(result.is_err()); } } diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index ff209b5..eb035c5 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -242,7 +242,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { args.push("-e=c".to_string()); } let result = run_main(args).await; - assert!(result.is_ok_and(|v| v == 0)); + assert!(result.is_ok()); for mock in mocks { mock.assert(); } diff --git a/cpp-linter/tests/reviews.rs b/cpp-linter/tests/reviews.rs index 0c673d6..a949f6c 100644 --- a/cpp-linter/tests/reviews.rs +++ b/cpp-linter/tests/reviews.rs @@ -222,7 +222,7 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { args.push("-e=c".to_string()); } let result = run_main(args).await; - assert!(result.is_ok_and(|v| v == 0)); + 
assert!(result.is_ok()); for mock in mocks { mock.assert(); } diff --git a/node-binding/src/lib.rs b/node-binding/src/lib.rs index 51832ea..b6bd5ca 100644 --- a/node-binding/src/lib.rs +++ b/node-binding/src/lib.rs @@ -4,7 +4,7 @@ use ::cpp_linter::run::run_main; use napi::bindgen_prelude::*; #[napi] -pub async fn main(args: Vec) -> Result { +pub async fn main(args: Vec) -> Result<()> { run_main(args) .await .map_err(|e| Error::new(Status::GenericFailure, e.to_string())) diff --git a/py-binding/src/lib.rs b/py-binding/src/lib.rs index 479345f..a3538b0 100644 --- a/py-binding/src/lib.rs +++ b/py-binding/src/lib.rs @@ -5,7 +5,7 @@ use ::cpp_linter::run::run_main; /// A wrapper for the ``::cpp_linter::run::run_main()``` #[pyfunction] -fn main(args: Vec) -> PyResult { +fn main(args: Vec) -> PyResult<()> { Builder::new_multi_thread() .enable_all() .build() From ff8b60cc5b1031ec6b2ae91eb7e1514b62d2927f Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 12:02:22 -0700 Subject: [PATCH 17/21] factor out repeated function call --- cpp-linter/tests/comments.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/cpp-linter/tests/comments.rs b/cpp-linter/tests/comments.rs index eb035c5..99cc46c 100644 --- a/cpp-linter/tests/comments.rs +++ b/cpp-linter/tests/comments.rs @@ -121,12 +121,11 @@ async fn setup(lib_root: &Path, test_params: &TestParams) { 200 }); if test_params.bad_existing_comments { - mock = mock.with_body(String::new()).create(); + mock = mock.with_body(String::new()); } else { - mock = mock - .with_body_from_file(format!("{asset_path}push_comments_{SHA}.json")) - .create(); + mock = mock.with_body_from_file(format!("{asset_path}push_comments_{SHA}.json")); } + mock = mock.create(); mocks.push(mock); } else { let pr_endpoint = format!("/repos/{REPO}/issues/{PR}/comments"); From 61a9073fe844b291987f232e960fc6de22f9a639 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 12:04:02 -0700 
Subject: [PATCH 18/21] derive default rather than impl default --- cpp-linter/tests/paginated_changed_files.rs | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/cpp-linter/tests/paginated_changed_files.rs b/cpp-linter/tests/paginated_changed_files.rs index c6349c6..81775a5 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -11,18 +11,13 @@ use cpp_linter::{ }; use std::{env, io::Write, path::Path}; -#[derive(PartialEq)] +#[derive(PartialEq, Default)] enum EventType { + #[default] Push, PullRequest, } -impl Default for EventType { - fn default() -> Self { - Self::Push - } -} - #[derive(Default)] struct TestParams { event_t: EventType, From c281281484a3b0bbbd20ddc3928e5e424e2a63b0 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 12:12:58 -0700 Subject: [PATCH 19/21] make test panic if unexpected error occurred --- cpp-linter/tests/paginated_changed_files.rs | 28 +++++++++++++-------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/cpp-linter/tests/paginated_changed_files.rs b/cpp-linter/tests/paginated_changed_files.rs index 81775a5..682e170 100644 --- a/cpp-linter/tests/paginated_changed_files.rs +++ b/cpp-linter/tests/paginated_changed_files.rs @@ -139,17 +139,23 @@ async fn get_paginated_changes(lib_root: &Path, test_params: &TestParams) { let file_filter = FileFilter::new(&[], vec!["cpp".to_string(), "hpp".to_string()]); let files = client.get_list_of_changed_files(&file_filter).await; - if let Ok(files) = files { - // if !fail_serialization - assert_eq!(files.len(), 2); - for file in files { - assert!(["src/demo.cpp", "src/demo.hpp"].contains( - &file - .name - .as_path() - .to_str() - .expect("Failed to get file name from path") - )); + match files { + Err(e) => { + if !test_params.fail_serde_diff { + panic!("Failed to get changed files: {e:?}"); + } + } + Ok(files) => { + assert_eq!(files.len(), 2); + for file in files { + 
assert!(["src/demo.cpp", "src/demo.hpp"].contains( + &file + .name + .as_path() + .to_str() + .expect("Failed to get file name from path") + )); + } } } for mock in mocks { From 98b0a9fb894a9ee2b4dd322514ad83347af3f0d2 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 12:31:27 -0700 Subject: [PATCH 20/21] add anyhow to node-binding deps --- Cargo.lock | 1 + node-binding/Cargo.toml | 1 + 2 files changed, 2 insertions(+) diff --git a/Cargo.lock b/Cargo.lock index 731f5c9..2e66778 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -360,6 +360,7 @@ dependencies = [ name = "cpp-linter-js" version = "2.0.0-rc4" dependencies = [ + "anyhow", "cpp-linter", "napi", "napi-build", diff --git a/node-binding/Cargo.toml b/node-binding/Cargo.toml index d52fa46..bad3332 100644 --- a/node-binding/Cargo.toml +++ b/node-binding/Cargo.toml @@ -19,6 +19,7 @@ crate-type = ["cdylib"] napi = { version = "2.12.2", default-features = false, features = ["napi4", "async"] } napi-derive = "2.12.2" cpp-linter = { path = "../cpp-linter" } +anyhow = "1.0.89" [features] openssl-vendored = ["cpp-linter/openssl-vendored"] From f28d22b14e328bdee0d97653de755a1fc8c3d174 Mon Sep 17 00:00:00 2001 From: Brendan <2bndy5@gmail.com> Date: Sat, 28 Sep 2024 12:34:09 -0700 Subject: [PATCH 21/21] improve error message for secondary rate violation --- cpp-linter/src/rest_api/mod.rs | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/cpp-linter/src/rest_api/mod.rs b/cpp-linter/src/rest_api/mod.rs index a1e3a34..4c9da48 100644 --- a/cpp-linter/src/rest_api/mod.rs +++ b/cpp-linter/src/rest_api/mod.rs @@ -109,10 +109,11 @@ pub trait RestApiClient { client: Client, request: Request, rate_limit_headers: RestApiRateLimitHeaders, - retries: u64, + retries: u8, ) -> impl Future> + Send { async move { - for i in retries..5 { + static MAX_RETRIES: u8 = 5; + for i in retries..MAX_RETRIES { let result = client .execute(request.try_clone().ok_or(anyhow!( "Failed to clone 
request object for recursive behavior" @@ -168,14 +169,11 @@ pub trait RestApiClient { } // check if secondary rate limit is violated; backoff and try again. - if i >= 4 { - break; - } if let Some(retry_value) = response.headers().get(&rate_limit_headers.retry) { if let Ok(retry_str) = retry_value.to_str() { if let Ok(retry) = retry_str.parse::() { - let interval = Duration::from_secs(retry + i.pow(2)); + let interval = Duration::from_secs(retry + (i as u64).pow(2)); tokio::time::sleep(interval).await; } else { log::debug!( @@ -190,7 +188,9 @@ pub trait RestApiClient { } return result.map_err(Error::from); } - Err(anyhow!("REST API secondary rate limit exceeded")) + Err(anyhow!( + "REST API secondary rate limit exceeded after {MAX_RETRIES} retries." + )) } }