diff --git a/.gitattributes b/.gitattributes index 62db1a1..ef12704 100644 --- a/.gitattributes +++ b/.gitattributes @@ -5,7 +5,23 @@ # to native line endings on checkout. *.py text eol=lf *.rst text eol=lf +*.css text eol=lf +*.md text eol=lf +*.rs text eol=lf +*.yaml text eol=lf +*.toml text eol=lf +*.lock text eol=lf +*.yml text eol=lf *.sh text eol=lf *.cpp text eol=lf *.hpp text eol=lf *.patch text eol=lf +*.diff text eol=lf +.gitattributes text eol=lf +.gitmodules text eol=lf +.gitignore text eol=lf +*.txt text eol=lf +*.json text eol=lf +*.code-workspace text eol=lf +*.clang-tidy text eol=lf +*.clang-format text eol=lf diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..b34d095 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,35 @@ +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for all configuration options: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates + +version: 2 +updates: + - package-ecosystem: github-actions + directory: / + schedule: + interval: "weekly" + groups: + actions: + patterns: + - "*" + - package-ecosystem: pip + directory: cpp-linter-py/ + schedule: + interval: "daily" + groups: + pip: + patterns: + - "*" + - package-ecosystem: cargo + directories: + - cpp-linter-lib/ + - cpp-linter-py/ + schedule: + interval: "daily" + ignore: + - dependency-name: cpp-linter-lib + groups: + cargo: + patterns: + - "*" diff --git a/.github/workflows/binary-builds.yml b/.github/workflows/binary-builds.yml index 4da65d8..c6c038f 100644 --- a/.github/workflows/binary-builds.yml +++ b/.github/workflows/binary-builds.yml @@ -34,118 +34,110 @@ jobs: include: - target: aarch64-unknown-linux-gnu os: ubuntu-latest - ## I GIVE UP! For this target, OpenSSL needs to be cross compiled - ## which is driven by openssl-sys crate's custom build script... - ## Linux users with aarch64 (aka ARM64) using musl C lib can go fish (or build from source). 
- # - target: aarch64-unknown-linux-musl - # os: ubuntu-latest + vendered: true + cross: true + - target: aarch64-unknown-linux-musl + os: ubuntu-latest + vendered: true + cross: true - target: x86_64-unknown-linux-gnu os: ubuntu-latest + vendered: false + cross: false - target: x86_64-unknown-linux-musl os: ubuntu-latest + vendered: true + cross: true + - target: arm-unknown-linux-gnueabi + os: ubuntu-latest + vendered: true + cross: true + - target: arm-unknown-linux-gnueabihf + os: ubuntu-latest + vendered: true + cross: true + - target: armv7-unknown-linux-gnueabihf + os: ubuntu-latest + vendered: true + cross: true + - target: powerpc-unknown-linux-gnu + os: ubuntu-latest + vendered: true + cross: true + - target: powerpc64-unknown-linux-gnu + os: ubuntu-latest + vendered: true + cross: true + - target: powerpc64le-unknown-linux-gnu + os: ubuntu-latest + vendered: true + cross: true + - target: s390x-unknown-linux-gnu + os: ubuntu-latest + vendered: true + cross: true - target: aarch64-apple-darwin os: macos-latest + vendered: true + cross: false - target: x86_64-apple-darwin os: macos-latest + vendered: true + cross: false - target: x86_64-pc-windows-msvc os: windows-latest + vendered: false + cross: false + - target: aarch64-pc-windows-msvc + os: windows-latest + vendered: false + cross: false runs-on: ${{ matrix.os }} permissions: contents: write steps: - - name: Calculate Release Version - id: calc-version - run: | - if [ "${{ github.event_name }}" = "pull_request" ]; then - short_sha=$(echo "${{ github.sha }}" | awk '{print substr($0,0,5)}') - echo "RELEASE_VERSION=nightly-$(date '+%Y-%m-%d')-$short_sha" >> $GITHUB_OUTPUT - else - echo "RELEASE_VERSION=${{ github.ref_name }}" >> $GITHUB_OUTPUT - fi - - - name: Install native OpenSSL on Linux - if: runner.os == 'Linux' && !(startsWith(matrix.target, 'aarch64') || endsWith(matrix.target, 'musl')) - run: sudo apt-get install -y pkg-config libssl-dev - - name: Install GCC for aarch64 (for cross-compiling openssl) - if: runner.os == 'Linux' && startsWith(matrix.target, 'aarch64') - run: | - sudo apt-get update - sudo apt-get install gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu - if [[ "${{matrix.target}}" == *musl ]]; then - sudo apt-get install musl-dev musl-tools - fi - - name: Install musl-gcc (for compiling OpenSSL) - if: matrix.target == 'x86_64-unknown-linux-musl' - run: sudo apt-get install musl-tools - - - name: Calculate openssl-vendored - shell: bash - id: is-openssl-vendored - run: | - case "${{ matrix.target }}" in - "aarch64-apple-darwin" | "x86_64-apple-darwin" | "aarch64-unknown-linux-gnu" | "aarch64-unknown-linux-musl" | "x86_64-unknown-linux-musl") - echo "enabled=--features openssl-vendored" >> $GITHUB_OUTPUT - ;; - *) - echo "enabled=" >> $GITHUB_OUTPUT - ;; - esac - - name: Checkout uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + if: startsWith(github.ref, 'refs/tags/') + with: + python-version: '3.x' + + - name: Increment version + if: startsWith(github.ref, 'refs/tags/') + run: python .github/workflows/replace_version_spec.py --new-version=${{ github.ref_name }} + - name: Setup Rust uses: dtolnay/rust-toolchain@stable with: target: ${{ matrix.target }} - # problems with cross-compiling linux with musl - - run: echo "RUSTFLAGS=-D warnings -C target-feature=+crt-static -C link-self-contained=yes" >> "${GITHUB_ENV}" - if: contains(matrix.target, '-linux-musl') - - run: | - echo "CC=aarch64-linux-gnu-gcc" >> "$GITHUB_ENV" - echo "CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER=aarch64-linux-gnu-gcc" 
>> "$GITHUB_ENV" - if: matrix.target == 'aarch64-unknown-linux-musl' + - name: Install cross (cargo cross compiler) + if: matrix.cross + uses: taiki-e/install-action@v2 + with: + tool: cross + + - name: Build (native) + if: ${{ !matrix.cross }} + run: cargo build --manifest-path cpp-linter-lib/Cargo.toml --release --bin cpp-linter --target ${{ matrix.target }} ${{ matrix.vendered && '--features openssl-vendored' || '' }} - - name: Build - env: - # problems with cross-compiling aarch64 linux with gnu - CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: /usr/bin/aarch64-linux-gnu-gcc - run: cargo build --manifest-path cpp-linter-cli/Cargo.toml --release --bin cpp-linter-cli --target ${{ matrix.target }} ${{ steps.is-openssl-vendored.outputs.enabled }} + - name: Build (cross) + if: matrix.cross + run: cross build --manifest-path cpp-linter-lib/Cargo.toml --release --bin cpp-linter --target ${{ matrix.target }} ${{ matrix.vendered && '--features openssl-vendored' || '' }} - - name: Prepare artifacts [Windows] - shell: bash - if: matrix.os == 'windows-latest' - id: prep-artifacts-windows - run: | - release_dir="cpp-linter-cli-${{ steps.calc-version.outputs.RELEASE_VERSION }}" - artifact_path="cpp-linter-cli-${{ steps.calc-version.outputs.RELEASE_VERSION }}-${{ matrix.target }}.zip" - echo "ARTIFACT_PATH=$artifact_path" >> $GITHUB_OUTPUT - mkdir $release_dir - cp target/${{ matrix.target }}/release/cpp-linter-cli.exe $release_dir/ - cp LICENSE $release_dir/ - 7z a -tzip $artifact_path $release_dir/ - - name: Prepare artifacts [Unix] - shell: bash - id: prep-artifacts-unix - if: matrix.os != 'windows-latest' - run: | - release_dir="cpp-linter-cli-${{ steps.calc-version.outputs.RELEASE_VERSION }}" - artifact_path="cpp-linter-cli-${{ steps.calc-version.outputs.RELEASE_VERSION }}-${{ matrix.target }}.tar.gz" - echo "ARTIFACT_PATH=$artifact_path" >> $GITHUB_OUTPUT - mkdir $release_dir - cp target/${{ matrix.target }}/release/cpp-linter-cli $release_dir/ - cp LICENSE $release_dir - tar -czvf $artifact_path $release_dir/ + - name: Prepare artifacts + run: mv target/${{ matrix.target }}/release/cpp-linter${{ runner.os == 'Windows' && '.exe' || '' }} ./cpp-linter-${{ matrix.target }}${{ runner.os == 'Windows' && '.exe' || '' }} - name: Upload artifacts uses: actions/upload-artifact@v4 with: - name: ${{ steps.prep-artifacts-unix.outputs.ARTIFACT_PATH || steps.prep-artifacts-windows.outputs.ARTIFACT_PATH }} - path: ${{ steps.prep-artifacts-unix.outputs.ARTIFACT_PATH || steps.prep-artifacts-windows.outputs.ARTIFACT_PATH }} + name: cpp-linter-${{ matrix.target }} + path: cpp-linter-${{ matrix.target }}* if-no-files-found: error create-release: - if: startswith(github.ref, 'refs/tags') + if: startswith(github.ref, 'refs/tagsv') runs-on: ubuntu-latest needs: [create-assets] permissions: @@ -156,38 +148,24 @@ jobs: persist-credentials: false - name: Install Rust run: rustup update stable --no-self-update + - uses: actions/setup-python@v5 + with: + python-version: '3.x' + - name: Increment version + run: python .github/workflows/replace_version_spec.py --new-version=${{ github.ref_name }} - run: cargo package + - name: Download built assets + uses: actions/download-artifact@v4 + with: + pattern: cpp-linter-* + path: dist - name: Create a Github Release - if: ${{ startsWith(github.ref, 'refs/tags/v') }} env: GH_TOKEN: ${{ github.token }} - run: gh release create ${{ github.ref_name }} --generate-notes + run: | + files=$(ls dist/cpp-linter* + gh release create ${{ github.ref_name }} --generate-notes $files - run: 
cargo publish working-directory: cpp-linter-lib env: CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} - - upload-assets: - needs: [create-release] - runs-on: ubuntu-latest - strategy: - matrix: - target: - - aarch64-unknown-linux-gnu - # skip this target due to cross-compiling OpenSSL for musl C lib - # - aarch64-unknown-linux-musl - - x86_64-unknown-linux-gnu - - x86_64-unknown-linux-musl - - aarch64-apple-darwin - - x86_64-apple-darwin - - x86_64-pc-windows-msvc - steps: - - name: Download build asset - uses: actions/download-artifact@v4 - with: - name: cpp-linter-cli-${{ matrix.target }} - path: dist - - name: Upload release assets - env: - GH_TOKEN: ${{ github.token }} - run: gh release upload ${{ github.ref_name }} dist/cpp-linter-cli${{ contains(matrix.target, 'windows') && '.exe' || '' }}#cpp-linter-cli_${{ matrix.target }} --clobber diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index c0d91cc..29c936d 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -2,36 +2,73 @@ name: Docs on: [push, workflow_dispatch] +env: + CARGO_TERM_COLOR: always + jobs: - build: - permissions: - contents: write + cache-deps: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 + - run: rustup update --no-self-update + - name: Cache .cargo locked resources + uses: actions/cache@v4 + with: + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} + - run: cargo fetch + build-sphinx: + runs-on: ubuntu-latest + needs: [cache-deps] + steps: + - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: 3.x - - name: Install docs dependencies + - name: Install dependencies working-directory: cpp-linter-py run: pip install -r docs/requirements.txt + - name: Cache .cargo locked resources + uses: actions/cache@v4 + with: + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} + - name: Build docs working-directory: cpp-linter-py run: sphinx-build docs docs/_build/html - - name: upload docs build as artifact + - name: Upload docs build as artifact uses: actions/upload-artifact@v4 with: - name: "cpp-linter-py-docs" + name: cpp-linter-py_docs path: cpp-linter-py/docs/_build/html - - name: upload to github pages + - name: Upload to github pages # only publish doc changes from main branch if: github.ref == 'refs/heads/main' - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: cpp-linter-py/docs/_build/html + + build-rustdoc: + runs-on: ubuntu-latest + needs: [cache-deps] + steps: + - uses: actions/checkout@v4 + - run: rustup update --no-self-update + - name: Cache .cargo locked resources + uses: actions/cache@v4 + with: + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} + - run: cargo doc --no-deps --manifest-path cpp-linter-lib/Cargo.toml + - name: upload rustdoc build as artifact + uses: actions/upload-artifact@v4 + with: + path: target/doc + name: cpp-linter-lib_docs diff --git a/.github/workflows/pre-commit-hooks.yml b/.github/workflows/pre-commit-hooks.yml index 0df1903..acbd5a9 100644 --- a/.github/workflows/pre-commit-hooks.yml +++ b/.github/workflows/pre-commit-hooks.yml @@ -2,16 +2,25 @@ name: Pre-commit on: push: + branches: [main] pull_request: - types: opened + branches: [main] jobs: - check-source-files: + pre-commit: + uses: cpp-linter/.github/.github/workflows/pre-commit.yml@main + + cargo-tools: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4
- - uses: actions/setup-python@v5 + - run: rustup update + - name: Cache .cargo locked resources + uses: actions/cache@v4 with: - python-version: '3.x' - - run: python3 -m pip install pre-commit - - run: pre-commit run --all-files + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} + - name: cargo clippy + run: cargo clippy + - name: cargo fmt + run: cargo fmt --check diff --git a/.github/workflows/python-packaging.yml b/.github/workflows/python-packaging.yml index 9b2994b..2c82c23 100644 --- a/.github/workflows/python-packaging.yml +++ b/.github/workflows/python-packaging.yml @@ -8,6 +8,9 @@ on: tags: - '*' pull_request: + branches: + - main + - master workflow_dispatch: permissions: @@ -19,12 +22,22 @@ jobs: strategy: fail-fast: false matrix: - target: [x86_64, x86, aarch64, armv7, s390x, ppc64le] + target: + - x86_64 + - x86 + - aarch64 + - armv7 + # - s390x + - ppc64le steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.10' + python-version: '3.x' + + - name: Increment version + if: startsWith(github.ref, 'refs/tags/') + run: python .github/workflows/replace_version_spec.py --new-version=${{ github.ref_name }} - name: Calculate openssl-vendored shell: bash @@ -71,8 +84,11 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.10' + python-version: '3.x' architecture: ${{ matrix.target }} + - name: Increment version + if: startsWith(github.ref, 'refs/tags/') + run: python .github/workflows/replace_version_spec.py --new-version=${{ github.ref_name }} - name: Build wheels uses: PyO3/maturin-action@v1 with: @@ -94,7 +110,10 @@ jobs: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: - python-version: '3.10' + python-version: '3.x' + - name: Increment version + if: startsWith(github.ref, 'refs/tags/') + run: python .github/workflows/replace_version_spec.py --new-version=${{ github.ref_name }} - name: Build wheels uses: PyO3/maturin-action@v1 with: diff --git a/.github/workflows/replace_version_spec.py b/.github/workflows/replace_version_spec.py new file mode 100644 index 0000000..9603f21 --- /dev/null +++ b/.github/workflows/replace_version_spec.py @@ -0,0 +1,30 @@ +import argparse +from pathlib import Path +import sys + + +class Args(argparse.Namespace): + new_version: str = "2.0.0" + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("-n", "--new-version", required=True) + args = parser.parse_args(namespace=Args()) + cargo_path = Path("Cargo.toml") + if not cargo_path.exists(): + print("workspace Cargo.toml not in working directory") + return 1 + doc = cargo_path.read_text(encoding="utf-8") + version_pattern = 'version = "%s" # auto' + old_version = version_pattern % "2.0.0" + if old_version not in doc: + print("Could not find version in Cargo.toml:\n", doc) + return 1 + doc = doc.replace(old_version, version_pattern % args.new_version) + cargo_path.write_text(doc, encoding="utf-8") + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.github/workflows/run-dev-tests.yml b/.github/workflows/run-dev-tests.yml index 46e8469..9d988bc 100644 --- a/.github/workflows/run-dev-tests.yml +++ b/.github/workflows/run-dev-tests.yml @@ -19,7 +19,25 @@ env: CARGO_TERM_COLOR: always jobs: + cache-deps: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: ['windows-latest', ubuntu-latest] + steps: + - run: rustup update --no-self-update + - uses: actions/checkout@v4 + - name: Cache .cargo locked resources + uses:
actions/cache@v4 + with: + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} + - run: cargo fetch + test: + needs: [cache-deps] + name: test ${{ matrix.os }} w/ clang v${{ matrix.version }} strategy: fail-fast: false matrix: @@ -33,12 +51,12 @@ jobs: - run: rustup component add llvm-tools-preview # https://docs.rs/openssl/latest/openssl/#automatic - - name: Install OpenSSL (Linux) - if: runner.os == 'Linux' - run: sudo apt-get install -y pkg-config libssl-dev - - name: Install OpenSSL (MacOS) - if: runner.os == 'macOS' - run: brew install openssl@3 + # - name: Install OpenSSL (Linux) + # if: runner.os == 'Linux' + # run: sudo apt-get install -y pkg-config libssl-dev + # - name: Install OpenSSL (MacOS) + # if: runner.os == 'macOS' + # run: brew install openssl@3 # - name: Install OpenSSL (Windows) # if: runner.os == 'Windows' # run: vcpkg install openssl @@ -48,7 +66,7 @@ jobs: with: tool: cargo-nextest,cargo-llvm-cov,cargo-binstall - - name: Install llvm-cov-pretty (HTL report generator) + - name: Install llvm-cov-pretty (HTML report generator) run: cargo binstall -y llvm-cov-pretty - uses: actions/setup-python@v5 @@ -86,11 +104,17 @@ jobs: python -m pip install clang-tools clang-tools --install ${{ matrix.version }} + - name: Cache .cargo locked resources + uses: actions/cache/restore@v4 + with: + path: ~/.cargo + key: ${{ runner.os }}-cargo-${{ hashFiles('Cargo.lock') }} + - name: Collect Coverage working-directory: cpp-linter-lib env: CLANG_VERSION: ${{ matrix.version }} - run: cargo llvm-cov --hide-instantiations --lib --no-report nextest + run: cargo llvm-cov --lib --no-report nextest - name: Generate Coverage HTML report working-directory: cpp-linter-lib @@ -114,7 +138,7 @@ jobs: run: | cargo llvm-cov report --lcov --output-path lcov.info - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 if: matrix.version == '16' && runner.os == 'Linux' with: token: ${{secrets.CODECOV_TOKEN}} diff --git a/.gitignore b/.gitignore index d843371..20432c6 100644 --- a/.gitignore +++ b/.gitignore @@ -180,7 +180,7 @@ debug/ # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html -Cargo.lock +# Cargo.lock # These are backup files generated by rustfmt **/*.rs.bk diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3de3c3c..7b70074 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,20 +21,3 @@ repos: args: [ --fix ] # Run the python formatter. - id: ruff-format - - repo: local - # these hooks require a tools managed by the rustup installer - hooks: - # Run the rust formatter. - # Run the rust linter. - - id: cargo-clippy - name: cargo clippy - description: Lint all rust files with the clippy tool - entry: cargo clippy --allow-staged --allow-dirty --fix - language: rust - pass_filenames: false - - id: cargo-fmt - name: cargo fmt - description: Format all rust files with the rustfmt tool - entry: cargo fmt - language: rust - pass_filenames: false diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 0000000..f5a40b3 --- /dev/null +++ b/Cargo.lock @@ -0,0 +1,1768 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "addr2line" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" + +[[package]] +name = "aho-corasick" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +dependencies = [ + "memchr", +] + +[[package]] +name = "anstream" +version = "0.6.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" + +[[package]] +name = "anstyle-parse" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +dependencies = [ + "anstyle", + "windows-sys 0.52.0", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "backtrace" +version = "0.3.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +dependencies = [ + "addr2line", + "cc", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" + +[[package]] +name = "bumpalo" +version = "3.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" + +[[package]] +name = "bytes" 
+version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "jobserver", + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "clap" +version = "4.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d8838454fda655dafd3accb2b6e2bea645b9e4078abe84a22ceb947235c5cc" +dependencies = [ + "clap_builder", +] + +[[package]] +name = "clap_builder" +version = "4.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "216aec2b177652e3846684cbfe25c9964d18ec45234f0f5da5157b207ed1aab6" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_lex" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "colorchoice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" + +[[package]] +name = "cpp-linter-lib" +version = "2.0.0" +dependencies = [ + "clap", + "git2", + "lenient_semver", + "log", + "openssl", + "openssl-probe", + "regex", + "reqwest", + "semver", + "serde", + "serde-xml-rs", + "serde_json", + "tempfile", + "tokio", + "tokio-macros", + "tokio-stream", + "which", +] + +[[package]] +name = "cpp-linter-py" +version = "2.0.0" +dependencies = [ + "cpp-linter-lib", + "pyo3", + "tokio", +] + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "encoding_rs" +version = "0.8.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "fastrand" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-channel" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" + +[[package]] +name = "futures-sink" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" + +[[package]] +name = "futures-task" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" + +[[package]] +name = "futures-util" +version = "0.3.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +dependencies = [ + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", +] + +[[package]] +name = "getrandom" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "gimli" +version = "0.28.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "git2" +version = "0.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" +dependencies = [ + "bitflags 2.4.1", + "libc", + "libgit2-sys", + "log", + "openssl-probe", + "openssl-sys", + "url", +] + +[[package]] +name = "h2" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.14.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" + +[[package]] +name = "heck" +version 
= "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "hermit-abi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" + +[[package]] +name = "home" +version = "0.5.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "http" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http", +] + +[[package]] +name = "http-body-util" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +dependencies = [ + "bytes", + "futures-util", + "http", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" + +[[package]] +name = "hyper" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +dependencies = [ + "futures-util", + "http", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cde7055719c54e36e95e8719f95883f22072a48ede39db7fc17a4e1d5281e9b9" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "http", + "http-body", + "hyper", + "pin-project-lite", + "socket2", + "tokio", + "tower", + "tower-service", + "tracing", +] + +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "indoc" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" + +[[package]] +name = "ipnet" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" + +[[package]] +name = "itoa" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" + +[[package]] +name = "jobserver" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" +dependencies = [ + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "lenient_semver" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de8de3f4f3754c280ce1c8c42ed8dd26a9c8385c2e5ad4ec5a77e774cea9c1ec" +dependencies = [ + "lenient_semver_parser", + "semver", +] + +[[package]] +name = "lenient_semver_parser" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f650c1d024ddc26b4bb79c3076b30030f2cf2b18292af698c81f7337a64d7d6" +dependencies = [ + "lenient_semver_version_builder", + "semver", +] + +[[package]] +name = "lenient_semver_version_builder" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9049f8ff49f75b946f95557148e70230499c8a642bf2d6528246afc7d0282d17" +dependencies = [ + "semver", +] + +[[package]] +name = "libc" +version = "0.2.152" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" + +[[package]] +name = "libgit2-sys" +version = "0.17.0+1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" +dependencies = [ + "cc", + "libc", + "libssh2-sys", + "libz-sys", + "openssl-sys", + "pkg-config", +] + +[[package]] +name = "libssh2-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dc8a030b787e2119a731f1951d6a773e2280c660f8ec4b0f5e1505a386e71ee" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "295c17e837573c8c821dbaeb3cceb3d745ad082f7572191409e69cbc1b3fd050" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "linux-raw-sys" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" + +[[package]] +name = "log" +version = 
"0.4.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "memchr" +version = "2.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" + +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +dependencies = [ + "adler", +] + +[[package]] +name = "mio" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec" +dependencies = [ + "hermit-abi", + "libc", + "wasi", + "windows-sys 0.52.0", +] + +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "object" +version = "0.32.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" + +[[package]] +name = "openssl" +version = "0.10.62" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cde4d2d9200ad5909f8dac647e29482e07c3a35de8a13fce7c9c7747ad9f671" +dependencies = [ + "bitflags 2.4.1", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-src" +version = "300.2.1+3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fe476c29791a5ca0d1273c697e96085bbabbbea2ef7afd5617e78a4b40332d3" +dependencies = [ + "cc", +] + +[[package]] +name = "openssl-sys" +version = "0.9.98" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1665caf8ab2dc9aef43d1c0023bd904633a6a05cb30b0ad59bec2ae986e57a7" +dependencies = [ + "cc", + "libc", + "openssl-src", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pkg-config" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69d3587f8a9e599cc7ec2c00e331f71c4e69a5f9a4b8a6efd5b07466b9736f9a" + +[[package]] +name = "portable-atomic" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" + +[[package]] +name = "proc-macro2" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "pyo3" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "831e8e819a138c36e212f3af3fd9eeffed6bf1510a805af35b0edee5ffa59433" +dependencies = [ + "cfg-if", + "indoc", + "libc", + "memoffset", + "once_cell", + "portable-atomic", + "pyo3-build-config", + "pyo3-ffi", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-build-config" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e8730e591b14492a8945cdff32f089250b05f5accecf74aeddf9e8272ce1fa8" +dependencies = [ + "once_cell", + "target-lexicon", +] + +[[package]] +name = "pyo3-ffi" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e97e919d2df92eb88ca80a037969f44e5e70356559654962cbb3316d00300c6" +dependencies = [ + "libc", + "pyo3-build-config", +] + +[[package]] +name = "pyo3-macros" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb57983022ad41f9e683a599f2fd13c3664d7063a3ac5714cae4b7bee7d3f206" +dependencies = [ + "proc-macro2", + "pyo3-macros-backend", + "quote", + "syn", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec480c0c51ddec81019531705acac51bcdbeae563557c982aa8263bb96880372" +dependencies = [ + "heck", + "proc-macro2", + "pyo3-build-config", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "redox_syscall" +version = "0.4.1" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "reqwest" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "native-tls", + "once_cell", + "percent-encoding", + "pin-project-lite", + "rustls-pemfile", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "system-configuration", + "tokio", + "tokio-native-tls", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "winreg", +] + +[[package]] +name = "ring" +version = "0.17.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +dependencies = [ + "cc", + "cfg-if", + "getrandom", + "libc", + "spin", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" + +[[package]] +name = "rustix" +version = "0.38.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" +dependencies = [ + "bitflags 2.4.1", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls" +version = "0.23.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" +dependencies = [ + "once_cell", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "196fe16b00e106300d3e45ecfcb764fa292a535d7326a29a5875c579c7417425" +dependencies = [ + "base64", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0" + +[[package]] +name = "rustls-webpki" +version = "0.102.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "ryu" +version = "1.0.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" + +[[package]] +name = "schannel" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +dependencies = [ + "windows-sys 0.52.0", +] + +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" + +[[package]] +name = "serde" +version = "1.0.195" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-xml-rs" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb3aa78ecda1ebc9ec9847d5d3aba7d618823446a049ba2491940506da6e2782" +dependencies = [ + "log", + "serde", + "thiserror", + "xml-rs", +] + +[[package]] +name = "serde_derive" +version = "1.0.195" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.111" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" + +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "spin" +version = "0.9.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" + +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.74" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fceb41e3d546d0bd83421d3409b1460cc7444cd389341a4c880fe7a042cb3d7" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" + +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "target-lexicon" +version = "0.12.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" + +[[package]] +name = "tempfile" +version = "3.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix", + "windows-sys 0.52.0", +] + +[[package]] +name = "thiserror" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.39.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daa4fb1bc778bd6f04cbfc4bb2d06a7396a8f299dc33ea1900cedaa316f467b1" +dependencies = [ + "backtrace", + "bytes", + "libc", + "mio", + "pin-project-lite", + "socket2", + "tokio-macros", + "windows-sys 0.52.0", +] + +[[package]] +name = "tokio-macros" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" +dependencies = [ + "rustls", + "rustls-pki-types", + "tokio", +] + +[[package]] +name = "tokio-stream" +version = "0.1.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", + "tracing", +] + +[[package]] +name = "tower" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" +dependencies = [ + "futures-core", + "futures-util", + "pin-project", + "pin-project-lite", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" + +[[package]] +name = "tracing" +version = "0.1.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +dependencies = [ + "pin-project-lite", + "tracing-core", +] + +[[package]] +name = "tracing-core" +version = "0.1.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "unicode-bidi" +version = "0.3.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" + +[[package]] +name = "unicode-ident" +version = "1.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unindent" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "utf8parse" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "wasm-bindgen" +version = "0.2.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" +dependencies = [ + "bumpalo", + "log", + "once_cell", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bde2032aeb86bdfaecc8b261eef3cba735cc426c1f3a3416d1e0791be95fc461" +dependencies = [ + "cfg-if", + "js-sys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.90" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" + +[[package]] +name = "web-sys" +version = "0.3.67" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "which" +version = "6.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3d9c5ed668ee1f17edb3b627225343d210006a90bb1e3745ce1f30b1fb115075" +dependencies = [ + "either", + "home", + "rustix", + "winsafe", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnu" +version = 
"0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + +[[package]] +name = "winreg" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +dependencies = [ + "cfg-if", + "windows-sys 0.48.0", +] + +[[package]] +name = "winsafe" +version = "0.0.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" + +[[package]] +name = "xml-rs" +version = "0.8.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fcb9cbac069e033553e8bb871be2fbdffcab578eb25bd0f7c508cedc6dcd75a" + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/Cargo.toml b/Cargo.toml index 85b6dfc..180fd36 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,16 +1,14 @@ # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [workspace] -members = ["cpp-linter-lib", "cpp-linter-cli", "cpp-linter-py"] +members = ["cpp-linter-lib", "cpp-linter-py"] resolver = "2" [workspace.package] -version = "2.0.0" +version = "2.0.0" # auto authors = [ "Brendan Doherty", "Peter Shen", ] description = "Run clang-format and clang-tidy on a batch of files." documentation = "https://example.com/bar" - -[workspace.dependencies] diff --git a/README.md b/README.md index 8228c19..efe4b7e 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,4 @@ + # C/C++ Linting Package A Python and Rust package for linting C/C++ code with clang-tidy and/or clang-format to collect feedback provided in the form of thread comments, step summary, or file annotations. @@ -11,42 +12,57 @@ A Python and Rust package for linting C/C++ code with clang-tidy and/or clang-fo ## Usage -For usage in a CI workflow, see [the cpp-linter/cpp-linter-action repository](https://github.com/cpp-linter/cpp-linter-action). +For usage in a CI workflow, see +[the cpp-linter/cpp-linter-action repository](https://github.com/cpp-linter/cpp-linter-action). -For the description of supported Command Line Interface options, see [the CLI documentation](https://cpp-linter.github.io/cpp_linter_rs/cli_args.html). +For the description of supported Command Line Interface options, see +[the CLI documentation](https://cpp-linter.github.io/cpp_linter_rs/cli_args.html). ## Have question or feedback? 
-To provide feedback (requesting a feature or reporting a bug) please post to [issues](https://github.com/cpp-linter/cpp-linter/issues). +To provide feedback (requesting a feature or reporting a bug) please post to +[issues](https://github.com/cpp-linter/cpp_linter_rs/issues). ## License The scripts and documentation in this project are released under the [MIT][MIT]. -Dependencies (that are redistributed by us in binary form) have the following license agreements: +Dependencies (that are redistributed by us in binary form) have the following +license agreements: -- [clap](https://crates.io/crates/clap): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. -- [git2](https://crates.io/crates/git2): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [clap](https://crates.io/crates/clap): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [git2](https://crates.io/crates/git2): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. The following are conditionally included in binaries (using the `openssl-vendored` feature on a case-by-case basis) because it is a dependency of git2: - [openssl](https://crates.io/crates/openssl): Licensed under [Apache 2.0][Apache2] - - [openssl-probe](https://crates.io/crates/openssl-probe) : Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. - -- [lenient_semver](https://crates.io/crates/lenient_semver): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. -- [log](https://crates.io/crates/log): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. -- [regex](https://crates.io/crates/regex): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. -- [reqwest](https://crates.io/crates/reqwest): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. -- [semver](https://crates.io/crates/semver): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. -- [serde](https://crates.io/crates/serde): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. + - [openssl-probe](https://crates.io/crates/openssl-probe): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. + +- [lenient_semver](https://crates.io/crates/lenient_semver): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [log](https://crates.io/crates/log): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [regex](https://crates.io/crates/regex): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [reqwest](https://crates.io/crates/reqwest): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [semver](https://crates.io/crates/semver): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [serde](https://crates.io/crates/serde): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. - [serde-xml-rs](https://crates.io/crates/serde-xml-rs): Licensed under [MIT][MIT]. -- [serde_json](https://crates.io/crates/serde_json): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [serde_json](https://crates.io/crates/serde_json): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. - [which](https://crates.io/crates/which): Licensed under [MIT][MIT]. The python binding uses -- [pyo3](https://crates.io/crates/pyo3): Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. +- [pyo3](https://crates.io/crates/pyo3): + Dual-licensed under [Apache 2.0][Apache2] or [MIT][MIT]. 
[MIT]: https://choosealicense.com/licenses/mit [Apache2]: https://choosealicense.com/licenses/apache-2.0/ diff --git a/cpp-linter-cli/.gitignore b/cpp-linter-cli/.gitignore deleted file mode 100644 index ea8c4bf..0000000 --- a/cpp-linter-cli/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/target diff --git a/cpp-linter-cli/Cargo.toml b/cpp-linter-cli/Cargo.toml deleted file mode 100644 index 4c3eec2..0000000 --- a/cpp-linter-cli/Cargo.toml +++ /dev/null @@ -1,14 +0,0 @@ -[package] -name = "cpp-linter-cli" -edition = "2021" -version.workspace = true -authors.workspace = true -description.workspace = true -documentation.workspace = true -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -cpp-linter-lib = { path = "../cpp-linter-lib", version = "*" } - -[features] -openssl-vendored = ["cpp-linter-lib/openssl-vendored"] diff --git a/cpp-linter-cli/LICENSE b/cpp-linter-cli/LICENSE deleted file mode 100644 index b9ee95b..0000000 --- a/cpp-linter-cli/LICENSE +++ /dev/null @@ -1,9 +0,0 @@ -MIT License - -Copyright (c) 2023 2bndy5 - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
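The removals above, together with the `main.rs` deletion just below, retire the standalone cpp-linter-cli crate; its binary is folded into cpp-linter-lib via the new `[[bin]]` target declared in `cpp-linter-lib/Cargo.toml`. The following is only a minimal sketch of what such a consolidated entry point could look like, assuming it keeps delegating to `cpp_linter_lib::run::run_main` exactly as the removed `cpp-linter-cli/src/main.rs` did; the new `cpp-linter-lib/src/main.rs` itself is not shown in this patch.

```rust
// Hypothetical sketch only; the real cpp-linter-lib/src/main.rs is not part of
// this patch. It mirrors the removed cpp-linter-cli entry point: collect the
// process arguments and hand them to the library's run_main().
use std::env;

use cpp_linter_lib::run::run_main;

pub fn main() {
    run_main(env::args().collect::<Vec<String>>());
}
```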
diff --git a/cpp-linter-cli/src/main.rs b/cpp-linter-cli/src/main.rs deleted file mode 100644 index d0e8fdc..0000000 --- a/cpp-linter-cli/src/main.rs +++ /dev/null @@ -1,7 +0,0 @@ -use std::env; - -use cpp_linter_lib::run::run_main; - -pub fn main() { -    run_main(env::args().collect::<Vec<String>>()); -} diff --git a/cpp-linter-lib/Cargo.toml b/cpp-linter-lib/Cargo.toml index e69d3e4..8746f71 100644 --- a/cpp-linter-lib/Cargo.toml +++ b/cpp-linter-lib/Cargo.toml @@ -8,22 +8,29 @@ documentation.workspace = true # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -clap = { version = ">=4.4.2" } -git2 = ">=0.18.1" +clap = { version = "4.5.15" } +git2 = "0.19.0" lenient_semver = "0.4.2" -log = ">=0.4.20" +log = "0.4.22" openssl = { version = "0.10", features = ["vendored"], optional = true } openssl-probe = { version = "0.1", optional = true } regex = "1.10.2" -reqwest = { version = "0.11", features = ["blocking", "json"] } +reqwest = { version = "0.12.5", features = ["json"]} semver = "1.0.20" serde = { version = "1.0.193", features = ["derive"] } serde-xml-rs = "0.6.0" serde_json = "1.0.108" -which = "5.0.0" +tokio = { version = "1.39.2", features = ["macros", "rt-multi-thread"]} +tokio-macros = "2.4.0" +tokio-stream = "0.1.15" +which = "6.0.2" [dev-dependencies] tempfile = "3.9.0" [features] openssl-vendored = ["dep:openssl", "dep:openssl-probe"] + +[[bin]] +name = "cpp-linter" +path = "src/main.rs" diff --git a/cpp-linter-lib/README.md b/cpp-linter-lib/README.md new file mode 100644 index 0000000..99bc2af --- /dev/null +++ b/cpp-linter-lib/README.md @@ -0,0 +1,14 @@ + +# cpp-linter-lib + +This crate contains the library used as a backend for the +`cpp-linter` binary executable. + +Since the [cpp-linter python package][pypi-org] now uses this library +as a binding, the native binary's `main()` method is also present in this +library. + +See also the [CLI documentation hosted on GitHub][gh-pages]. + +[pypi-org]: https://pypi.org/project/cpp-linter +[gh-pages]: https://cpp-linter.github.io/cpp_linter_rs/cli_args.html diff --git a/cpp-linter-lib/examples/cli_doc.rs b/cpp-linter-lib/examples/cli_doc.rs index 60d852c..a5788ad 100644 --- a/cpp-linter-lib/examples/cli_doc.rs +++ b/cpp-linter-lib/examples/cli_doc.rs @@ -8,32 +8,55 @@ pub fn main() -> std::io::Result<()> { .truncate(true) .create(true) .write(true) - .open("../cpp-linter-py/docs/cli_args.rst")?; - let title = "Command Line Interface Options".to_string(); - writeln!(&doc_file, "{}", &title)?; + .open("cpp-linter-py/docs/cli_args.rst")?; + let title = "Command Line Interface".to_string(); + writeln!(&doc_file, "\n{}", &title)?; for _ in title.chars() { write!(&doc_file, "=")?; } write!(&doc_file, "\n\n")?; - for arg in command.get_arguments() { - writeln!( - &doc_file, - ".. std:option:: -{}, --{}\n", - &arg.get_short().unwrap(), - &arg.get_long().unwrap() - )?; - for line in arg.get_long_help().unwrap().to_string().split('\n') { + writeln!(&doc_file, "Commands\n--------\n")?; + for cmd in command.get_subcommands() { + writeln!(&doc_file, ".. 
std:option:: {}\n", cmd.get_name())?; + for line in cmd.get_about().unwrap().to_string().split('\n') { writeln!(&doc_file, " {}", &line)?; } writeln!(&doc_file)?; - let default = arg.get_default_values(); - if !default.is_empty() { - writeln!(&doc_file, " :Default:")?; - if default.len() < 2 { - writeln!(&doc_file, " ``{:?}``", default.first().unwrap())?; - } else { - for val in default { - writeln!(&doc_file, " - ``{:?}``", val)?; + } + for group in command.get_groups() { + writeln!(&doc_file, "\n{}", group.get_id())?; + for _ in group.get_id().to_string().chars() { + write!(&doc_file, "-")?; + } + write!(&doc_file, "\n\n")?; + for arg_id in group.get_args() { + let mut arg_match = command.get_arguments().filter(|a| *a.get_id() == *arg_id); + let arg = arg_match.next().unwrap_or_else(|| { + panic!( + "arg {} expected in group {}", + arg_id.as_str(), + group.get_id().as_str() + ) + }); + writeln!( + &doc_file, + ".. std:option:: -{}, --{}\n", + &arg.get_short().unwrap(), + &arg.get_long().unwrap() + )?; + for line in arg.get_long_help().unwrap().to_string().split('\n') { + writeln!(&doc_file, " {}", &line)?; + } + writeln!(&doc_file)?; + let default = arg.get_default_values(); + if !default.is_empty() { + writeln!(&doc_file, " :Default:")?; + if default.len() < 2 { + writeln!(&doc_file, " ``{:?}``", default.first().unwrap())?; + } else { + for val in default { + writeln!(&doc_file, " - ``{:?}``", val)?; + } } } } diff --git a/cpp-linter-lib/examples/diff.rs b/cpp-linter-lib/examples/diff.rs deleted file mode 100644 index 5875b9e..0000000 --- a/cpp-linter-lib/examples/diff.rs +++ /dev/null @@ -1,27 +0,0 @@ -use cpp_linter_lib::{ - cli::parse_ignore, - git::{get_diff, open_repo, parse_diff}, -}; -use std::error::Error; - -/// An example to show the file names of the diff for either -/// -/// - only last commit -/// - only staged files -pub fn main() -> Result<(), Box> { - let repo = open_repo(".")?; - let diff = get_diff(&repo); - - let extensions = vec!["cpp", "hpp", "rs"]; - let (ignored, not_ignored) = parse_ignore(&Vec::from_iter(["target", ".github"])); - let files = parse_diff(&diff, &extensions, &ignored, ¬_ignored); - - for file in &files { - println!("{}", file.name.to_string_lossy()); - println!("lines with additions: {:?}", file.added_lines); - println!("ranges of added lines: {:?}", file.added_ranges); - println!("ranges of diff hunks: {:?}", file.diff_chunks); - } - println!("found {} files in diff", files.len()); - Ok(()) -} diff --git a/cpp-linter-lib/examples/gh_rest_api.rs b/cpp-linter-lib/examples/gh_rest_api.rs index 9b9bef7..dc88022 100644 --- a/cpp-linter-lib/examples/gh_rest_api.rs +++ b/cpp-linter-lib/examples/gh_rest_api.rs @@ -1,22 +1,27 @@ use std::env; use std::error::Error; -use cpp_linter_lib::cli::parse_ignore; +use cpp_linter_lib::common_fs::FileFilter; use cpp_linter_lib::github_api::GithubApiClient; // needed to use trait implementations (ie `get_list_of_changed_files()`) use cpp_linter_lib::rest_api::RestApiClient; -pub fn main() -> Result<(), Box> { +#[tokio::main] +pub async fn main() -> Result<(), Box> { env::set_var("GITHUB_SHA", "950ff0b690e1903797c303c5fc8d9f3b52f1d3c5"); env::set_var("GITHUB_REPOSITORY", "cpp-linter/cpp-linter"); let client_controller = GithubApiClient::new(); - let extensions = vec!["cpp", "hpp"]; - let (ignored, not_ignored) = parse_ignore(&Vec::from_iter(["target", ".github"])); + let file_filter = FileFilter::new( + &["target", ".github"], + vec!["cpp".to_string(), "hpp".to_string()], + ); env::set_var("CI", "true"); // needed for 
get_list_of_changed_files() to use REST API - let files = client_controller.get_list_of_changed_files(&extensions, &ignored, ¬_ignored); + let files = client_controller + .get_list_of_changed_files(&file_filter) + .await; for file in &files { println!("{}", file.name.to_string_lossy()); diff --git a/cpp-linter-lib/src/clang_tools/clang_format.rs b/cpp-linter-lib/src/clang_tools/clang_format.rs index 22154a0..f0c052b 100644 --- a/cpp-linter-lib/src/clang_tools/clang_format.rs +++ b/cpp-linter-lib/src/clang_tools/clang_format.rs @@ -1,7 +1,10 @@ //! This module holds functionality specific to running clang-format and parsing it's //! output. -use std::process::Command; +use std::{ + process::Command, + sync::{Arc, Mutex}, +}; // non-std crates use serde::Deserialize; @@ -14,7 +17,7 @@ use crate::{ }; /// A Structure used to deserialize clang-format's XML output. -#[derive(Debug, Deserialize, PartialEq)] +#[derive(Debug, Deserialize, PartialEq, Clone)] #[serde(rename = "replacements")] pub struct FormatAdvice { /// A list of [`Replacement`]s that clang-tidy wants to make. @@ -60,42 +63,58 @@ impl Clone for Replacement { } } +/// Get a total count of clang-format advice from the given list of [FileObj]s. +pub fn tally_format_advice(files: &[Arc>]) -> u64 { + let mut total = 0; + for file in files { + let file = file.lock().unwrap(); + if let Some(advice) = &file.format_advice { + if !advice.replacements.is_empty() { + total += 1; + } + } + } + total +} + /// Run clang-tidy for a specific `file`, then parse and return it's XML output. pub fn run_clang_format( cmd: &mut Command, - file: &FileObj, + file: &mut Arc>, style: &str, lines_changed_only: &LinesChangedOnly, -) -> FormatAdvice { +) -> Vec<(log::Level, String)> { + let mut logs = vec![]; + let mut file = file.lock().unwrap(); cmd.args(["--style", style, "--output-replacements-xml"]); let ranges = file.get_ranges(lines_changed_only); for range in &ranges { cmd.arg(format!("--lines={}:{}", range.start(), range.end())); } cmd.arg(file.name.to_string_lossy().as_ref()); - log::info!( - "Running \"{} {}\"", - cmd.get_program().to_string_lossy(), - cmd.get_args() - .map(|x| x.to_str().unwrap()) - .collect::>() - .join(" ") - ); + logs.push(( + log::Level::Info, + format!( + "Running \"{} {}\"", + cmd.get_program().to_string_lossy(), + cmd.get_args() + .map(|x| x.to_str().unwrap()) + .collect::>() + .join(" ") + ), + )); let output = cmd.output().unwrap(); if !output.stderr.is_empty() || !output.status.success() { - log::debug!( - "clang-format raised the follow errors:\n{}", - String::from_utf8(output.stderr).unwrap() - ); + logs.push(( + log::Level::Debug, + format!( + "clang-format raised the follow errors:\n{}", + String::from_utf8(output.stderr).unwrap() + ), + )); } - // log::debug!( - // "clang-format XML output:\n{}", - // String::from_utf8(output.stdout.clone()).unwrap() - // ); if output.stdout.is_empty() { - return FormatAdvice { - replacements: vec![], - }; + return logs; } let xml = String::from_utf8(output.stdout) .unwrap() @@ -107,8 +126,8 @@ pub fn run_clang_format( .whitespace_to_characters(true) .ignore_root_level_whitespace(true); let event_reader = serde_xml_rs::EventReader::new_with_config(xml.as_bytes(), config); - let mut format_advice: FormatAdvice = - FormatAdvice::deserialize(&mut Deserializer::new(event_reader)).unwrap_or(FormatAdvice { + let mut format_advice = FormatAdvice::deserialize(&mut Deserializer::new(event_reader)) + .unwrap_or(FormatAdvice { replacements: vec![], }); if !format_advice.replacements.is_empty() 
{ @@ -130,7 +149,8 @@ pub fn run_clang_format( } format_advice.replacements = filtered_replacements; } - format_advice + file.format_advice = Some(format_advice); + logs } #[cfg(test)] diff --git a/cpp-linter-lib/src/clang_tools/clang_tidy.rs b/cpp-linter-lib/src/clang_tools/clang_tidy.rs index a99639e..ca84420 100644 --- a/cpp-linter-lib/src/clang_tools/clang_tidy.rs +++ b/cpp-linter-lib/src/clang_tools/clang_tidy.rs @@ -5,6 +5,7 @@ use std::{ env::{consts::OS, current_dir}, path::PathBuf, process::Command, + sync::{Arc, Mutex}, }; // non-std crates @@ -18,7 +19,7 @@ use crate::{ }; /// Used to deserialize a JSON compilation database -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] pub struct CompilationDatabase { /// A list of [`CompilationUnit`] units: Vec, @@ -28,7 +29,7 @@ pub struct CompilationDatabase { /// /// The only purpose this serves is to normalize relative paths for build systems that /// use/need relative paths (ie ninja). -#[derive(Deserialize, Debug)] +#[derive(Deserialize, Debug, Clone)] struct CompilationUnit { /// The directory of the build environment directory: String, @@ -44,6 +45,7 @@ struct CompilationUnit { } /// A structure that represents a single notification parsed from clang-tidy's stdout. +#[derive(Debug, Clone)] pub struct TidyNotification { /// The file's path and name (supposedly relative to the repository root folder). pub filename: String, @@ -86,6 +88,7 @@ impl TidyNotification { } /// A struct to hold notification from clang-tidy about a single file +#[derive(Debug, Clone)] pub struct TidyAdvice { /// A list of notifications parsed from clang-tidy stdout. pub notes: Vec, @@ -98,10 +101,11 @@ pub struct TidyAdvice { fn parse_tidy_output( tidy_stdout: &[u8], database_json: &Option, -) -> TidyAdvice { +) -> Option { let note_header = Regex::new(r"^(.+):(\d+):(\d+):\s(\w+):(.*)\[([a-zA-Z\d\-\.]+)\]$").unwrap(); let mut notification = None; let mut result = Vec::new(); + let cur_dir = current_dir().unwrap(); for line in String::from_utf8(tidy_stdout.to_vec()).unwrap().lines() { if let Some(captured) = note_header.captures(line) { if let Some(note) = notification { @@ -126,22 +130,20 @@ fn parse_tidy_output( // file was not a named unit in the database; // try to normalize path as if relative to working directory. // NOTE: This shouldn't happen with a properly formed JSON database - filename = normalize_path(&PathBuf::from_iter([ - ¤t_dir().unwrap(), - &filename, - ])); + filename = normalize_path(&PathBuf::from_iter([&cur_dir, &filename])); } } else { // still need to normalize the relative path despite missing database info. // let's assume the file is relative to current working directory. 
- filename = - normalize_path(&PathBuf::from_iter([¤t_dir().unwrap(), &filename])); + filename = normalize_path(&PathBuf::from_iter([&cur_dir, &filename])); } } assert!(filename.is_absolute()); - if filename.is_absolute() { + if filename.is_absolute() && filename.starts_with(&cur_dir) { + // if this filename can't be made into a relative path, then it is + // likely not a member of the project's sources (ie /usr/include/stdio.h) filename = filename - .strip_prefix(current_dir().unwrap()) + .strip_prefix(&cur_dir) .expect("cannot determine filename by relative path.") .to_path_buf(); } @@ -164,19 +166,42 @@ fn parse_tidy_output( if let Some(note) = notification { result.push(note); } - TidyAdvice { notes: result } + if result.is_empty() { + None + } else { + Some(TidyAdvice { notes: result }) + } +} + +/// Get a total count of clang-tidy advice from the given list of [FileObj]s. +pub fn tally_tidy_advice(files: &[Arc>]) -> u64 { + let mut total = 0; + for file in files { + let file = file.lock().unwrap(); + if let Some(advice) = &file.tidy_advice { + for tidy_note in &advice.notes { + let file_path = PathBuf::from(&tidy_note.filename); + if file_path == file.name { + total += 1; + } + } + } + } + total } /// Run clang-tidy, then parse and return it's output. pub fn run_clang_tidy( cmd: &mut Command, - file: &FileObj, + file: &mut Arc>, checks: &str, lines_changed_only: &LinesChangedOnly, database: &Option, - extra_args: &Option>, + extra_args: &Option>, database_json: &Option, -) -> TidyAdvice { +) -> Vec<(log::Level, std::string::String)> { + let mut logs = vec![]; + let mut file = file.lock().unwrap(); if !checks.is_empty() { cmd.args(["-checks", checks]); } @@ -204,35 +229,59 @@ pub fn run_clang_tidy( cmd.args(["--line-filter", filter.as_str()]); } cmd.arg(file.name.to_string_lossy().as_ref()); - log::info!( - "Running \"{} {}\"", - cmd.get_program().to_string_lossy(), - cmd.get_args() - .map(|x| x.to_str().unwrap()) - .collect::>() - .join(" ") - ); + logs.push(( + log::Level::Info, + format!( + "Running \"{} {}\"", + cmd.get_program().to_string_lossy(), + cmd.get_args() + .map(|x| x.to_str().unwrap()) + .collect::>() + .join(" ") + ), + )); let output = cmd.output().unwrap(); - log::debug!( - "Output from clang-tidy:\n{}", - String::from_utf8(output.stdout.to_vec()).unwrap() - ); + logs.push(( + log::Level::Debug, + format!( + "Output from clang-tidy:\n{}", + String::from_utf8(output.stdout.to_vec()).unwrap() + ), + )); if !output.stderr.is_empty() { - log::debug!( - "clang-tidy made the following summary:\n{}", - String::from_utf8(output.stderr).unwrap() - ); + logs.push(( + log::Level::Debug, + format!( + "clang-tidy made the following summary:\n{}", + String::from_utf8(output.stderr).unwrap() + ), + )); } - parse_tidy_output(&output.stdout, database_json) + file.tidy_advice = parse_tidy_output(&output.stdout, database_json); + logs } #[cfg(test)] mod test { + use std::{ + env, + path::PathBuf, + process::Command, + sync::{Arc, Mutex}, + }; + + use regex::Regex; + + use crate::{clang_tools::get_clang_tool_exe, cli::LinesChangedOnly, common_fs::FileObj}; + + use super::run_clang_tidy; + + // ***************** test for regex parsing of clang-tidy stdout + #[test] fn test_capture() { let src = "tests/demo/demo.hpp:11:11: warning: use a trailing return type for this function [modernize-use-trailing-return-type]"; - let pat = - regex::Regex::new(r"^(.+):(\d+):(\d+):\s(\w+):(.*)\[([a-zA-Z\d\-\.]+)\]$").unwrap(); + let pat = 
Regex::new(r"^(.+):(\d+):(\d+):\s(\w+):(.*)\[([a-zA-Z\d\-\.]+)\]$").unwrap(); let cap = pat.captures(src).unwrap(); assert_eq!( cap.get(0).unwrap().as_str(), @@ -248,4 +297,41 @@ mod test { .as_str() ) } + + #[test] + fn use_extra_args() { + let exe_path = get_clang_tool_exe( + "clang-tidy", + env::var("CLANG_VERSION").unwrap_or("".to_string()).as_str(), + ) + .unwrap(); + let mut cmd = Command::new(exe_path); + let file = FileObj::new(PathBuf::from("tests/demo/demo.cpp")); + let mut arc_ref = Arc::new(Mutex::new(file)); + let extra_args = vec!["-std=c++17".to_string(), "-Wall".to_string()]; + run_clang_tidy( + &mut cmd, + &mut arc_ref, + "", // use .clang-tidy config file + &LinesChangedOnly::Off, // check all lines + &None, // no database path + &Some(extra_args), // <---- the reason for this test + &None, // no deserialized database + ); + // since `cmd` was passed as a mutable reference, we can inspect the args that were added + let locked_file = arc_ref.lock().unwrap(); + let mut args = cmd + .get_args() + .map(|arg| arg.to_str().unwrap()) + .collect::>(); + assert_eq!(locked_file.name.to_string_lossy(), args.pop().unwrap()); + assert_eq!( + vec!["--extra-arg", "\"-std=c++17\"", "--extra-arg", "\"-Wall\""], + args + ); + assert!(!locked_file + .tidy_advice + .as_ref() + .is_some_and(|advice| advice.notes.is_empty())); + } } diff --git a/cpp-linter-lib/src/clang_tools/mod.rs b/cpp-linter-lib/src/clang_tools/mod.rs index 637aa4a..31e9145 100644 --- a/cpp-linter-lib/src/clang_tools/mod.rs +++ b/cpp-linter-lib/src/clang_tools/mod.rs @@ -1,23 +1,31 @@ //! This crate holds the functionality related to running clang-format and/or //! clang-tidy. -use std::{env::current_dir, fs, path::PathBuf, process::Command}; +use std::{ + env::current_dir, + fs, + path::PathBuf, + process::Command, + sync::{Arc, Mutex}, +}; // non-std crates use lenient_semver; use semver::Version; +use tokio::task::JoinSet; use which::{which, which_in}; // project-specific modules/crates use super::common_fs::FileObj; use crate::{ cli::LinesChangedOnly, + common_fs::FileFilter, logger::{end_log_group, start_log_group}, }; pub mod clang_format; -use clang_format::{run_clang_format, FormatAdvice}; +use clang_format::run_clang_format; pub mod clang_tidy; -use clang_tidy::{run_clang_tidy, CompilationDatabase, TidyAdvice}; +use clang_tidy::{run_clang_tidy, CompilationDatabase}; /// Fetch the path to a clang tool by `name` (ie `"clang-tidy"` or `"clang-format"`) and /// `version`. @@ -69,92 +77,154 @@ pub fn get_clang_tool_exe(name: &str, version: &str) -> Result, + pub extra_args: Option>, + pub database_json: Option, + pub style: String, + pub clang_tidy_command: Option, + pub clang_format_command: Option, + pub tidy_filter: FileFilter, + pub format_filter: FileFilter, +} + +/// This creates a task to run clang-tidy and clang-format on a single file. +/// +/// Returns a Future that infallibly resolves to a 2-tuple that contains /// -/// The returned list of [`FormatAdvice`] is parallel to the `files` list passed in -/// here. The returned 2D list of [`TidyAdvice`] is also parallel on the first -/// dimension. The second dimension is a list of notes specific to a translation unit -/// (each element of `files`). +/// 1. The file's path. +/// 2. A collections of cached logs. 
A [`Vec`] of tuples that hold +/// - log level +/// - messages +fn analyze_single_file( + file: &mut Arc>, + clang_params: Arc, +) -> (PathBuf, Vec<(log::Level, String)>) { + let file_lock = file.lock().unwrap(); + let file_name = file_lock.name.clone(); + drop(file_lock); + let mut logs = vec![]; + if let Some(tidy_cmd) = &clang_params.clang_tidy_command { + if clang_params + .tidy_filter + .is_source_or_ignored(file_name.as_path()) + { + let tidy_result = run_clang_tidy( + &mut Command::new(tidy_cmd), + file, + clang_params.tidy_checks.as_str(), + &clang_params.lines_changed_only, + &clang_params.database, + &clang_params.extra_args, + &clang_params.database_json, + ); + logs.extend(tidy_result); + } else { + logs.push(( + log::Level::Info, + format!( + "{} not scanned due to `--ignore-tidy`", + file_name.as_os_str().to_string_lossy() + ), + )) + } + } + if let Some(format_cmd) = &clang_params.clang_format_command { + if clang_params + .format_filter + .is_source_or_ignored(file_name.as_path()) + { + let format_result = run_clang_format( + &mut Command::new(format_cmd), + file, + clang_params.style.as_str(), + &clang_params.lines_changed_only, + ); + logs.extend(format_result); + } else { + logs.push(( + log::Level::Info, + format!( + "{} not scanned by clang-format due to `--ignore-format`", + file_name.as_os_str().to_string_lossy() + ), + )); + } + } + (file_name, logs) +} + +/// Runs clang-tidy and/or clang-format and returns the parsed output from each. /// /// If `tidy_checks` is `"-*"` then clang-tidy is not executed. /// If `style` is a blank string (`""`), then clang-format is not executed. -pub fn capture_clang_tools_output( - files: &Vec, +pub async fn capture_clang_tools_output( + files: &mut Vec>>, version: &str, - tidy_checks: &str, - style: &str, - lines_changed_only: &LinesChangedOnly, - database: Option, - extra_args: Option>, -) -> (Vec, Vec) { + clang_params: &mut ClangParams, +) { // find the executable paths for clang-tidy and/or clang-format and show version // info as debugging output. 
- let clang_tidy_command = if tidy_checks != "-*" { - let cmd = get_clang_tool_exe("clang-tidy", version).unwrap(); - log::debug!( - "{} --version\n{}", - &cmd.to_string_lossy(), - String::from_utf8_lossy(&Command::new(&cmd).arg("--version").output().unwrap().stdout) - ); - Some(cmd) - } else { - None + if clang_params.tidy_checks != "-*" { + clang_params.clang_tidy_command = { + let cmd = get_clang_tool_exe("clang-tidy", version).unwrap(); + log::debug!( + "{} --version\n{}", + &cmd.to_string_lossy(), + String::from_utf8_lossy( + &Command::new(&cmd).arg("--version").output().unwrap().stdout + ) + ); + Some(cmd) + } }; - let clang_format_command = if !style.is_empty() { - let cmd = get_clang_tool_exe("clang-format", version).unwrap(); - log::debug!( - "{} --version\n{}", - &cmd.to_string_lossy(), - String::from_utf8_lossy(&Command::new(&cmd).arg("--version").output().unwrap().stdout) - ); - Some(cmd) - } else { - None + if !clang_params.style.is_empty() { + clang_params.clang_format_command = { + let cmd = get_clang_tool_exe("clang-format", version).unwrap(); + log::debug!( + "{} --version\n{}", + &cmd.to_string_lossy(), + String::from_utf8_lossy( + &Command::new(&cmd).arg("--version").output().unwrap().stdout + ) + ); + Some(cmd) + } }; // parse database (if provided) to match filenames when parsing clang-tidy's stdout - let database_json: Option = if let Some(db_path) = &database { + if let Some(db_path) = &clang_params.database { if let Ok(db_str) = fs::read(db_path) { - Some( + clang_params.database_json = Some( serde_json::from_str::( String::from_utf8(db_str).unwrap().as_str(), ) .unwrap(), ) - } else { - None } - } else { - None }; + let mut executors = JoinSet::new(); // iterate over the discovered files and run the clang tools - let mut all_format_advice: Vec = Vec::with_capacity(files.len()); - let mut all_tidy_advice: Vec = Vec::with_capacity(files.len()); for file in files { - start_log_group(format!("Analyzing {}", file.name.to_string_lossy())); - if let Some(tidy_cmd) = &clang_tidy_command { - all_tidy_advice.push(run_clang_tidy( - &mut Command::new(tidy_cmd), - file, - tidy_checks, - lines_changed_only, - &database, - &extra_args, - &database_json, - )); - } - if let Some(format_cmd) = &clang_format_command { - all_format_advice.push(run_clang_format( - &mut Command::new(format_cmd), - file, - style, - lines_changed_only, - )); + let arc_params = Arc::new(clang_params.clone()); + let mut arc_file = Arc::clone(file); + executors.spawn(async move { analyze_single_file(&mut arc_file, arc_params) }); + } + + while let Some(output) = executors.join_next().await { + if let Ok(out) = output { + let (file_name, logs) = out; + start_log_group(format!("Analyzing {}", file_name.to_string_lossy())); + for (level, msg) in logs { + log::log!(level, "{}", msg); + } + end_log_group(); } - end_log_group(); } - (all_format_advice, all_tidy_advice) } #[cfg(test)] diff --git a/cpp-linter-lib/src/cli.rs b/cpp-linter-lib/src/cli.rs index ef8a17c..e142f5f 100644 --- a/cpp-linter-lib/src/cli.rs +++ b/cpp-linter-lib/src/cli.rs @@ -1,13 +1,11 @@ //! This module holds the Command Line Interface design. -use std::fs; - // non-std crates -use clap::builder::FalseyValueParser; -use clap::{Arg, ArgAction, ArgMatches, Command}; +use clap::builder::{ArgPredicate, FalseyValueParser}; +use clap::{Arg, ArgAction, ArgGroup, ArgMatches, Command}; /// An enum to describe `--lines-changed-only` CLI option's behavior. 
-#[derive(PartialEq)] +#[derive(PartialEq, Clone, Debug)] pub enum LinesChangedOnly { /// All lines are scanned Off, @@ -20,6 +18,10 @@ pub enum LinesChangedOnly { /// Builds and returns the Command Line Interface's argument parsing object. pub fn get_arg_parser() -> Command { Command::new("cpp-linter") + .subcommand( + Command::new("version") + .about("Display the cpp-linter version and exit.") + ) .arg( Arg::new("verbosity") .long("verbosity") @@ -38,6 +40,7 @@ thread comments or file annotations. Arg::new("database") .long("database") .short('p') + .help_heading("clang-tidy options") .long_help( "The path that is used to read a compile command database. For example, it can be a CMake build directory in which a file named @@ -52,6 +55,7 @@ example of setting up Clang Tooling on a source tree.", .short('s') .long("style") .default_value("llvm") + .help_heading("clang-format options") .long_help( "The style rules to use. @@ -69,6 +73,7 @@ example of setting up Clang Tooling on a source tree.", .default_value( "boost-*,bugprone-*,performance-*,readability-*,portability-*,modernize-*,clang-analyzer-*,cppcoreguidelines-*", ) + .help_heading("clang-tidy options") .long_help( "A comma-separated list of globs with optional ``-`` prefix. Globs are processed in order of appearance in the list. @@ -90,6 +95,9 @@ See also clang-tidy docs for more info. Arg::new("version") .short('V') .long("version") + .default_missing_value("NO-VERSION") + .num_args(0..=1) + .require_equals(true) .default_value("") .long_help( "The desired version of the clang tools to use. Accepted options are @@ -109,6 +117,7 @@ strings which can be 8, 9, 10, 11, 12, 13, 14, 15, 16, 17. .long("extensions") .value_delimiter(',') .default_value("c,h,C,H,cpp,hpp,cc,hh,c++,h++,cxx,hxx") + .help_heading("source options") .long_help("A comma-separated list of file extensions to analyze. "), ) @@ -117,6 +126,7 @@ strings which can be 8, 9, 10, 11, 12, 13, 14, 15, 16, 17. .short('r') .long("repo-root") .default_value(".") + .help_heading("source options") .long_help( "The relative path to the repository root directory. This path is relative to the runner's ``GITHUB_WORKSPACE`` environment variable (or @@ -130,6 +140,7 @@ the current working directory if not using a CI runner). .long("ignore") .value_delimiter('|') .default_value(".github|target") + .help_heading("source options") .long_help( "Set this option with path(s) to ignore (or not ignore). @@ -147,12 +158,35 @@ the current working directory if not using a CI runner). ", ), ) + .arg( + Arg::new("ignore-tidy") + .short('D') + .long("ignore-tidy") + .value_delimiter('|') + .default_value("") + .help_heading("clang-tidy options") + .long_help( + "Similar to `--ignore` but applied exclusively to files analyzed by clang-tidy.", + ), + ) + .arg( + Arg::new("ignore-format") + .short('M') + .long("ignore-format") + .value_delimiter('|') + .default_value("") + .help_heading("clang-format options") + .long_help( + "Similar to `--ignore` but applied exclusively to files analyzed by clang-format.", + ), + ) .arg( Arg::new("lines-changed-only") .short('l') .long("lines-changed-only") .value_parser(["true", "false", "diff"]) .default_value("true") + .help_heading("source options") .long_help( "This controls what part of the files are analyzed. 
The following values are accepted: @@ -168,8 +202,10 @@ The following values are accepted: Arg::new("files-changed-only") .short('f') .long("files-changed-only") + .default_value_if("lines-changed-only", ArgPredicate::Equals("true".into()), "true") .default_value("false") .value_parser(FalseyValueParser::new()) + .help_heading("source options") .long_help( "Set this option to false to analyze any source files in the repo. This is automatically enabled if @@ -190,6 +226,7 @@ This is automatically enabled if .long("extra-arg") .short('x') .action(ArgAction::Append) + .help_heading("clang-tidy options") .long_help( "A string of extra arguments passed to clang-tidy for use as compiler arguments. This can be specified more than once for each @@ -207,6 +244,7 @@ avoid using spaces between name and value (use ``=`` instead): .short('g') .value_parser(["true", "false", "updated"]) .default_value("false") + .help_heading("feedback options") .long_help( "Set this option to true to enable the use of thread comments as feedback. Set this to ``update`` to update an existing comment if one exists; @@ -233,6 +271,7 @@ the value 'true' will always delete an old comment and post a new one if necessa .short('t') .value_parser(FalseyValueParser::new()) .default_value("true") + .help_heading("feedback options") .long_help( "Set this option to true or false to enable or disable the use of a thread comment that basically says 'Looks Good To Me' (when all checks pass). @@ -248,6 +287,7 @@ thread comment that basically says 'Looks Good To Me' (when all checks pass). .short('w') .value_parser(FalseyValueParser::new()) .default_value("false") + .help_heading("feedback options") .long_help( "Set this option to true or false to enable or disable the use of a workflow step summary when the run has concluded. @@ -260,79 +300,23 @@ a workflow step summary when the run has concluded. .short('a') .value_parser(FalseyValueParser::new()) .default_value("true") + .help_heading("feedback options") .long_help( "Set this option to false to disable the use of file annotations as feedback. ", ), - ) -} - -/// This will parse the list of paths specified from the CLI using the `--ignore` -/// argument. -/// -/// It returns 2 lists (in order): -/// -/// - `ignored` paths -/// - `not_ignored` paths -/// -/// This function will also read a .gitmodules file located in the working directory. -/// The named submodules' paths will be automatically added to the ignored list, -/// unless the submodule's path is already specified in the not_ignored list. 
-pub fn parse_ignore(ignore: &[&str]) -> (Vec, Vec) { - let mut ignored = vec![]; - let mut not_ignored = vec![]; - for pattern in ignore { - let as_posix = pattern.replace('\\', "/"); - let mut pat = as_posix.as_str(); - let is_ignored = !pat.starts_with('!'); - if !is_ignored { - pat = &pat[1..]; - } - if pat.starts_with("./") { - pat = &pat[2..]; - } - let is_hidden = pat.starts_with('.'); - if is_hidden || is_ignored { - ignored.push(format!("./{pat}")); - } else { - not_ignored.push(format!("./{pat}")); - } - } - - if let Ok(read_buf) = fs::read_to_string(".gitmodules") { - for line in read_buf.split('\n') { - if line.trim_start().starts_with("path") { - assert!(line.find('=').unwrap() > 0); - let submodule = String::from("./") + line.split('=').last().unwrap().trim(); - log::debug!("Found submodule: {submodule}"); - let mut is_ignored = true; - for pat in ¬_ignored { - if pat == &submodule { - is_ignored = false; - break; - } - } - if is_ignored && !ignored.contains(&submodule) { - ignored.push(submodule); - } - } - } - } - - if !ignored.is_empty() { - log::info!("Ignored:"); - for pattern in &ignored { - log::info!(" {pattern}"); - } - } - if !not_ignored.is_empty() { - log::info!("Not Ignored:"); - for pattern in ¬_ignored { - log::info!(" {pattern}"); - } - } - (ignored, not_ignored) + ) + .groups([ + ArgGroup::new("Clang-tidy options") + .args(["tidy-checks", "database", "extra-arg", "ignore-tidy"]), + ArgGroup::new("Clang-format options").args(["style", "ignore-format"]), + ArgGroup::new("General options").args(["verbosity", "version"]), + ArgGroup::new("Source options").args(["extensions", "repo-root", "ignore", "lines-changed-only", "files-changed-only"]), + ArgGroup::new("Feedback options").args([ + "thread-comments", "no-lgtm", "step-summary", "file-annotations" + ]), + ]) } /// Converts the parsed value of the `--extra-arg` option into an optional vector of strings. @@ -354,25 +338,25 @@ pub fn parse_ignore(ignore: &[&str]) -> (Vec, Vec) { /// ``` /// The cpp-linter-action (for Github CI workflows) can only use 1 `extra-arg` input option, so /// the value will be split at spaces. -pub fn convert_extra_arg_val(args: &ArgMatches) -> Option> { - let raw_val = if let Ok(extra_args) = args.try_get_many::("extra-arg") { - extra_args.map(|extras| extras.map(|val| val.as_str()).collect::>()) - } else { - None - }; - if let Some(val) = raw_val { +pub fn convert_extra_arg_val(args: &ArgMatches) -> Option> { + let raw_val = args + .try_get_many::("extra-arg") + .expect("parser failed in set a default for `--extra-arf`"); + if let Some(mut val) = raw_val { if val.len() == 1 { // specified once; split and return result - Some( - val[0] + return Some( + val.next() + .unwrap() .trim_matches('\'') .trim_matches('"') .split(' ') + .map(|i| i.to_string()) .collect(), - ) + ); } else { // specified multiple times; just return - Some(val) + Some(val.map(|i| i.to_string()).collect()) } } else { // no value specified; just return diff --git a/cpp-linter-lib/src/common_fs.rs b/cpp-linter-lib/src/common_fs.rs deleted file mode 100644 index fcc0151..0000000 --- a/cpp-linter-lib/src/common_fs.rs +++ /dev/null @@ -1,439 +0,0 @@ -//! A module to hold all common file system functionality. - -use std::io::Read; -use std::path::{Component, Path}; -use std::{fs, io}; -use std::{ops::RangeInclusive, path::PathBuf}; - -use crate::cli::LinesChangedOnly; - -/// A structure to represent a file's path and line changes. -#[derive(Debug)] -pub struct FileObj { - /// The path to the file. 
- pub name: PathBuf, - - /// The list of lines with additions. - pub added_lines: Vec, - - /// The list of ranges that span only lines with additions. - pub added_ranges: Vec>, - - /// The list of ranges that span the lines present in diff chunks. - pub diff_chunks: Vec>, -} - -impl FileObj { - /// Instantiate a rudimentary object with only file name information. - /// - /// To instantiate an object with line information, use [FileObj::from]. - pub fn new(name: PathBuf) -> Self { - FileObj { - name, - added_lines: Vec::::new(), - added_ranges: Vec::>::new(), - diff_chunks: Vec::>::new(), - } - } - - /// Instantiate an object with file name and changed lines information. - pub fn from( - name: PathBuf, - added_lines: Vec, - diff_chunks: Vec>, - ) -> Self { - let added_ranges = FileObj::consolidate_numbers_to_ranges(&added_lines); - FileObj { - name, - added_lines, - added_ranges, - diff_chunks, - } - } - - /// A helper function to consolidate a [Vec] of line numbers into a - /// [Vec>] in which each range describes the beginning and - /// ending of a group of consecutive line numbers. - fn consolidate_numbers_to_ranges(lines: &[u32]) -> Vec> { - let mut range_start = None; - let mut ranges: Vec> = Vec::new(); - for (index, number) in lines.iter().enumerate() { - if index == 0 { - range_start = Some(*number); - } else if number - 1 != lines[index - 1] { - ranges.push(RangeInclusive::new(range_start.unwrap(), lines[index - 1])); - range_start = Some(*number); - } - if index == lines.len() - 1 { - ranges.push(RangeInclusive::new(range_start.unwrap(), *number)); - } - } - ranges - } - - pub fn get_ranges(&self, lines_changed_only: &LinesChangedOnly) -> Vec> { - match lines_changed_only { - LinesChangedOnly::Diff => self.diff_chunks.to_vec(), - LinesChangedOnly::On => self.added_ranges.to_vec(), - _ => Vec::new(), - } - } -} - -/// Describes if a specified `file_name` is contained within the given `set` of paths. -/// -/// The `set` of paths is used as domains, so the specified `file_name` can be a direct -/// or distant descendant of any given paths in the `set`. -pub fn is_file_in_list(file_name: &Path, set: &[String], prompt: String) -> bool { - for pattern in set { - let pat = Path::new(pattern); - if pat.is_file() { - if file_name == pat { - log::debug!( - "{} is {prompt} as specified via {:?}", - file_name.to_string_lossy().replace('\\', "/"), - pat - ); - return true; - } - } else if pat.is_dir() && file_name.starts_with(pat) { - log::debug!( - "{} is {prompt} as specified in domain {:?}", - file_name.to_string_lossy().replace('\\', "/"), - pat - ); - return true; - } - // else file doesn't exist; return false - } - false -} - -/// A helper function that checks if `entry` satisfies the following conditions (in -/// ordered priority): -/// -/// - Does `entry`'s path use at least 1 of the listed file `extensions`? (takes -/// precedence) -/// - Is `entry` *not* specified in list of `ignored` paths? -/// - Is `entry` specified in the list of explicitly `not_ignored` paths? 
(supersedes -/// specified `ignored` paths) -pub fn is_source_or_ignored( - entry: &Path, - extensions: &[&str], - ignored: &[String], - not_ignored: &[String], -) -> bool { - let extension = entry.extension(); - if extension.is_none() { - return false; - } - let mut is_ignored = true; - for ext in extensions { - if ext == &extension.unwrap().to_os_string().into_string().unwrap() { - is_ignored = false; - break; - } - } - if !is_ignored { - log::debug!( - "{} is a source file", - entry.to_string_lossy().replace('\\', "/") - ); - let is_in_ignored = is_file_in_list(entry, ignored, String::from("ignored")); - let is_in_not_ignored = is_file_in_list(entry, not_ignored, String::from("not ignored")); - if !is_in_ignored || is_in_not_ignored { - return true; - } - } - false -} - -/// Walks a given `root_path` recursively and returns a [`Vec`] that -/// -/// - uses at least 1 of the `extensions` -/// - is not specified in the given list of `ignored` paths -/// - is specified in the given list `not_ignored` paths (which supersedes `ignored` paths) -pub fn list_source_files( - extensions: &[&str], - ignored: &[String], - not_ignored: &[String], - root_path: &str, -) -> Vec { - let mut files: Vec = Vec::new(); - let entries = fs::read_dir(root_path) - .expect("repo root-path should exist") - .map(|res| res.map(|e| e.path())) - .collect::, io::Error>>() - .unwrap(); - for entry in entries { - if entry.is_dir() { - let mut is_hidden = false; - let parent = entry.components().last().expect("parent not known"); - if parent.as_os_str().to_str().unwrap().starts_with('.') { - is_hidden = true; - } - if !is_hidden { - files.extend(list_source_files( - extensions, - ignored, - not_ignored, - &entry.into_os_string().into_string().unwrap(), - )); - } - } else { - let is_valid_src = is_source_or_ignored(&entry, extensions, ignored, not_ignored); - if is_valid_src { - files.push(FileObj::new( - entry.clone().strip_prefix("./").unwrap().to_path_buf(), - )); - } - } - } - files -} - -/// Gets the line and column number from a given `offset` (of bytes) for given -/// `file_path`. -/// -/// This computes the line and column numbers from a buffer of bytes read from the -/// `file_path`. In non-UTF-8 encoded files, this does not guarantee that a word -/// boundary exists at the returned column number. However, the `offset` given to this -/// function is expected to originate from diagnostic information provided by -/// clang-format or clang-tidy. -pub fn get_line_cols_from_offset(file_path: &PathBuf, offset: usize) -> (usize, usize) { - let mut file_buf = vec![0; offset]; - fs::File::open(file_path) - .unwrap() - .read_exact(&mut file_buf) - .unwrap(); - let lines = file_buf.split(|byte| byte == &b'\n'); - let line_count = lines.clone().count(); - let column_count = lines.last().unwrap_or(&[]).len() + 1; // +1 because not a 0 based count - (line_count, column_count) -} - -/// This was copied from [cargo source code](https://github.com/rust-lang/cargo/blob/fede83ccf973457de319ba6fa0e36ead454d2e20/src/cargo/util/paths.rs#L61). -/// -/// NOTE: Rust [std::path] crate has no native functionality equivalent to this. -pub fn normalize_path(path: &Path) -> PathBuf { - let mut components = path.components().peekable(); - let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { - components.next(); - PathBuf::from(c.as_os_str()) - } else { - PathBuf::new() - }; - - for component in components { - match component { - Component::Prefix(..) 
=> unreachable!(), - Component::RootDir => { - ret.push(component.as_os_str()); - } - Component::CurDir => {} - Component::ParentDir => { - ret.pop(); - } - Component::Normal(c) => { - ret.push(c); - } - } - } - ret -} - -#[cfg(test)] -mod test { - use std::env::current_dir; - use std::env::set_current_dir; - use std::path::PathBuf; - - use super::{get_line_cols_from_offset, list_source_files, normalize_path, FileObj}; - use crate::cli::LinesChangedOnly; - use crate::cli::{get_arg_parser, parse_ignore}; - use crate::common_fs::is_file_in_list; - - // *********************** tests for normalized paths - - #[test] - fn normalize_redirects() { - let mut src = current_dir().unwrap(); - src.push(".."); - src.push( - current_dir() - .unwrap() - .strip_prefix(current_dir().unwrap().parent().unwrap()) - .unwrap(), - ); - println!("relative path = {}", src.to_str().unwrap()); - assert_eq!(normalize_path(&src), current_dir().unwrap()); - } - - #[test] - fn normalize_no_root() { - let src = PathBuf::from("../cpp-linter-lib"); - let mut cur_dir = current_dir().unwrap(); - cur_dir = cur_dir - .strip_prefix(current_dir().unwrap().parent().unwrap()) - .unwrap() - .to_path_buf(); - println!("relative path = {}", src.to_str().unwrap()); - assert_eq!(normalize_path(&src), cur_dir); - } - - #[test] - fn normalize_current_redirect() { - let src = PathBuf::from("tests/./ignored_paths"); - println!("relative path = {}", src.to_str().unwrap()); - assert_eq!(normalize_path(&src), PathBuf::from("tests/ignored_paths")); - } - - // ************* tests for ignored paths - - fn setup_ignore(input: &str) -> (Vec, Vec) { - let arg_parser = get_arg_parser(); - let args = arg_parser.get_matches_from(vec!["cpp-linter", "-i", input]); - let ignore_arg = args - .get_many::("ignore") - .unwrap() - .map(|s| s.as_str()) - .collect::>(); - let (ignored, not_ignored) = parse_ignore(&ignore_arg); - println!("ignored = {:?}", ignored); - println!("not ignored = {:?}", not_ignored); - (ignored, not_ignored) - } - - #[test] - fn ignore_src() { - let (ignored, not_ignored) = setup_ignore("src"); - assert!(is_file_in_list( - &PathBuf::from("./src/lib.rs"), - &ignored, - "ignored".to_string() - )); - assert!(!is_file_in_list( - &PathBuf::from("./src/lib.rs"), - ¬_ignored, - "not_ignored".to_string() - )); - } - - #[test] - fn ignore_root() { - let (ignored, not_ignored) = setup_ignore("!src/lib.rs|./"); - assert!(is_file_in_list( - &PathBuf::from("./cargo.toml"), - &ignored, - "ignored".to_string() - )); - assert!(is_file_in_list( - &PathBuf::from("./src/lib.rs"), - ¬_ignored, - "not_ignored".to_string() - )); - } - - #[test] - fn ignore_root_implicit() { - let (ignored, not_ignored) = setup_ignore("!src|"); - assert!(is_file_in_list( - &PathBuf::from("./cargo.toml"), - &ignored, - "ignored".to_string() - )); - assert!(is_file_in_list( - &PathBuf::from("./src/lib.rs"), - ¬_ignored, - "not_ignored".to_string() - )); - } - - #[test] - fn ignore_submodules() { - set_current_dir("tests/ignored_paths").unwrap(); - let (ignored, not_ignored) = setup_ignore("!pybind11"); - - // using Vec::contains() because these files don't actually exist in project files - for ignored_submodule in ["./RF24", "./RF24Network", "./RF24Mesh"] { - assert!(ignored.contains(&ignored_submodule.to_string())); - assert!(!is_file_in_list( - &PathBuf::from(ignored_submodule.to_string() + "/some_src.cpp"), - &ignored, - "ignored".to_string() - )); - } - assert!(not_ignored.contains(&"./pybind11".to_string())); - assert!(!is_file_in_list( - 
&PathBuf::from("./pybind11/some_src.cpp"), - ¬_ignored, - "not ignored".to_string() - )); - } - - // *********************** tests for recursive path search - - #[test] - fn walk_dir_recursively() { - let (ignored, not_ignored) = setup_ignore("target"); - let extensions = vec!["cpp", "hpp"]; - let files = list_source_files(&extensions, &ignored, ¬_ignored, "."); - assert!(!files.is_empty()); - for file in files { - assert!(extensions.contains( - &file - .name - .extension() - .unwrap_or_default() - .to_string_lossy() - .to_string() - .as_str() - )); - } - } - - // *********************** tests for translating byte offset into line/column - - #[test] - fn translate_byte_offset() { - let (lines, cols) = get_line_cols_from_offset(&PathBuf::from("tests/demo/demo.cpp"), 144); - println!("lines: {lines}, cols: {cols}"); - assert_eq!(lines, 13); - assert_eq!(cols, 5); - } - - // *********************** tests for FileObj::get_ranges() - - #[test] - fn get_ranges_0() { - let file_obj = FileObj::new(PathBuf::from("tests/demo/demo.cpp")); - let ranges = file_obj.get_ranges(&LinesChangedOnly::Off); - assert!(ranges.is_empty()); - } - - #[test] - fn get_ranges_2() { - let diff_chunks = vec![1..=10]; - let added_lines = vec![4, 5, 9]; - let file_obj = FileObj::from( - PathBuf::from("tests/demo/demo.cpp"), - added_lines, - diff_chunks.clone(), - ); - let ranges = file_obj.get_ranges(&LinesChangedOnly::Diff); - assert_eq!(ranges, diff_chunks); - } - - #[test] - fn get_ranges_1() { - let diff_chunks = vec![1..=10]; - let added_lines = vec![4, 5, 9]; - let file_obj = FileObj::from( - PathBuf::from("tests/demo/demo.cpp"), - added_lines, - diff_chunks, - ); - let ranges = file_obj.get_ranges(&LinesChangedOnly::On); - assert_eq!(ranges, vec![4..=5, 9..=9]); - } -} diff --git a/cpp-linter-lib/src/common_fs/file_filter.rs b/cpp-linter-lib/src/common_fs/file_filter.rs new file mode 100644 index 0000000..a18bd97 --- /dev/null +++ b/cpp-linter-lib/src/common_fs/file_filter.rs @@ -0,0 +1,272 @@ +use std::{fs, path::Path}; + +use super::FileObj; + +#[derive(Debug, Clone)] +pub struct FileFilter { + ignored: Vec, + not_ignored: Vec, + extensions: Vec, +} +impl FileFilter { + pub fn new(ignore: &[&str], extensions: Vec) -> Self { + let (ignored, not_ignored) = Self::parse_ignore(ignore); + Self { + ignored, + not_ignored, + extensions, + } + } + + /// This will parse the list of paths specified from the CLI using the `--ignore` + /// argument. + /// + /// It returns 2 lists (in order): + /// + /// - `ignored` paths + /// - `not_ignored` paths + fn parse_ignore(ignore: &[&str]) -> (Vec, Vec) { + let mut ignored = vec![]; + let mut not_ignored = vec![]; + for pattern in ignore { + let as_posix = pattern.replace('\\', "/"); + let mut pat = as_posix.as_str(); + let is_ignored = !pat.starts_with('!'); + if !is_ignored { + pat = &pat[1..]; + } + if pat.starts_with("./") { + pat = &pat[2..]; + } + let is_hidden = pat.starts_with('.'); + if is_hidden || is_ignored { + ignored.push(format!("./{pat}")); + } else { + not_ignored.push(format!("./{pat}")); + } + } + + if !ignored.is_empty() { + log::info!("Ignored:"); + for pattern in &ignored { + log::info!(" {pattern}"); + } + } + if !not_ignored.is_empty() { + log::info!("Not Ignored:"); + for pattern in ¬_ignored { + log::info!(" {pattern}"); + } + } + (ignored, not_ignored) + } + + /// This function will also read a .gitmodules file located in the working directory. 
+ /// The named submodules' paths will be automatically added to the ignored list, + /// unless the submodule's path is already specified in the not_ignored list. + pub fn parse_submodules(&mut self) { + if let Ok(read_buf) = fs::read_to_string(".gitmodules") { + for line in read_buf.split('\n') { + if line.trim_start().starts_with("path") { + assert!(line.find('=').unwrap() > 0); + let submodule = String::from("./") + line.split('=').last().unwrap().trim(); + log::debug!("Found submodule: {submodule}"); + let mut is_ignored = true; + for pat in &self.not_ignored { + if pat == &submodule { + is_ignored = false; + break; + } + } + if is_ignored && !self.ignored.contains(&submodule) { + self.ignored.push(submodule); + } + } + } + } + } + + /// Describes if a specified `file_name` is contained within the given `set` of paths. + /// + /// The `is_ignored` flag describes which list of paths is used as domains. + /// The specified `file_name` can be a direct or distant descendant of any paths in + /// the list. + /// + /// Returns a [`Some`] value of the the path/pattern that matches the given `file_name`. + /// If given `file_name` is not in the specified list, then [`None`] is returned. + pub fn is_file_in_list(&self, file_name: &Path, is_ignored: bool) -> Option { + let set = if is_ignored { + &self.ignored + } else { + &self.not_ignored + }; + for pattern in set { + let pat = Path::new(&pattern); + if (pat.is_file() && file_name == pat) || (pat.is_dir() && file_name.starts_with(pat)) { + return Some(pattern.to_owned()); + } + } + None + } + + /// A helper function that checks if `entry` satisfies the following conditions (in + /// ordered priority): + /// + /// - Does `entry`'s path use at least 1 of the listed file `extensions`? (takes + /// precedence) + /// - Is `entry` *not* specified in list of `ignored` paths? + /// - Is `entry` specified in the list of explicitly `not_ignored` paths? 
(supersedes + /// specified `ignored` paths) + pub fn is_source_or_ignored(&self, entry: &Path) -> bool { + let extension = entry.extension(); + if extension.is_none() { + return false; + } + let mut is_ignored = true; + for ext in &self.extensions { + if ext == &extension.unwrap().to_os_string().into_string().unwrap() { + is_ignored = false; + break; + } + } + if !is_ignored { + let is_in_ignored = self.is_file_in_list(entry, true); + let is_in_not_ignored = self.is_file_in_list(entry, false); + if is_in_not_ignored.is_some() || is_in_ignored.is_none() { + return true; + } + } + false + } + + /// Walks a given `root_path` recursively and returns a [`Vec`] that + /// + /// - uses at least 1 of the given `extensions` + /// - is not specified in the internal list of `ignored` paths + /// - is specified in the internal list `not_ignored` paths (which supersedes `ignored` paths) + pub fn list_source_files(&self, root_path: &str) -> Vec { + let mut files: Vec = Vec::new(); + let entries = fs::read_dir(root_path).expect("repo root-path should exist"); + for entry in entries.flatten() { + let path = entry.path(); + if path.is_dir() { + let mut is_hidden = false; + let parent = path.components().last().expect("parent not known"); + if parent.as_os_str().to_str().unwrap().starts_with('.') { + is_hidden = true; + } + if !is_hidden { + files.extend( + self.list_source_files(&path.into_os_string().into_string().unwrap()), + ); + } + } else { + let is_valid_src = self.is_source_or_ignored(&path); + if is_valid_src { + files.push(FileObj::new( + path.clone().strip_prefix("./").unwrap().to_path_buf(), + )); + } + } + } + files + } +} + +#[cfg(test)] +mod tests { + use super::FileFilter; + use crate::cli::get_arg_parser; + use std::{env::set_current_dir, path::PathBuf}; + + // ************* tests for ignored paths + + fn setup_ignore(input: &str, extension: Vec) -> FileFilter { + let arg_parser = get_arg_parser(); + let args = arg_parser.get_matches_from(vec!["cpp-linter", "-i", input]); + let ignore_arg = args + .get_many::("ignore") + .unwrap() + .map(|s| s.as_str()) + .collect::>(); + let file_filter = FileFilter::new(&ignore_arg, extension); + println!("ignored = {:?}", file_filter.ignored); + println!("not ignored = {:?}", file_filter.not_ignored); + file_filter + } + + #[test] + fn ignore_src() { + let file_filter = setup_ignore("src", vec![]); + assert!(file_filter + .is_file_in_list(&PathBuf::from("./src/lib.rs"), true) + .is_some()); + assert!(file_filter + .is_file_in_list(&PathBuf::from("./src/lib.rs"), false) + .is_none()); + } + + #[test] + fn ignore_root() { + let file_filter = setup_ignore("!src/lib.rs|./", vec![]); + assert!(file_filter + .is_file_in_list(&PathBuf::from("./cargo.toml"), true) + .is_some()); + assert!(file_filter + .is_file_in_list(&PathBuf::from("./src/lib.rs"), false) + .is_some()); + } + + #[test] + fn ignore_root_implicit() { + let file_filter = setup_ignore("!src|", vec![]); + assert!(file_filter + .is_file_in_list(&PathBuf::from("./cargo.toml"), true) + .is_some()); + assert!(file_filter + .is_file_in_list(&PathBuf::from("./src/lib.rs"), false) + .is_some()); + } + + #[test] + fn ignore_submodules() { + set_current_dir("tests/ignored_paths").unwrap(); + let mut file_filter = setup_ignore("!pybind11", vec![]); + file_filter.parse_submodules(); + + // using Vec::contains() because these files don't actually exist in project files + for ignored_submodule in ["./RF24", "./RF24Network", "./RF24Mesh"] { + 
assert!(file_filter.ignored.contains(&ignored_submodule.to_string())); + assert!(file_filter + .is_file_in_list( + &PathBuf::from(ignored_submodule.to_string() + "/some_src.cpp"), + true + ) + .is_none()); + } + assert!(file_filter.not_ignored.contains(&"./pybind11".to_string())); + assert!(file_filter + .is_file_in_list(&PathBuf::from("./pybind11/some_src.cpp"), false) + .is_none()); + } + + // *********************** tests for recursive path search + + #[test] + fn walk_dir_recursively() { + let extensions = vec!["cpp".to_string(), "hpp".to_string()]; + let file_filter = setup_ignore("target", extensions.clone()); + let files = file_filter.list_source_files("."); + assert!(!files.is_empty()); + for file in files { + assert!(extensions.contains( + &file + .name + .extension() + .unwrap_or_default() + .to_string_lossy() + .to_string() + )); + } + } +} diff --git a/cpp-linter-lib/src/common_fs/mod.rs b/cpp-linter-lib/src/common_fs/mod.rs new file mode 100644 index 0000000..da119a9 --- /dev/null +++ b/cpp-linter-lib/src/common_fs/mod.rs @@ -0,0 +1,234 @@ +//! A module to hold all common file system functionality. + +use std::fs; +use std::io::Read; +use std::path::{Component, Path}; +use std::{ops::RangeInclusive, path::PathBuf}; + +use crate::clang_tools::clang_format::FormatAdvice; +use crate::clang_tools::clang_tidy::TidyAdvice; +use crate::cli::LinesChangedOnly; +mod file_filter; +pub use file_filter::FileFilter; + +/// A structure to represent a file's path and line changes. +#[derive(Debug, Clone)] +pub struct FileObj { + /// The path to the file. + pub name: PathBuf, + + /// The list of lines with additions. + pub added_lines: Vec, + + /// The list of ranges that span only lines with additions. + pub added_ranges: Vec>, + + /// The list of ranges that span the lines present in diff chunks. + pub diff_chunks: Vec>, + + /// The collection of clang-format advice for this file. + pub format_advice: Option, + + /// The collection of clang-format advice for this file. + pub tidy_advice: Option, +} + +impl FileObj { + /// Instantiate a rudimentary object with only file name information. + /// + /// To instantiate an object with line information, use [FileObj::from]. + pub fn new(name: PathBuf) -> Self { + FileObj { + name, + added_lines: Vec::::new(), + added_ranges: Vec::>::new(), + diff_chunks: Vec::>::new(), + format_advice: None, + tidy_advice: None, + } + } + + /// Instantiate an object with file name and changed lines information. + pub fn from( + name: PathBuf, + added_lines: Vec, + diff_chunks: Vec>, + ) -> Self { + let added_ranges = FileObj::consolidate_numbers_to_ranges(&added_lines); + FileObj { + name, + added_lines, + added_ranges, + diff_chunks, + format_advice: None, + tidy_advice: None, + } + } + + /// A helper function to consolidate a [Vec] of line numbers into a + /// [Vec>] in which each range describes the beginning and + /// ending of a group of consecutive line numbers. 
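The consolidation described in the doc comment above is easiest to see with concrete numbers. The following self-contained sketch illustrates the documented behavior (it is not the crate's own function): an ascending list of changed line numbers collapses into inclusive ranges.

```rust
use std::ops::RangeInclusive;

/// Collapse an ascending list of line numbers into inclusive ranges of
/// consecutive values, e.g. [1, 2, 3, 5, 7, 8] -> [1..=3, 5..=5, 7..=8].
fn to_ranges(lines: &[u32]) -> Vec<RangeInclusive<u32>> {
    let mut ranges = Vec::new();
    let mut iter = lines.iter().copied();
    if let Some(first) = iter.next() {
        let (mut start, mut prev) = (first, first);
        for n in iter {
            if n != prev + 1 {
                // a gap ends the current run of consecutive numbers
                ranges.push(start..=prev);
                start = n;
            }
            prev = n;
        }
        ranges.push(start..=prev);
    }
    ranges
}

fn main() {
    assert_eq!(to_ranges(&[1, 2, 3, 5, 7, 8]), vec![1..=3, 5..=5, 7..=8]);
    assert!(to_ranges(&[]).is_empty());
}
```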
+ fn consolidate_numbers_to_ranges(lines: &[u32]) -> Vec> { + let mut range_start = None; + let mut ranges: Vec> = Vec::new(); + for (index, number) in lines.iter().enumerate() { + if index == 0 { + range_start = Some(*number); + } else if number - 1 != lines[index - 1] { + ranges.push(RangeInclusive::new(range_start.unwrap(), lines[index - 1])); + range_start = Some(*number); + } + if index == lines.len() - 1 { + ranges.push(RangeInclusive::new(range_start.unwrap(), *number)); + } + } + ranges + } + + pub fn get_ranges(&self, lines_changed_only: &LinesChangedOnly) -> Vec> { + match lines_changed_only { + LinesChangedOnly::Diff => self.diff_chunks.to_vec(), + LinesChangedOnly::On => self.added_ranges.to_vec(), + _ => Vec::new(), + } + } +} + +/// Gets the line and column number from a given `offset` (of bytes) for given +/// `file_path`. +/// +/// This computes the line and column numbers from a buffer of bytes read from the +/// `file_path`. In non-UTF-8 encoded files, this does not guarantee that a word +/// boundary exists at the returned column number. However, the `offset` given to this +/// function is expected to originate from diagnostic information provided by +/// clang-format or clang-tidy. +pub fn get_line_cols_from_offset(file_path: &PathBuf, offset: usize) -> (usize, usize) { + let mut file_buf = vec![0; offset]; + fs::File::open(file_path) + .unwrap() + .read_exact(&mut file_buf) + .unwrap(); + let lines = file_buf.split(|byte| byte == &b'\n'); + let line_count = lines.clone().count(); + let column_count = lines.last().unwrap_or(&[]).len() + 1; // +1 because not a 0 based count + (line_count, column_count) +} + +/// This was copied from [cargo source code](https://github.com/rust-lang/cargo/blob/fede83ccf973457de319ba6fa0e36ead454d2e20/src/cargo/util/paths.rs#L61). +/// +/// NOTE: Rust [std::path] crate has no native functionality equivalent to this. +pub fn normalize_path(path: &Path) -> PathBuf { + let mut components = path.components().peekable(); + let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() { + components.next(); + PathBuf::from(c.as_os_str()) + } else { + PathBuf::new() + }; + + for component in components { + match component { + Component::Prefix(..) 
=> unreachable!(), + Component::RootDir => { + ret.push(component.as_os_str()); + } + Component::CurDir => {} + Component::ParentDir => { + ret.pop(); + } + Component::Normal(c) => { + ret.push(c); + } + } + } + ret +} + +#[cfg(test)] +mod test { + use std::env::current_dir; + use std::path::PathBuf; + + use super::{get_line_cols_from_offset, normalize_path, FileObj}; + use crate::cli::LinesChangedOnly; + + // *********************** tests for normalized paths + + #[test] + fn normalize_redirects() { + let mut src = current_dir().unwrap(); + src.push(".."); + src.push( + current_dir() + .unwrap() + .strip_prefix(current_dir().unwrap().parent().unwrap()) + .unwrap(), + ); + println!("relative path = {}", src.to_str().unwrap()); + assert_eq!(normalize_path(&src), current_dir().unwrap()); + } + + #[test] + fn normalize_no_root() { + let src = PathBuf::from("../cpp-linter-lib"); + let mut cur_dir = current_dir().unwrap(); + cur_dir = cur_dir + .strip_prefix(current_dir().unwrap().parent().unwrap()) + .unwrap() + .to_path_buf(); + println!("relative path = {}", src.to_str().unwrap()); + assert_eq!(normalize_path(&src), cur_dir); + } + + #[test] + fn normalize_current_redirect() { + let src = PathBuf::from("tests/./ignored_paths"); + println!("relative path = {}", src.to_str().unwrap()); + assert_eq!(normalize_path(&src), PathBuf::from("tests/ignored_paths")); + } + + // *********************** tests for translating byte offset into line/column + + #[test] + fn translate_byte_offset() { + let (lines, cols) = get_line_cols_from_offset(&PathBuf::from("tests/demo/demo.cpp"), 144); + println!("lines: {lines}, cols: {cols}"); + assert_eq!(lines, 13); + assert_eq!(cols, 5); + } + + // *********************** tests for FileObj::get_ranges() + + #[test] + fn get_ranges_0() { + let file_obj = FileObj::new(PathBuf::from("tests/demo/demo.cpp")); + let ranges = file_obj.get_ranges(&LinesChangedOnly::Off); + assert!(ranges.is_empty()); + } + + #[test] + fn get_ranges_2() { + let diff_chunks = vec![1..=10]; + let added_lines = vec![4, 5, 9]; + let file_obj = FileObj::from( + PathBuf::from("tests/demo/demo.cpp"), + added_lines, + diff_chunks.clone(), + ); + let ranges = file_obj.get_ranges(&LinesChangedOnly::Diff); + assert_eq!(ranges, diff_chunks); + } + + #[test] + fn get_ranges_1() { + let diff_chunks = vec![1..=10]; + let added_lines = vec![4, 5, 9]; + let file_obj = FileObj::from( + PathBuf::from("tests/demo/demo.cpp"), + added_lines, + diff_chunks, + ); + let ranges = file_obj.get_ranges(&LinesChangedOnly::On); + assert_eq!(ranges, vec![4..=5, 9..=9]); + } +} diff --git a/cpp-linter-lib/src/git.rs b/cpp-linter-lib/src/git.rs index 641875c..75b4430 100644 --- a/cpp-linter-lib/src/git.rs +++ b/cpp-linter-lib/src/git.rs @@ -14,7 +14,7 @@ use std::{ops::RangeInclusive, path::PathBuf}; use git2::{Diff, Error, Patch, Repository}; // project specific modules/crates -use crate::common_fs::{is_source_or_ignored, FileObj}; +use crate::common_fs::{FileFilter, FileObj}; /// This (re-)initializes the repository located in the specified `path`. /// @@ -99,12 +99,7 @@ fn parse_patch(patch: &Patch) -> (Vec, Vec>) { /// /// The specified list of `extensions`, `ignored` and `not_ignored` files are used as /// filters to expedite the process and only focus on the data cpp_linter can use. 
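With this refactor, callers hand `parse_diff`/`parse_diff_from_buf` a single `FileFilter` instead of separate extension and ignore lists. A minimal usage sketch, assuming the `FileFilter` and `parse_diff_from_buf` items shown in this diff are reachable as `cpp_linter_lib::common_fs::FileFilter` and `cpp_linter_lib::git::parse_diff_from_buf` (module paths may differ in the published crate):

```rust
// Module paths assumed from this diff's `use` statements; adjust as needed.
use cpp_linter_lib::common_fs::FileFilter;
use cpp_linter_lib::git::parse_diff_from_buf;

fn main() {
    // Keep only .cpp/.hpp sources and ignore anything under target/.
    let filter = FileFilter::new(&["target"], vec!["cpp".to_string(), "hpp".to_string()]);
    let diff = "diff --git a/src/demo.cpp b/src/demo.cpp\n\
--- a/src/demo.cpp\n\
+++ b/src/demo.cpp\n\
@@ -1 +1 @@\n\
-int x = 0;\n\
+int x = 1;\n";
    let files = parse_diff_from_buf(diff.as_bytes(), &filter);
    for file in &files {
        println!("changed source file: {}", file.name.display());
    }
}
```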
-pub fn parse_diff( - diff: &git2::Diff, - extensions: &[&str], - ignored: &[String], - not_ignored: &[String], -) -> Vec { +pub fn parse_diff(diff: &git2::Diff, file_filter: &FileFilter) -> Vec { let mut files: Vec = Vec::new(); for file_idx in 0..diff.deltas().count() { let diff_delta = diff.get_delta(file_idx).unwrap(); @@ -115,7 +110,7 @@ pub fn parse_diff( git2::Delta::Renamed, ] .contains(&diff_delta.status()) - && is_source_or_ignored(&file_path, extensions, ignored, not_ignored) + && file_filter.is_source_or_ignored(&file_path) { let (added_lines, diff_chunks) = parse_patch(&Patch::from_diff(diff, file_idx).unwrap().unwrap()); @@ -132,22 +127,12 @@ pub fn parse_diff( /// log warning and error are output when this occurs. Please report this instance for /// troubleshooting/diagnosis as this likely means the diff is malformed or there is a /// bug in libgit2 source. -pub fn parse_diff_from_buf( - buff: &[u8], - extensions: &[&str], - ignored: &[String], - not_ignored: &[String], -) -> Vec { +pub fn parse_diff_from_buf(buff: &[u8], file_filter: &FileFilter) -> Vec { if let Ok(diff_obj) = &Diff::from_buffer(buff) { - parse_diff(diff_obj, extensions, ignored, not_ignored) + parse_diff(diff_obj, file_filter) } else { log::warn!("libgit2 failed to parse the diff"); - brute_force_parse_diff::parse_diff( - &String::from_utf8_lossy(buff), - extensions, - ignored, - not_ignored, - ) + brute_force_parse_diff::parse_diff(&String::from_utf8_lossy(buff), file_filter) } } @@ -163,7 +148,7 @@ mod brute_force_parse_diff { use regex::Regex; use std::{ops::RangeInclusive, path::PathBuf}; - use crate::common_fs::{is_source_or_ignored, FileObj}; + use crate::common_fs::{FileFilter, FileObj}; fn get_filename_from_front_matter(front_matter: &str) -> Option<&str> { let diff_file_name = Regex::new(r"(?m)^\+\+\+\sb?/(.*)$").unwrap(); @@ -177,7 +162,7 @@ mod brute_force_parse_diff { return Some(captures.get(1).unwrap().as_str()); } } - if diff_binary_file.is_match(front_matter) { + if !diff_binary_file.is_match(front_matter) { log::warn!("Unrecognized diff starting with:\n{}", front_matter); } None @@ -226,12 +211,7 @@ mod brute_force_parse_diff { (additions, diff_chunks) } - pub fn parse_diff( - diff: &str, - extensions: &[&str], - ignored: &[String], - not_ignored: &[String], - ) -> Vec { + pub fn parse_diff(diff: &str, file_filter: &FileFilter) -> Vec { log::error!("Using brute force diff parsing!"); let mut results = Vec::new(); let diff_file_delimiter = Regex::new(r"(?m)^diff --git a/.*$").unwrap(); @@ -242,20 +222,23 @@ mod brute_force_parse_diff { if file_diff.is_empty() || file_diff.starts_with("deleted file") { continue; } - if let Some(first_hunk) = hunk_info.find(file_diff) { - let front_matter = &file_diff[..first_hunk.start()]; - if let Some(file_name) = get_filename_from_front_matter(front_matter) { - let file_path = PathBuf::from(file_name); - if is_source_or_ignored(&file_path, extensions, ignored, not_ignored) { - let (added_lines, diff_chunks) = - parse_patch(&file_diff[first_hunk.start()..]); - results.push(FileObj::from(file_path, added_lines, diff_chunks)); - } - } + let hunk_start = if let Some(first_hunk) = hunk_info.find(file_diff) { + first_hunk.start() } else { - // file has no changed content. 
moving on - continue; + file_diff.len() + }; + let front_matter = &file_diff[..hunk_start]; + if let Some(file_name) = get_filename_from_front_matter(front_matter) { + let file_path = PathBuf::from(file_name); + if file_filter.is_source_or_ignored(&file_path) { + let (added_lines, diff_chunks) = parse_patch(&file_diff[hunk_start..]); + results.push(FileObj::from(file_path, added_lines, diff_chunks)); + } } + // } else { + // // file has no changed content. moving on + // continue; + // } } results } @@ -265,12 +248,19 @@ mod brute_force_parse_diff { mod test { use super::parse_diff; - use crate::{common_fs::FileObj, git::parse_diff_from_buf, logger}; + use crate::{ + common_fs::{FileFilter, FileObj}, + git::parse_diff_from_buf, + }; - static RENAMED_DIFF: &str = r"diff --git a/tests/demo/some source.cpp b/tests/demo/some source.cpp + static RENAMED_DIFF: &str = r#"diff --git a/tests/demo/some source.cpp b/tests/demo/some source.cpp similarity index 100% rename from /tests/demo/some source.cpp -rename to /tests/demo/some source.cpp\n"; +rename to /tests/demo/some source.cpp +diff --git a/some picture.png b/some picture.png +new file mode 100644 +Binary files /dev/null and b/some picture.png differ +"#; static RENAMED_DIFF_WITH_CHANGES: &str = r#"diff --git a/tests/demo/some source.cpp b/tests/demo/some source.cpp similarity index 99% @@ -283,28 +273,41 @@ rename to /tests/demo/some source.cpp #[test] fn parse_renamed_diff() { let diff_buf = RENAMED_DIFF.as_bytes(); - let files = parse_diff_from_buf(diff_buf, &[&String::from("cpp")], &[], &[]); - assert!(files.is_empty()); + let files = parse_diff_from_buf( + diff_buf, + &FileFilter::new(&["target"], vec![String::from("cpp")]), + ); + assert!(!files.is_empty()); + assert!(files + .first() + .unwrap() + .name + .ends_with("tests/demo/some source.cpp")); } #[test] fn parse_renamed_diff_with_patch() { let diff_buf = RENAMED_DIFF_WITH_CHANGES.as_bytes(); - let files = parse_diff_from_buf(diff_buf, &[&String::from("cpp")], &[], &[]); + let files = parse_diff_from_buf( + diff_buf, + &FileFilter::new(&["target"], vec![String::from("cpp")]), + ); assert!(!files.is_empty()); } /// Used to parse the same string buffer using both libgit2 and brute force regex. /// Returns 2 vectors of [FileObj] that should be equivalent. 
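The brute-force fallback works on the raw diff text: it splits the buffer at each `diff --git a/...` header, then inspects each file section's front matter and hunks separately. Below is a standalone sketch of that splitting step, assuming the `regex` crate; the hunk-header pattern is illustrative rather than the exact expression used upstream.

```rust
// Requires the `regex` crate.
use regex::Regex;

fn main() {
    let diff = "diff --git a/src/a.cpp b/src/a.cpp\n\
@@ -1 +1 @@\n\
-int a;\n\
+int b;\n\
diff --git a/README.md b/README.md\n\
@@ -1 +1 @@\n\
-old\n\
+new\n";
    // One section per file, delimited by the `diff --git a/...` header line.
    let file_delimiter = Regex::new(r"(?m)^diff --git a/.*$").unwrap();
    // A plausible hunk-header shape: `@@ -<start>[,<len>] +<start>[,<len>] @@`.
    let hunk_header = Regex::new(r"(?m)^@@ -\d+(,\d+)? \+\d+(,\d+)? @@").unwrap();
    let sections: Vec<&str> = file_delimiter
        .split(diff)
        .filter(|s| !s.trim().is_empty())
        .collect();
    assert_eq!(sections.len(), 2);
    for section in &sections {
        assert_eq!(hunk_header.find_iter(section).count(), 1);
    }
    println!("split the raw diff into {} file sections", sections.len());
}
```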
- fn setup_parsed(buf: &str, extensions: &[&str]) -> (Vec, Vec) { - logger::init().unwrap_or_default(); + fn setup_parsed(buf: &str, extensions: &[String]) -> (Vec, Vec) { ( - parse_diff_from_buf(buf.as_bytes(), extensions, &[], &[]), - parse_diff(buf, extensions, &[], &[]), + parse_diff_from_buf( + buf.as_bytes(), + &FileFilter::new(&["target"], extensions.to_owned()), + ), + parse_diff(buf, &FileFilter::new(&["target"], extensions.to_owned())), ) } - fn assert_files_eq(files_from_a: &Vec, files_from_b: &Vec) { + fn assert_files_eq(files_from_a: &[FileObj], files_from_b: &[FileObj]) { assert_eq!(files_from_a.len(), files_from_b.len()); for (a, b) in files_from_a.iter().zip(files_from_b) { assert_eq!(a.name, b.name); @@ -323,7 +326,7 @@ rename to /tests/demo/some source.cpp -#include \n\ +#include \n \n \n \n"; - let (files_from_buf, files_from_str) = setup_parsed(diff_buf, &[&String::from("cpp")]); + let (files_from_buf, files_from_str) = setup_parsed(diff_buf, &[String::from("cpp")]); assert!(!files_from_buf.is_empty()); assert_files_eq(&files_from_buf, &files_from_str); } @@ -334,7 +337,7 @@ rename to /tests/demo/some source.cpp new file mode 100644\n\ Binary files /dev/null and b/some picture.png differ\n"; - let (files_from_buf, files_from_str) = setup_parsed(diff_buf, &[&String::from("png")]); + let (files_from_buf, files_from_str) = setup_parsed(diff_buf, &[String::from("png")]); assert!(files_from_buf.is_empty()); assert_files_eq(&files_from_buf, &files_from_str); } @@ -374,7 +377,7 @@ mod test { use tempfile::{tempdir, TempDir}; - use crate::{cli::parse_ignore, github_api::GithubApiClient, rest_api::RestApiClient}; + use crate::{common_fs::FileFilter, github_api::GithubApiClient, rest_api::RestApiClient}; fn get_temp_dir() -> TempDir { let tmp = tempdir().unwrap(); @@ -382,9 +385,9 @@ mod test { tmp } - fn checkout_cpp_linter_py_repo( + async fn checkout_cpp_linter_py_repo( sha: &str, - extensions: &[&str], + extensions: &[String], tmp: &TempDir, patch_path: Option<&str>, ) -> Vec { @@ -396,76 +399,67 @@ mod test { patch_path, ); let rest_api_client = GithubApiClient::new(); - let (ignored, not_ignored) = parse_ignore(&["target"]); + let file_filter = FileFilter::new(&["target"], extensions.to_owned()); set_current_dir(tmp).unwrap(); env::set_var("CI", "false"); // avoid use of REST API when testing in CI - rest_api_client.get_list_of_changed_files(extensions, &ignored, ¬_ignored) + rest_api_client + .get_list_of_changed_files(&file_filter) + .await } - #[test] - fn with_no_changed_sources() { + #[tokio::test] + async fn with_no_changed_sources() { // commit with no modified C/C++ sources let sha = "0c236809891000b16952576dc34de082d7a40bf3"; let cur_dir = current_dir().unwrap(); let tmp = get_temp_dir(); - let extensions = vec!["cpp", "hpp"]; - let files = checkout_cpp_linter_py_repo(sha, &extensions, &tmp, None); + let extensions = vec!["cpp".to_string(), "hpp".to_string()]; + let files = checkout_cpp_linter_py_repo(sha, &extensions, &tmp, None).await; println!("files = {:?}", files); assert!(files.is_empty()); set_current_dir(cur_dir).unwrap(); // prep to delete temp_folder drop(tmp); // delete temp_folder } - #[test] - fn with_changed_sources() { + #[tokio::test] + async fn with_changed_sources() { // commit with modified C/C++ sources let sha = "950ff0b690e1903797c303c5fc8d9f3b52f1d3c5"; let cur_dir = current_dir().unwrap(); let tmp = get_temp_dir(); - let extensions = vec!["cpp", "hpp"]; - let files = checkout_cpp_linter_py_repo(sha, &extensions, &tmp, None); + let extensions 
= vec!["cpp".to_string(), "hpp".to_string()]; + let files = checkout_cpp_linter_py_repo(sha, &extensions.clone(), &tmp, None).await; println!("files = {:?}", files); assert!(files.len() >= 2); for file in files { - assert!(extensions.contains( - &file - .name - .extension() - .unwrap() - .to_string_lossy() - .to_string() - .as_str() - )); + assert!( + extensions.contains(&file.name.extension().unwrap().to_string_lossy().to_string()) + ); } set_current_dir(cur_dir).unwrap(); // prep to delete temp_folder drop(tmp); // delete temp_folder } - #[test] - fn with_staged_changed_sources() { + #[tokio::test] + async fn with_staged_changed_sources() { // commit with no modified C/C++ sources let sha = "0c236809891000b16952576dc34de082d7a40bf3"; let cur_dir = current_dir().unwrap(); let tmp = get_temp_dir(); - let extensions = vec!["cpp", "hpp"]; + let extensions = vec!["cpp".to_string(), "hpp".to_string()]; let files = checkout_cpp_linter_py_repo( sha, - &extensions, + &extensions.clone(), &tmp, Some("tests/capture_tools_output/cpp-linter/cpp-linter/test_git_lib.patch"), - ); + ) + .await; println!("files = {:?}", files); assert!(!files.is_empty()); for file in files { - assert!(extensions.contains( - &file - .name - .extension() - .unwrap() - .to_string_lossy() - .to_string() - .as_str() - )); + assert!( + extensions.contains(&file.name.extension().unwrap().to_string_lossy().to_string()) + ); } set_current_dir(cur_dir).unwrap(); // prep to delete temp_folder drop(tmp); // delete temp_folder diff --git a/cpp-linter-lib/src/lib.rs b/cpp-linter-lib/src/lib.rs index cde1841..3cd6f28 100644 --- a/cpp-linter-lib/src/lib.rs +++ b/cpp-linter-lib/src/lib.rs @@ -2,11 +2,7 @@ #![doc( html_favicon_url = "https://github.com/cpp-linter/cpp-linter/raw/main/docs/_static/favicon.ico" )] -//! The root module for the cpp_linter package when compiled as a library. -//! This module mainly holds the declarations of this package's other modules. -//! -//! The python binding is also defined here, and it is exposed in python as -//! `cpp_linter.cpp_linter` in the python path. +#![doc = include_str!("../README.md")] // project specific modules/crates pub mod clang_tools; diff --git a/cpp-linter-lib/src/main.rs b/cpp-linter-lib/src/main.rs new file mode 100644 index 0000000..c368198 --- /dev/null +++ b/cpp-linter-lib/src/main.rs @@ -0,0 +1,10 @@ +/// This crate is the binary executable's entrypoint. +use std::env; + +use cpp_linter_lib::run::run_main; + +/// This function simply forwards CLI args to [`run_main()`]. 
+#[tokio::main] +pub async fn main() { + run_main(env::args().collect::>()).await; +} diff --git a/cpp-linter-lib/src/rest_api/github_api.rs b/cpp-linter-lib/src/rest_api/github_api.rs index cbe7b0e..194a4ac 100644 --- a/cpp-linter-lib/src/rest_api/github_api.rs +++ b/cpp-linter-lib/src/rest_api/github_api.rs @@ -4,28 +4,33 @@ use std::collections::HashMap; use std::env; use std::fs::OpenOptions; use std::io::{Read, Write}; +use std::sync::{Arc, Mutex}; // non-std crates -use reqwest::blocking::Client; use reqwest::header::{HeaderMap, HeaderValue}; +use reqwest::Client; use reqwest::Method; use serde::Deserialize; use serde_json; +use crate::clang_tools::clang_format::tally_format_advice; +use crate::clang_tools::clang_tidy::tally_tidy_advice; // project specific modules/crates -use crate::clang_tools::{clang_format::FormatAdvice, clang_tidy::TidyAdvice}; -use crate::common_fs::FileObj; +use crate::common_fs::{FileFilter, FileObj}; use crate::git::{get_diff, open_repo, parse_diff, parse_diff_from_buf}; use super::{FeedbackInput, RestApiClient, COMMENT_MARKER}; +static USER_AGENT: &str = + "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:120.0) Gecko/20100101 Firefox/120.0"; + /// A structure to work with Github REST API. pub struct GithubApiClient { /// The HTTP request client to be used for all REST API calls. client: Client, /// The CI run's event payload from the webhook that triggered the workflow. - event_payload: Option, + pull_request: Option, /// The name of the event that was triggered when running cpp_linter. pub event_name: String, @@ -52,18 +57,21 @@ impl Default for GithubApiClient { impl GithubApiClient { pub fn new() -> Self { GithubApiClient { - client: reqwest::blocking::Client::new(), - event_payload: { - let event_payload_path = env::var("GITHUB_EVENT_PATH"); - if event_payload_path.is_ok() { + client: Client::new(), + pull_request: { + if let Ok(event_payload_path) = env::var("GITHUB_EVENT_PATH") { let file_buf = &mut String::new(); OpenOptions::new() .read(true) - .open(event_payload_path.unwrap()) + .open(event_payload_path) .unwrap() .read_to_string(file_buf) .unwrap(); - Some(serde_json::from_str(file_buf.as_str()).unwrap()) + let json = serde_json::from_str::>( + file_buf.as_str(), + ) + .unwrap(); + json["number"].as_i64() } else { None } @@ -92,10 +100,10 @@ impl GithubApiClient { impl RestApiClient for GithubApiClient { fn set_exit_code( &self, - checks_failed: i32, - format_checks_failed: Option, - tidy_checks_failed: Option, - ) -> i32 { + checks_failed: u64, + format_checks_failed: Option, + tidy_checks_failed: Option, + ) -> u64 { if let Ok(gh_out) = env::var("GITHUB_OUTPUT") { let mut gh_out_file = OpenOptions::new() .append(true) @@ -124,7 +132,6 @@ impl RestApiClient for GithubApiClient { } fn make_headers(&self, use_diff: Option) -> HeaderMap { - let gh_token = env::var("GITHUB_TOKEN"); let mut headers = HeaderMap::new(); let return_fmt = "application/vnd.github.".to_owned() + if use_diff.is_some_and(|val| val) { @@ -133,21 +140,14 @@ impl RestApiClient for GithubApiClient { "raw+json" }; headers.insert("Accept", return_fmt.parse().unwrap()); - let user_agent = - "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:120.0) Gecko/20100101 Firefox/120.0"; - headers.insert("User-Agent", user_agent.parse().unwrap()); - if let Ok(token) = gh_token { + headers.insert("User-Agent", USER_AGENT.parse().unwrap()); + if let Ok(token) = env::var("GITHUB_TOKEN") { headers.insert("Authorization", token.parse().unwrap()); } headers } - fn get_list_of_changed_files( - &self, - 
extensions: &[&str], - ignored: &[String], - not_ignored: &[String], - ) -> Vec { + async fn get_list_of_changed_files(&self, file_filter: &FileFilter) -> Vec { if env::var("CI").is_ok_and(|val| val.as_str() == "true") && self.repo.is_some() && self.sha.is_some() @@ -158,8 +158,10 @@ impl RestApiClient for GithubApiClient { self.api_url, self.repo.as_ref().unwrap(), if self.event_name == "pull_request" { - let pr_number = &self.event_payload.as_ref().unwrap()["number"]; - format!("pulls/{}", &pr_number) + format!( + "pulls/{}", + &self.pull_request.expect("Pull request number unknown") + ) } else { format!("commits/{}", self.sha.as_ref().unwrap()) } @@ -169,40 +171,34 @@ impl RestApiClient for GithubApiClient { .get(url) .headers(self.make_headers(Some(true))) .send() + .await .unwrap() .bytes() + .await .unwrap(); - parse_diff_from_buf(&response, extensions, ignored, not_ignored) + parse_diff_from_buf(&response, file_filter) } else { // get diff from libgit2 API let repo = open_repo(".") .expect("Please ensure the repository is checked out before running cpp-linter."); - let list = parse_diff(&get_diff(&repo), extensions, ignored, not_ignored); + let list = parse_diff(&get_diff(&repo), file_filter); list } } - fn post_feedback( - &self, - files: &[FileObj], - format_advice: &[FormatAdvice], - tidy_advice: &[TidyAdvice], - user_inputs: FeedbackInput, - ) { - let (comment, format_checks_failed, tidy_checks_failed) = - self.make_comment(files, format_advice, tidy_advice); + async fn post_feedback(&self, files: &[Arc>], user_inputs: FeedbackInput) { + let format_checks_failed = tally_format_advice(files); + let tidy_checks_failed = tally_tidy_advice(files); + let mut comment = None; if user_inputs.file_annotations { - self.post_annotations( - files, - format_advice, - tidy_advice, - user_inputs.style.as_str(), - ); + self.post_annotations(files, user_inputs.style.as_str()); } if user_inputs.step_summary { - self.post_step_summary(&comment); + comment = + Some(self.make_comment(files, format_checks_failed, tidy_checks_failed, None)); + self.post_step_summary(comment.as_ref().unwrap()); } self.set_exit_code( format_checks_failed + tidy_checks_failed, @@ -212,13 +208,21 @@ impl RestApiClient for GithubApiClient { if user_inputs.thread_comments.as_str() != "false" { // post thread comment for PR or push event + if comment.as_ref().is_some_and(|c| c.len() > 65535) || comment.is_none() { + comment = Some(self.make_comment( + files, + format_checks_failed, + tidy_checks_failed, + Some(65535), + )); + } if let Some(repo) = &self.repo { let is_pr = self.event_name == "pull_request"; let base_url = format!("{}/repos/{}/", &self.api_url, &repo); let comments_url = if is_pr { format!( "{base_url}issues/{}", - &self.event_payload.as_ref().unwrap()["number"] + &self.pull_request.expect("Pull request number unknown") ) } else { format!("{base_url}/commits/{}", &self.sha.as_ref().unwrap()) @@ -229,9 +233,10 @@ impl RestApiClient for GithubApiClient { .client .get(&comments_url) .headers(self.make_headers(None)) - .send(); + .send() + .await; if let Ok(response) = request { - let json = response.json::().unwrap(); + let json = response.json::().await.unwrap(); let count = if is_pr { json["comments"].as_u64().unwrap() } else { @@ -239,12 +244,13 @@ impl RestApiClient for GithubApiClient { }; self.update_comment( &format!("{}/comments", &comments_url), - &comment, + &comment.unwrap(), count, user_inputs.no_lgtm, format_checks_failed + tidy_checks_failed == 0, user_inputs.thread_comments.as_str() == "update", - 
); + ) + .await; } else { let error = request.unwrap_err(); if let Some(status) = error.status() { @@ -274,34 +280,29 @@ impl GithubApiClient { } } - fn post_annotations( - &self, - files: &[FileObj], - format_advice: &[FormatAdvice], - tidy_advice: &[TidyAdvice], - style: &str, - ) { - if !format_advice.is_empty() { - // formalize the style guide name - let style_guide = - if ["google", "chromium", "microsoft", "mozilla", "webkit"].contains(&style) { - // capitalize the first letter - let mut char_iter = style.chars(); - match char_iter.next() { - None => String::new(), - Some(f) => f.to_uppercase().collect::() + char_iter.as_str(), - } - } else if style == "llvm" || style == "gnu" { - style.to_ascii_uppercase() - } else { - String::from("Custom") - }; + fn post_annotations(&self, files: &[Arc>], style: &str) { + // formalize the style guide name + let style_guide = + if ["google", "chromium", "microsoft", "mozilla", "webkit"].contains(&style) { + // capitalize the first letter + let mut char_iter = style.chars(); + match char_iter.next() { + None => String::new(), + Some(f) => f.to_uppercase().collect::() + char_iter.as_str(), + } + } else if style == "llvm" || style == "gnu" { + style.to_ascii_uppercase() + } else { + String::from("Custom") + }; - // iterate over clang-format advice and post annotations - for (index, advice) in format_advice.iter().enumerate() { + // iterate over clang-format advice and post annotations + for file in files { + let file = file.lock().unwrap(); + if let Some(format_advice) = &file.format_advice { // assemble a list of line numbers let mut lines: Vec = Vec::new(); - for replacement in &advice.replacements { + for replacement in &format_advice.replacements { if let Some(line_int) = replacement.line { if !lines.contains(&line_int) { lines.push(line_int); @@ -311,29 +312,29 @@ impl GithubApiClient { // post annotation if any applicable lines were formatted if !lines.is_empty() { println!( - "::notice file={name},title=Run clang-format on {name}::File {name} does not conform to {style_guide} style guidelines. (lines {line_set})", - name = &files[index].name.to_string_lossy().replace('\\', "/"), - line_set = lines.iter().map(|val| val.to_string()).collect::>().join(","), - ); + "::notice file={name},title=Run clang-format on {name}::File {name} does not conform to {style_guide} style guidelines. (lines {line_set})", + name = &file.name.to_string_lossy().replace('\\', "/"), + line_set = lines.iter().map(|val| val.to_string()).collect::>().join(","), + ); } - } - } // end format_advice iterations - - // iterate over clang-tidy advice and post annotations - // The tidy_advice vector is parallel to the files vector; meaning it serves as a file filterer. - // lines are already filter as specified to clang-tidy CLI. - for (index, advice) in tidy_advice.iter().enumerate() { - for note in &advice.notes { - if note.filename == files[index].name.to_string_lossy().replace('\\', "/") { - println!( - "::{severity} file={file},line={line},title={file}:{line}:{cols} [{diag}]::{info}", - severity = if note.severity == *"note" { "notice".to_string() } else {note.severity.clone()}, - file = note.filename, - line = note.line, - cols = note.cols, - diag = note.diagnostic, - info = note.rationale, - ); + } // end format_advice iterations + + // iterate over clang-tidy advice and post annotations + // The tidy_advice vector is parallel to the files vector; meaning it serves as a file filterer. + // lines are already filter as specified to clang-tidy CLI. 
+ if let Some(tidy_advice) = &file.tidy_advice { + for note in &tidy_advice.notes { + if note.filename == file.name.to_string_lossy().replace('\\', "/") { + println!( + "::{severity} file={file},line={line},title={file}:{line}:{cols} [{diag}]::{info}", + severity = if note.severity == *"note" { "notice".to_string() } else {note.severity.clone()}, + file = note.filename, + line = note.line, + cols = note.cols, + diag = note.diagnostic, + info = note.rationale, + ); + } } } } @@ -341,7 +342,7 @@ impl GithubApiClient { /// update existing comment or remove old comment(s) and post a new comment #[allow(clippy::too_many_arguments)] - fn update_comment( + async fn update_comment( &self, url: &String, comment: &String, @@ -350,8 +351,9 @@ impl GithubApiClient { is_lgtm: bool, update_only: bool, ) { - let comment_url = - self.remove_bot_comments(url, count, !update_only || (is_lgtm && no_lgtm)); + let comment_url = self + .remove_bot_comments(url, count, !update_only || (is_lgtm && no_lgtm)) + .await; #[allow(clippy::nonminimal_bool)] // an inaccurate assessment if (is_lgtm && !no_lgtm) || !is_lgtm { let payload = HashMap::from([("body", comment)]); @@ -374,6 +376,7 @@ impl GithubApiClient { .headers(self.make_headers(None)) .json(&payload) .send() + .await { log::info!( "Got {} response from {:?}ing comment", @@ -384,17 +387,17 @@ impl GithubApiClient { } } - fn remove_bot_comments(&self, url: &String, count: u64, delete: bool) -> Option { + async fn remove_bot_comments(&self, url: &String, count: u64, delete: bool) -> Option { let mut page = 1; let mut comment_url = None; let mut total = count; while total > 0 { - let request = self.client.get(format!("{url}/?page={page}")).send(); + let request = self.client.get(format!("{url}/?page={page}")).send().await; if request.is_err() { log::error!("Failed to get list of existing comments"); return None; } else if let Ok(response) = request { - let payload: JsonCommentsPayload = response.json().unwrap(); + let payload: JsonCommentsPayload = response.json().await.unwrap(); let mut comment_count = 0; for comment in payload.comments { if comment.body.starts_with(COMMENT_MARKER) { @@ -420,6 +423,7 @@ impl GithubApiClient { .delete(del_url) .headers(self.make_headers(None)) .send() + .await { log::info!( "Got {} from DELETE {}", @@ -463,3 +467,136 @@ struct User { pub login: String, pub id: u64, } + +#[cfg(test)] +mod test { + use std::{ + env, + io::Read, + path::PathBuf, + sync::{Arc, Mutex}, + }; + + use regex::Regex; + use tempfile::{tempdir, NamedTempFile}; + + use super::{GithubApiClient, USER_AGENT}; + use crate::{ + clang_tools::{capture_clang_tools_output, ClangParams}, + cli::LinesChangedOnly, + common_fs::{FileFilter, FileObj}, + rest_api::{FeedbackInput, RestApiClient, USER_OUTREACH}, + }; + + // ************************** tests for GithubApiClient::make_headers() + fn assert_header(use_diff: bool, auth: Option<&str>) { + let rest_api_client = GithubApiClient::new(); + if let Some(token) = auth { + env::set_var("GITHUB_TOKEN", token); + } + let headers = rest_api_client.make_headers(Some(use_diff)); + assert!(headers.contains_key("User-Agent")); + assert_eq!(headers.get("User-Agent").unwrap(), USER_AGENT); + assert!(headers.contains_key("Accept")); + assert!(headers + .get("Accept") + .unwrap() + .to_str() + .unwrap() + .ends_with(if use_diff { "diff" } else { "raw+json" })); + if let Some(token) = auth { + assert!(headers.contains_key("Authorization")); + assert_eq!(headers.get("Authorization").unwrap(), token); + } + } + + #[test] + fn 
get_headers_json_token() { + assert_header(false, Some("123456")); + } + + #[test] + fn get_headers_diff() { + assert_header(true, None); + } + + // ************************* tests for step-summary and output variables + + async fn create_comment(tidy_checks: &str, style: &str) -> (String, String) { + let tmp_dir = tempdir().unwrap(); + let rest_api_client = GithubApiClient::default(); + if env::var("ACTIONS_STEP_DEBUG").is_ok_and(|var| var == "true") { + assert!(rest_api_client.debug_enabled); + } + let mut files = vec![Arc::new(Mutex::new(FileObj::new(PathBuf::from( + "tests/demo/demo.cpp", + ))))]; + let mut clang_params = ClangParams { + tidy_checks: tidy_checks.to_string(), + lines_changed_only: LinesChangedOnly::Off, + database: None, + extra_args: None, + database_json: None, + style: style.to_string(), + clang_tidy_command: None, + clang_format_command: None, + tidy_filter: FileFilter::new(&[], vec!["cpp".to_string(), "hpp".to_string()]), + format_filter: FileFilter::new(&[], vec!["cpp".to_string(), "hpp".to_string()]), + }; + capture_clang_tools_output( + &mut files, + env::var("CLANG-VERSION").unwrap_or("".to_string()).as_str(), + &mut clang_params, + ) + .await; + let feedback_inputs = FeedbackInput { + style: style.to_string(), + step_summary: true, + ..Default::default() + }; + let mut step_summary_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); + env::set_var("GITHUB_STEP_SUMMARY", step_summary_path.path()); + let mut gh_out_path = NamedTempFile::new_in(tmp_dir.path()).unwrap(); + env::set_var("GITHUB_OUTPUT", gh_out_path.path()); + rest_api_client.post_feedback(&files, feedback_inputs).await; + let mut step_summary_content = String::new(); + step_summary_path + .read_to_string(&mut step_summary_content) + .unwrap(); + assert!(&step_summary_content.contains(USER_OUTREACH)); + let mut gh_out_content = String::new(); + gh_out_path.read_to_string(&mut gh_out_content).unwrap(); + assert!(gh_out_content.starts_with("checks-failed=")); + (step_summary_content, gh_out_content) + } + + #[tokio::test] + async fn check_comment_concerns() { + let (comment, gh_out) = create_comment("readability-*", "file").await; + assert!(&comment.contains(":warning:\nSome files did not pass the configured checks!\n")); + let fmt_pattern = Regex::new(r"format-checks-failed=(\d+)\n").unwrap(); + let tidy_pattern = Regex::new(r"tidy-checks-failed=(\d+)\n").unwrap(); + for pattern in [fmt_pattern, tidy_pattern] { + let number = pattern + .captures(&gh_out) + .expect("found no number of checks-failed") + .get(1) + .unwrap() + .as_str() + .parse::() + .unwrap(); + assert!(number > 0); + } + } + + #[tokio::test] + async fn check_comment_lgtm() { + env::set_var("ACTIONS_STEP_DEBUG", "true"); + let (comment, gh_out) = create_comment("-*", "").await; + assert!(&comment.contains(":heavy_check_mark:\nNo problems need attention.")); + assert_eq!( + &gh_out, + "checks-failed=0\nformat-checks-failed=0\ntidy-checks-failed=0\n" + ); + } +} diff --git a/cpp-linter-lib/src/rest_api/mod.rs b/cpp-linter-lib/src/rest_api/mod.rs index 4a4fa86..675a20a 100644 --- a/cpp-linter-lib/src/rest_api/mod.rs +++ b/cpp-linter-lib/src/rest_api/mod.rs @@ -3,17 +3,18 @@ //! //! Currently, only Github is supported. 
-use std::path::PathBuf; +use std::sync::{Arc, Mutex}; +use std::{future::Future, path::PathBuf}; // non-std crates use reqwest::header::{HeaderMap, HeaderValue}; // project specific modules/crates pub mod github_api; -use crate::clang_tools::{clang_format::FormatAdvice, clang_tidy::TidyAdvice}; -use crate::common_fs::FileObj; +use crate::common_fs::{FileFilter, FileObj}; pub static COMMENT_MARKER: &str = ""; +pub static USER_OUTREACH: &str = "\n\nHave any feedback or feature suggestions? [Share it here.](https://github.com/cpp-linter/cpp-linter-action/issues)"; /// A struct to hold a collection of user inputs related to [`ResApiClient.post_feedback()`]. pub struct FeedbackInput { @@ -25,7 +26,7 @@ pub struct FeedbackInput { } impl Default for FeedbackInput { - /// Construct a [`UserInput`] instance with default values. + /// Construct a [`FeedbackInput`] instance with default values. fn default() -> Self { FeedbackInput { thread_comments: "false".to_string(), @@ -42,10 +43,10 @@ pub trait RestApiClient { /// A way to set output variables specific to cpp_linter executions in CI. fn set_exit_code( &self, - checks_failed: i32, - format_checks_failed: Option, - tidy_checks_failed: Option, - ) -> i32; + checks_failed: u64, + format_checks_failed: Option, + tidy_checks_failed: Option, + ) -> u64; /// A convenience method to create the headers attached to all REST API calls. /// @@ -60,10 +61,8 @@ pub trait RestApiClient { /// cpp_linter package is used. fn get_list_of_changed_files( &self, - extensions: &[&str], - ignored: &[String], - not_ignored: &[String], - ) -> Vec; + file_filter: &FileFilter, + ) -> impl Future>; /// Makes a comment in MarkDown syntax based on the concerns in `format_advice` and /// `tidy_advice` about the given set of `files`. @@ -75,65 +74,40 @@ pub trait RestApiClient { /// `format_checks_failed` and `tidy_checks_failed` (in respective order). 
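The rewritten `make_comment` keeps the assembled thread comment under GitHub's size cap by tracking a remaining-length budget and skipping sections once they no longer fit. A reduced, self-contained sketch of that budgeting pattern (the 65,535-byte figure mirrors the limit referenced elsewhere in this diff):

```rust
fn main() {
    // GitHub rejects issue comments longer than 65,535 characters.
    let cap: u64 = 65_535;
    let header = "# Cpp-Linter Report :warning:\n";
    let footer = "\n\nHave any feedback or feature suggestions? ...";
    let mut remaining = cap - header.len() as u64 - footer.len() as u64;

    let mut comment = String::from(header);
    for note in ["- src/a.cpp\n", "- src/b.cpp\n", "- src/c.cpp\n"] {
        // Append a section only while it still fits in the remaining budget.
        if (note.len() as u64) < remaining {
            comment.push_str(note);
            remaining -= note.len() as u64;
        }
    }
    comment.push_str(footer);
    assert!((comment.len() as u64) <= cap);
}
```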
fn make_comment( &self, - files: &[FileObj], - format_advice: &[FormatAdvice], - tidy_advice: &[TidyAdvice], - ) -> (String, i32, i32) { - let mut comment = String::from("\n# Cpp-Linter Report "); - let mut format_checks_failed = 0; - let mut tidy_checks_failed = 0; - let mut format_comment = String::new(); - for (index, fmt_advice) in format_advice.iter().enumerate() { - if !fmt_advice.replacements.is_empty() { - format_comment.push_str( - &format!( - "- {}\n", - files[index].name.to_string_lossy().replace('\\', "/") - ) - .to_string(), - ); - format_checks_failed += 1; - } - } + files: &[Arc>], + format_checks_failed: u64, + tidy_checks_failed: u64, + max_len: Option, + ) -> String { + let mut comment = format!("{COMMENT_MARKER}\n# Cpp-Linter Report "); + let mut remaining_length = + max_len.unwrap_or(u64::MAX) - comment.len() as u64 - USER_OUTREACH.len() as u64; - let mut tidy_comment = String::new(); - for (index, advice) in tidy_advice.iter().enumerate() { - for tidy_note in &advice.notes { - let file_path = PathBuf::from(&tidy_note.filename); - if file_path == files[index].name { - tidy_comment.push_str(&format!("- {}\n\n", tidy_note.filename)); - tidy_comment.push_str(&format!( - " {filename}:{line}:{cols}: {severity}: [{diagnostic}]\n > {rationale}\n{concerned_code}", - filename = tidy_note.filename, - line = tidy_note.line, - cols = tidy_note.cols, - severity = tidy_note.severity, - diagnostic = tidy_note.diagnostic_link(), - rationale = tidy_note.rationale, - concerned_code = if tidy_note.suggestion.is_empty() {String::from("")} else { - format!("\n ```{ext}\n {suggestion}\n ```\n", - ext = file_path.extension().expect("file extension was not determined").to_string_lossy(), - suggestion = tidy_note.suggestion.join("\n "), - ).to_string() - }, - ).to_string()); - tidy_checks_failed += 1; - } - } - } if format_checks_failed > 0 || tidy_checks_failed > 0 { - comment.push_str(":warning:\nSome files did not pass the configured checks!\n"); + let prompt = ":warning:\nSome files did not pass the configured checks!\n"; + remaining_length -= prompt.len() as u64; + comment.push_str(prompt); if format_checks_failed > 0 { - comment.push_str(&format!("\n
<details><summary>clang-format reports: {} file(s) not formatted</summary>\n\n{}\n</details>
", format_checks_failed, &format_comment)); + make_format_comment( + files, + &mut comment, + format_checks_failed, + &mut remaining_length, + ); } if tidy_checks_failed > 0 { - comment.push_str(&format!("\n
<details><summary>clang-tidy reports: {} concern(s)</summary>\n\n{}\n</details>
", tidy_checks_failed, tidy_comment)); + make_tidy_comment( + files, + &mut comment, + tidy_checks_failed, + &mut remaining_length, + ); } } else { comment.push_str(":heavy_check_mark:\nNo problems need attention."); } - comment.push_str("\n\nHave any feedback or feature suggestions? [Share it here.](https://github.com/cpp-linter/cpp-linter-action/issues)"); - (comment, format_checks_failed, tidy_checks_failed) + comment.push_str(USER_OUTREACH); + comment } /// A way to post feedback in the form of `thread_comments`, `file_annotations`, and @@ -148,9 +122,83 @@ pub trait RestApiClient { /// All other parameters correspond to CLI arguments. fn post_feedback( &self, - files: &[FileObj], - format_advice: &[FormatAdvice], - tidy_advice: &[TidyAdvice], + files: &[Arc>], user_inputs: FeedbackInput, + ) -> impl Future; +} + +fn make_format_comment( + files: &[Arc>], + comment: &mut String, + format_checks_failed: u64, + remaining_length: &mut u64, +) { + let opener = format!("\n
<details><summary>clang-format reports: {} file(s) not formatted</summary>\n\n", format_checks_failed); + let closer = String::from("\n</details>
"); + let mut format_comment = String::new(); + *remaining_length -= opener.len() as u64 + closer.len() as u64; + for file in files { + let file = file.lock().unwrap(); + if let Some(format_advice) = &file.format_advice { + if !format_advice.replacements.is_empty() && *remaining_length > 0 { + let note = format!("- {}\n", file.name.to_string_lossy().replace('\\', "/")); + if (note.len() as u64) < *remaining_length { + format_comment.push_str(¬e.to_string()); + *remaining_length -= note.len() as u64; + } + } + } + } + comment.push_str(&opener); + comment.push_str(&format_comment); + comment.push_str(&closer); +} + +fn make_tidy_comment( + files: &[Arc>], + comment: &mut String, + tidy_checks_failed: u64, + remaining_length: &mut u64, +) { + let opener = format!( + "\n
<details><summary>clang-tidy reports: {} concern(s)</summary>\n\n", + tidy_checks_failed ); + let closer = String::from("\n</details>
"); + let mut tidy_comment = String::new(); + *remaining_length -= opener.len() as u64 + closer.len() as u64; + for file in files { + let file = file.lock().unwrap(); + if let Some(tidy_advice) = &file.tidy_advice { + for tidy_note in &tidy_advice.notes { + let file_path = PathBuf::from(&tidy_note.filename); + if file_path == file.name { + let mut tmp_note = format!("- {}\n\n", tidy_note.filename); + tmp_note.push_str(&format!( + " {filename}:{line}:{cols}: {severity}: [{diagnostic}]\n > {rationale}\n{concerned_code}", + filename = tidy_note.filename, + line = tidy_note.line, + cols = tidy_note.cols, + severity = tidy_note.severity, + diagnostic = tidy_note.diagnostic_link(), + rationale = tidy_note.rationale, + concerned_code = if tidy_note.suggestion.is_empty() {String::from("")} else { + format!("\n ```{ext}\n {suggestion}\n ```\n", + ext = file_path.extension().expect("file extension was not determined").to_string_lossy(), + suggestion = tidy_note.suggestion.join("\n "), + ).to_string() + }, + ).to_string()); + + if (tmp_note.len() as u64) < *remaining_length { + tidy_comment.push_str(&tmp_note); + *remaining_length -= tmp_note.len() as u64; + } + } + } + } + } + comment.push_str(&opener); + comment.push_str(&tidy_comment); + comment.push_str(&closer); } diff --git a/cpp-linter-lib/src/run.rs b/cpp-linter-lib/src/run.rs index 3c428ed..49dda3a 100644 --- a/cpp-linter-lib/src/run.rs +++ b/cpp-linter-lib/src/run.rs @@ -1,10 +1,11 @@ //! This module is the native backend of the cpp-linter package written in Rust. //! //! In python, this module is exposed as `cpp_linter.run` that has 1 function exposed: -//! [`main()`]. +//! `main()`. use std::env; use std::path::{Path, PathBuf}; +use std::sync::{Arc, Mutex}; // non-std crates use log::{set_max_level, LevelFilter}; @@ -12,13 +13,15 @@ use log::{set_max_level, LevelFilter}; use openssl_probe; // project specific modules/crates -use crate::clang_tools::capture_clang_tools_output; -use crate::cli::{convert_extra_arg_val, get_arg_parser, parse_ignore, LinesChangedOnly}; -use crate::common_fs::{list_source_files, FileObj}; +use crate::clang_tools::{capture_clang_tools_output, ClangParams}; +use crate::cli::{convert_extra_arg_val, get_arg_parser, LinesChangedOnly}; +use crate::common_fs::FileFilter; use crate::github_api::GithubApiClient; use crate::logger::{self, end_log_group, start_log_group}; use crate::rest_api::{FeedbackInput, RestApiClient}; +const VERSION: &str = env!("CARGO_PKG_VERSION"); + #[cfg(feature = "openssl-vendored")] fn probe_ssl_certs() { openssl_probe::init_ssl_cert_env_vars(); @@ -32,23 +35,35 @@ fn probe_ssl_certs() {} /// The idea here is that all functionality is implemented in Rust. However, passing /// command line arguments is done differently in Python or Rust. /// -/// - In python, the `sys.argv` list is passed from the `cpp_linter.entry_point` script -/// to `run.main()`. -/// - In rust, the [`std::env::args`] is passed to `run::main()` in the binary driver -/// source `bin.rs`. +/// - In python, the ``sys.argv`` list is passed from the ``cpp_linter.entry_point.main()`` +/// function to rust via the ``cpp_linter.run.main()`` binding (which wraps [`run_main()`]). +/// - In rust, the [`std::env::args`] is passed to [`run_main()`] in the binary +/// source `main.rs`. /// /// This is done because of the way the python entry point is invoked. If [`std::env::args`] /// is used instead of python's `sys.argv`, then the list of strings includes the entry point /// alias ("path/to/cpp-linter.exe"). 
 /// Thus, the parser in [`crate::cli`] will halt on an error
 /// because it is not configured to handle positional arguments.
-pub fn run_main(args: Vec<String>) -> i32 {
+pub async fn run_main(args: Vec<String>) -> i32 {
     probe_ssl_certs();
 
     let arg_parser = get_arg_parser();
     let args = arg_parser.get_matches_from(args);
+    if args.subcommand_matches("version").is_some() {
+        println!("cpp-linter v{}", VERSION);
+        return 0;
+    }
+
     logger::init().unwrap();
+
+    let version = args.get_one::<String>("version").unwrap();
+    if version == "NO-VERSION" {
+        log::error!("The `--version` arg is used to specify which version of clang to use.");
+        log::error!("To get the cpp-linter version, use `cpp-linter version` sub-command.");
+        return 1;
+    }
+
     let root_path = args.get_one::<String>("repo-root").unwrap();
     if root_path != &String::from(".") {
         env::set_current_dir(Path::new(root_path)).unwrap();
@@ -76,14 +91,15 @@ pub fn run_main(args: Vec<String>) -> i32 {
     let extensions = args
         .get_many::<String>("extensions")
         .unwrap()
-        .map(|s| s.as_str())
+        .map(|s| s.to_string())
         .collect::<Vec<_>>();
     let ignore = args
         .get_many::<String>("ignore")
         .unwrap()
         .map(|s| s.as_str())
         .collect::<Vec<_>>();
-    let (ignored, not_ignored) = parse_ignore(&ignore);
+    let mut file_filter = FileFilter::new(&ignore, extensions.clone());
+    file_filter.parse_submodules();
 
     let lines_changed_only = match args
         .get_one::<String>("lines-changed-only")
@@ -97,16 +113,20 @@ pub fn run_main(args: Vec<String>) -> i32 {
     let files_changed_only = args.get_flag("files-changed-only");
 
     start_log_group(String::from("Get list of specified source files"));
-    let files: Vec<FileObj> = if lines_changed_only != LinesChangedOnly::Off || files_changed_only {
+    let files = if lines_changed_only != LinesChangedOnly::Off || files_changed_only {
         // parse_diff(github_rest_api_payload)
-        rest_api_client.get_list_of_changed_files(&extensions, &ignored, &not_ignored)
+        rest_api_client
+            .get_list_of_changed_files(&file_filter)
+            .await
     } else {
         // walk the folder and look for files with specified extensions according to ignore values.
-        list_source_files(&extensions, &ignored, &not_ignored, ".")
+        file_filter.list_source_files(".")
     };
+    let mut arc_files = vec![];
     log::info!("Giving attention to the following files:");
-    for file in &files {
+    for file in files {
         log::info!(" ./{}", file.name.to_string_lossy().replace('\\', "/"));
+        arc_files.push(Arc::new(Mutex::new(file)));
     }
     end_log_group();
 
@@ -120,19 +140,88 @@ pub fn run_main(args: Vec<String>) -> i32 {
             .to_string(),
         file_annotations: args.get_flag("file-annotations"),
     };
+    let ignore_tidy = args
+        .get_many::<String>("ignore-tidy")
+        .unwrap()
+        .map(|s| s.as_str())
+        .collect::<Vec<_>>();
+    let ignore_format = args
+        .get_many::<String>("ignore-format")
+        .unwrap()
+        .map(|s| s.as_str())
+        .collect::<Vec<_>>();
     let extra_args = convert_extra_arg_val(&args);
-    let (format_advice, tidy_advice) = capture_clang_tools_output(
-        &files,
-        args.get_one::<String>("version").unwrap(),
-        args.get_one::<String>("tidy-checks").unwrap(),
-        user_inputs.style.as_str(),
-        &lines_changed_only,
-        database_path,
+    let mut clang_params = ClangParams {
+        tidy_checks: args.get_one::<String>("tidy-checks").unwrap().to_string(),
+        lines_changed_only,
+        database: database_path,
         extra_args,
-    );
+        database_json: None,
+        style: user_inputs.style.clone(),
+        clang_tidy_command: None,
+        clang_format_command: None,
+        tidy_filter: FileFilter::new(&ignore_tidy, extensions.clone()),
+        format_filter: FileFilter::new(&ignore_format, extensions),
+    };
+    capture_clang_tools_output(&mut arc_files, version, &mut clang_params).await;
 
     start_log_group(String::from("Posting feedback"));
-    rest_api_client.post_feedback(&files, &format_advice, &tidy_advice, user_inputs);
+    rest_api_client.post_feedback(&arc_files, user_inputs).await;
     end_log_group();
     0
 }
+
+#[cfg(test)]
+mod test {
+    use super::run_main;
+
+    #[tokio::test]
+    async fn run() {
+        assert_eq!(
+            run_main(vec![
+                "cpp-linter".to_string(),
+                "-l".to_string(),
+                "false".to_string()
+            ])
+            .await,
+            0
+        );
+    }
+
+    #[tokio::test]
+    async fn run_version_command() {
+        assert_eq!(
+            run_main(vec!["cpp-linter".to_string(), "version".to_string()]).await,
+            0
+        );
+    }
+
+    #[tokio::test]
+    async fn run_force_debug_output() {
+        assert_eq!(
+            run_main(vec![
+                "cpp-linter".to_string(),
+                "-l".to_string(),
+                "false".to_string(),
+                "-v".to_string(),
+                "debug".to_string(),
+            ])
+            .await,
+            0
+        );
+    }
+
+    #[tokio::test]
+    async fn run_bad_version_input() {
+        assert_eq!(
+            run_main(vec![
+                "cpp-linter".to_string(),
+                "-l".to_string(),
+                "false".to_string(),
+                "-V".to_string()
+            ])
+            .await,
+            1
+        );
+    }
+}
diff --git a/cpp-linter-py/Cargo.toml b/cpp-linter-py/Cargo.toml
index 42128d2..457cfc9 100644
--- a/cpp-linter-py/Cargo.toml
+++ b/cpp-linter-py/Cargo.toml
@@ -12,8 +12,9 @@ name = "cpp_linter"
 crate-type = ["cdylib"]
 
 [dependencies]
-pyo3 = { version = "0.20.0", features = ["extension-module"] }
+pyo3 = { version = "0.22.2", features = ["extension-module"] }
 cpp-linter-lib = { path = "../cpp-linter-lib", version = "*" }
+tokio = "1.39.2"
 
 [features]
 openssl-vendored = ["cpp-linter-lib/openssl-vendored"]
diff --git a/cpp-linter-py/README.md b/cpp-linter-py/README.md
index a3609ba..63e2c7b 100644
--- a/cpp-linter-py/README.md
+++ b/cpp-linter-py/README.md
@@ -1,3 +1,6 @@
+
 # cpp-linter
 
-The python binding to the cpp_linter_rs project.
+The python binding for the [cpp_linter_rs][this] project.
+
+[this]: https://github.com/cpp-linter/cpp_linter_rs
diff --git a/cpp-linter-py/cpp_linter/entry_point.py b/cpp-linter-py/cpp_linter/entry_point.py
index d550630..34a71b6 100644
--- a/cpp-linter-py/cpp_linter/entry_point.py
+++ b/cpp-linter-py/cpp_linter/entry_point.py
@@ -7,8 +7,8 @@
 - In python, the ``sys.argv`` list is passed from the ``cpp_linter.entry_point.main()``
   function to rust via the ``cpp_linter.run.main()`` binding.
-- In rust, the ``std::env::args`` is passed to ``run::main()`` in the binary driver
-  source `bin.rs`.
+- In rust, the ``std::env::args`` is passed to ``run::run_main()`` in the binary driver
+  source `main.rs`.
 
 This is done because of the way the python entry point is invoked. If ``std::env::args``
 is used instead of python's ``sys.argv``, then the list of strings includes the entry
diff --git a/cpp-linter-py/docs/conf.py b/cpp-linter-py/docs/conf.py
index 4f5ca35..a8496ae 100644
--- a/cpp-linter-py/docs/conf.py
+++ b/cpp-linter-py/docs/conf.py
@@ -81,5 +81,5 @@ def setup(app: Sphinx):
     subprocess.run(
         ["cargo", "run", "--example", "cli_doc"],
         check=True,
-        cwd=str(Path(__file__).parent.parent.parent / "cpp-linter-lib"),
+        cwd=str(Path(__file__).parent.parent.parent),
     )
diff --git a/cpp-linter-py/pyproject.toml b/cpp-linter-py/pyproject.toml
index 8946cb1..bfb865d 100644
--- a/cpp-linter-py/pyproject.toml
+++ b/cpp-linter-py/pyproject.toml
@@ -41,3 +41,8 @@ tracker = "https://github.com/cpp-linter/cpp-linter/issues"
 
 [tool.maturin]
 features = ["pyo3/extension-module"]
+exclude = [
+    {path = "**/tests/**", format="sdist"},
+    {path = "**/examples/**", format="sdist"},
+    {path = "**/docs/**", format="sdist"},
+]
diff --git a/cpp-linter-py/src/lib.rs b/cpp-linter-py/src/lib.rs
index 05e9b97..539c5fc 100644
--- a/cpp-linter-py/src/lib.rs
+++ b/cpp-linter-py/src/lib.rs
@@ -1,20 +1,25 @@
 use pyo3::prelude::*;
+use tokio::runtime::Builder;
 
 use cpp_linter_lib::run::run_main;
 
 /// A wrapper for the cpp_linter_lib::run::run_main()
 #[pyfunction]
 fn main(args: Vec<String>) -> PyResult<i32> {
-    Ok(run_main(args))
+    Builder::new_multi_thread()
+        .enable_all()
+        .build()
+        .unwrap()
+        .block_on(async { Ok(run_main(args).await) })
 }
 
 /// The python binding for the cpp_linter package. It only exposes a submodule named
 /// ``cpp_linter.run`` whose only exposed function is used as the entrypoint script.
 /// See the pure python sources in this repo's cpp_linter folder (located at repo root).
 #[pymodule]
-fn cpp_linter(_py: Python, m: &PyModule) -> PyResult<()> {
-    let run_submodule = PyModule::new(_py, "run")?;
+fn cpp_linter(m: &Bound<'_, PyModule>) -> PyResult<()> {
+    let run_submodule = PyModule::new_bound(m.py(), "run")?;
     run_submodule.add_function(wrap_pyfunction!(main, m)?)?;
-    m.add_submodule(run_submodule)?;
+    m.add_submodule(&run_submodule)?;
     Ok(())
 }
diff --git a/cpp_linter_rs.code-workspace b/cpp_linter_rs.code-workspace
new file mode 100644
index 0000000..9d0d9d4
--- /dev/null
+++ b/cpp_linter_rs.code-workspace
@@ -0,0 +1,15 @@
+{
+    "folders": [
+        {
+            "path": "."
+        }
+    ],
+    "settings": {},
+    "extensions": {
+        "recommendations": [
+            "rust-lang.rust-analyzer",
+            "streetsidesoftware.code-spell-checker",
+            "fill-labs.dependi",
+        ]
+    }
+}
diff --git a/cspell.config.yml b/cspell.config.yml
new file mode 100644
index 0000000..549731d
--- /dev/null
+++ b/cspell.config.yml
@@ -0,0 +1,28 @@
+version: "0.2"
+language: en
+words:
+  - bugprone
+  - consts
+  - cppcoreguidelines
+  - endgroup
+  - Falsey
+  - gitmodules
+  - iomanip
+  - libgit
+  - nonminimal
+  - peekable
+  - posix
+  - pybind
+  - pyfunction
+  - pymodule
+  - reqwest
+  - revparse
+  - tempdir
+  - tempfile
+  - venv
+ignorePaths:
+  - .env/**
+  - .venv/**
+  - env/**
+  - venv/**
+  - target/**
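
One change in the run.rs hunks above deserves a gloss: the file list is rewrapped from `Vec<FileObj>` into `Vec<Arc<Mutex<FileObj>>>` (`arc_files`) before it is handed to the now-async `capture_clang_tools_output()`. The sketch below is a minimal, self-contained illustration of that sharing pattern and is not code from this patch: `FileObj` here is a stand-in struct, the one-task-per-file spawning is only an assumed way the async tools might fan out, and it presumes `tokio` with the `rt-multi-thread` and `macros` features enabled.

```rust
use std::sync::{Arc, Mutex};

// Stand-in for the crate's FileObj; only the fields this sketch needs.
#[derive(Debug, Default)]
struct FileObj {
    name: String,
    tidy_advice: Option<String>,
}

#[tokio::main]
async fn main() {
    // run_main() builds this list from the FileFilter or the REST API; it is hard-coded here.
    let arc_files: Vec<Arc<Mutex<FileObj>>> = vec![Arc::new(Mutex::new(FileObj {
        name: "src/demo.cpp".to_string(),
        ..Default::default()
    }))];

    // Fan out one async task per file; each task locks its FileObj to record a result.
    let mut tasks = Vec::new();
    for file in &arc_files {
        let file = Arc::clone(file);
        tasks.push(tokio::spawn(async move {
            let mut guard = file.lock().unwrap();
            let note = format!("clang-tidy ran on {}", guard.name);
            guard.tidy_advice = Some(note);
        }));
    }
    for task in tasks {
        task.await.unwrap();
    }

    // The caller still holds the Arcs, so the recorded advice is visible here,
    // just as post_feedback() reads the same arc_files after the tools finish.
    for file in &arc_files {
        println!("{:?}", file.lock().unwrap());
    }
}
```

Whether `capture_clang_tools_output()` really spawns a task per file is not visible in this diff; the point is that `Arc<Mutex<..>>` lets concurrent tasks record their advice on the same objects that `post_feedback()` reads once the tools are done.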