diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ccd9317a..16a1c3bc 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -2,9 +2,9 @@ name: "Code Scanning - Action" on: push: - branches: [master] + branches: [main] pull_request: - branches: [master] + branches: [main] jobs: CodeQL-Build: diff --git a/.github/workflows/create-release-and-upload-assets.yml b/.github/workflows/create-release-and-upload-assets.yml index 4b8852ef..78b8079d 100644 --- a/.github/workflows/create-release-and-upload-assets.yml +++ b/.github/workflows/create-release-and-upload-assets.yml @@ -11,7 +11,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git fetch --all @@ -40,7 +40,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git fetch --all @@ -62,7 +62,7 @@ jobs: echo "${{ github.ref }}" | cut -dv -f2 > VERSION.txt git add VERSION.txt git diff --quiet && git diff --staged --quiet || git commit -m "${COMMIT_MSG}" - git push origin master + git push origin main - name: Update debian changelog env: EMAIL: furlongm@gmail.com @@ -72,7 +72,7 @@ jobs: skip-checks: true run: | gbp dch --new-version=$(cat VERSION.txt)-1 --release --distribution=stable --spawn-editor=never --commit --commit-msg="${COMMIT_MSG}" - git push origin master + git push origin main build-and-upload-deb-assets: needs: update-version-and-changelog runs-on: ubuntu-latest @@ -91,7 +91,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git config --global --add safe.directory /__w/patchman/patchman @@ -156,7 +156,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - ref: master + ref: main - name: git fetch --all run: | git config --global --add safe.directory /__w/patchman/patchman diff --git a/.gitignore b/.gitignore index 
1497c695..3a1397f5 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,5 @@ dist run pyvenv.cfg .vscode +.venv +*.xml diff --git a/MANIFEST.in b/MANIFEST.in index 293b1da2..5ea60ab7 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,5 +16,7 @@ recursive-include packages * recursive-include repos * recursive-include reports * recursive-include modules * +recursive-include errata * +recursive-include security * recursive-include sbin * recursive-include etc * diff --git a/README.md b/README.md index f4f9bfe2..d425c5fd 100644 --- a/README.md +++ b/README.md @@ -106,6 +106,7 @@ python3-requests python3-colorama python3-magic python3-humanize +python3-yaml ``` The server can optionally make use of celery to asynchronously process the diff --git a/TODO b/TODO index 2c3e98bc..b5f49f0b 100644 --- a/TODO +++ b/TODO @@ -1,9 +1,7 @@ -* allow sending updates from Red Hat / SuSE machines -* web interface support for updating repos, finding updates * add checkrestart-style options to see which services need restarting -* CVE/OVAL apps +* OVAL/OSCAP apps * CA support (tinyca?) -* native python client, using apt/yum/debian libraries +* native python/go client, using apt/yum/debian libraries * record the history of installed packages on a host * also store package descriptions/tags/urls * check for unused repos @@ -11,5 +9,13 @@ * helper script to change paths (e.g. /usr/lib/python3/dist-packages/patchman) * Dockerfile/Dockerimage * compressed reports -* add cronjobs to built packages -* install celery/rabbit/memcache with packages +* add cronjobs to build packages +* dnf5 support +* proxy support +* GLSA support +* only use date for errata issue date? +* parallelize package extraction +* use django-tables2 +* autonaming for deb repos +* associate repos with gentoo hosts +* populate authenticated repos with package lists from hosts? 
diff --git a/VERSION.txt b/VERSION.txt index e265a8cb..25875f01 100644 --- a/VERSION.txt +++ b/VERSION.txt @@ -1 +1 @@ -3.0.15 +3.0.19 diff --git a/arch/utils.py b/arch/utils.py new file mode 100644 index 00000000..1498fdec --- /dev/null +++ b/arch/utils.py @@ -0,0 +1,52 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from arch.models import PackageArchitecture, MachineArchitecture +from patchman.signals import info_message + + +def clean_package_architectures(): + """ Remove package architectures that are no longer in use + """ + parches = PackageArchitecture.objects.filter(package__isnull=True) + plen = parches.count() + if plen == 0: + info_message.send(sender=None, text='No orphaned PackageArchitectures found.') + else: + info_message.send(sender=None, text=f'Removing {plen} orphaned PackageArchitectures') + parches.delete() + + +def clean_machine_architectures(): + """ Remove machine architectures that are no longer in use + """ + marches = MachineArchitecture.objects.filter( + host__isnull=True, + repository__isnull=True, + ) + mlen = marches.count() + if mlen == 0: + info_message.send(sender=None, text='No orphaned MachineArchitectures found.') + else: + info_message.send(sender=None, text=f'Removing {mlen} orphaned MachineArchitectures') + marches.delete() + + +def clean_architectures(): + """ Remove architectures that are no longer in use + """ + clean_package_architectures() + 
clean_machine_architectures() diff --git a/client/patchman-client b/client/patchman-client index 6b6a0943..bf4bc5c9 100755 --- a/client/patchman-client +++ b/client/patchman-client @@ -297,10 +297,20 @@ get_installed_archlinux_packages() { fi } +get_installed_gentoo_packages() { + if check_command_exists qkeyword ; then + gentoo_package_arch=$(qkeyword -A) + fi + if check_command_exists qlist ; then + qlist -Ic -F "'%{PN}' '%{SLOT}' '%{PV}' REL'%{PR}' '${gentoo_package_arch}' 'gentoo' '%{CAT}' '%{REPO}'" | sed -e "s/REL'r/'/g" >> "${tmpfile_pkg}" + fi +} + get_packages() { get_installed_rpm_packages get_installed_deb_packages get_installed_archlinux_packages + get_installed_gentoo_packages } get_modules() { @@ -346,7 +356,9 @@ get_host_data() { os="${PRETTY_NAME}" elif [ "${ID}" == "arch" ] ; then os="${NAME}" - elif [[ "${ID}" =~ "suse" ]] ; then + elif [ "${ID}" == "gentoo" ] ; then + os="${PRETTY_NAME} ${VERSION_ID}" + elif [[ "${ID_LIKE}" =~ "suse" ]] ; then os="${PRETTY_NAME}" elif [ "${ID}" == "astra" ] ; then os="${NAME} $(cat /etc/astra_version)" @@ -386,6 +398,9 @@ get_host_data() { fi done fi + if [ ! -z "${CPE_NAME}" ] ; then + os="${os} [${CPE_NAME}]" + fi if ${verbose} ; then echo "Kernel: ${host_kernel}" echo "Arch: ${host_arch}" @@ -452,7 +467,7 @@ get_repos() { fi # replace this with a dedicated awk or simple python script? yum_repolist=$(yum repolist enabled --verbose 2>/dev/null | sed -e "s/:\? *([0-9]\+ more)$//g" -e "s/ ([0-9]\+$//g" -e "s/:\? more)$//g" -e "s/'//g" -e "s/%/%%/g") - for i in $(echo "${yum_repolist}" | awk '{ if ($1=="Repo-id") {printf "'"'"'"; for (i=3; i> "${tmpfile_rep}" unset priority done @@ -484,7 +499,8 @@ get_repos() { if ${verbose} ; then echo 'Finding apt repos...' fi - IFS=${FULL_IFS} read -r osname shortversion <<<$(echo "${os}" | awk '{print $1,$2}' | cut -d . 
-f 1,2) + osname=$(echo ${os} | cut -d " " -f 1) + shortversion=${VERSION_ID} repo_string="'deb\' \'${osname} ${shortversion} ${host_arch} repo at" repos=$(apt-cache policy | grep -v Translation | grep -E "^ *[0-9]{1,5}" | grep -E " mirror\+file|http(s)?:" | sed -e "s/^ *//g" -e "s/ *$//g" | cut -d " " -f 1,2,3,4) non_mirror_repos=$(echo "${repos}" | grep -Ev "mirror\+file") @@ -510,11 +526,11 @@ get_repos() { echo 'Finding zypper repos...' fi if [ $(zypper -q --no-refresh lr --details | head -n 1 | grep Keep) ] ; then - zypper_lr_cols="2,3,8,10" + zypper_lr_cols='{print "${os}" $3 "|" $2 "|" $8 "|" $10}' else - zypper_lr_cols="2,3,7,9" + zypper_lr_cols='{print "${os}" $3 "|" $2 "|" $7 "|" $9}' fi - for i in $(zypper -q --no-refresh lr -E -u --details | grep -v ^$ | tail -n +3 | cut -d "|" -f ${zypper_lr_cols} | sed -e "s/ *|/ ${host_arch} |/" -e "s/\?[a-zA-Z0-9_-]* *$//" -e "s/^ /'/g" -e "s/ *| */' '/g" -e "s/ *$/'/g") ; do + for i in $(zypper -q --no-refresh lr -E -u --details | grep -v ^$ | tail -n +3 | awk -F"|" "${zypper_lr_cols}" | sed -e "s/\${os}/${PRETTY_NAME}/" -e "s/ *|/ ${host_arch} |/" -e "s/\?[a-zA-Z0-9_-]* *$//" -e "s/^/'/g" -e "s/ *| */' '/g" -e "s/ *$/'/g") ; do echo \'rpm\' ${i} >> "${tmpfile_rep}" id=$(echo ${i} | cut -d \' -f 4) suse_repo=$(echo ${i} | grep -e "https://updates.suse.com/.*") @@ -557,6 +573,56 @@ get_repos() { done fi + # Gentoo + if [[ "${os}" =~ "Gentoo" ]] ; then + if [ ${verbose} == 1 ] ; then + echo 'Finding portage repos...' 
+ fi + declare -A repo_info + repos_output=$(portageq repos_config /) + repo_name="" + priority="" + sync_uri="" + + while IFS= read -r line; do + # if the line starts with a section header (e.g., [gentoo], [guru]), it's the repo name + if [[ "${line}" =~ ^\[(.*)\] ]]; then + # if we already have a repo_name, save the previous entry + if [[ -n "${repo_name}" && -n "${sync_uri}" ]]; then + repo_info["${repo_name}"]="${priority},${sync_uri}" + fi + # else start new repo parsing, resetting vars + repo_name="${BASH_REMATCH[1]}" + priority="" + sync_uri="" + fi + + # if the line contains "priority", extract the value, 0 if it doesnt exist + if [[ "${line}" =~ "priority" ]]; then + priority=$(echo "${line}" | cut -d'=' -f2 | xargs) + fi + + # if the line contains "sync-uri", extract the value + if [[ "${line}" =~ "sync-uri" ]]; then + sync_uri=$(echo "${line}" | cut -d'=' -f2 | xargs) + fi + done <<< "${repos_output}" + + # save the last repository entry if it's available + if [[ -n "${repo_name}" && -n "${sync_uri}" ]]; then + repo_info["${repo_name}"]="${priority},${sync_uri}" + fi + + for repo in "${!repo_info[@]}"; do + priority=$(echo ${repo_info[$repo]} | cut -d',' -f1) + sync_uri=$(echo ${repo_info[$repo]} | cut -d',' -f2) + if [ "${priority}" == "" ] ; then + priority=0 + fi + echo "'gentoo' 'Gentoo Linux ${repo} Repo ${host_arch}' '${repo}' '${priority}' '${sync_uri}'" >> "${tmpfile_rep}" + done + fi + IFS=${FULL_IFS} sed -i -e '/^$/d' "${tmpfile_rep}" @@ -629,10 +695,11 @@ post_data() { } if ! check_command_exists which || \ + ! check_command_exists awk || \ ! check_command_exists mktemp || \ ! check_command_exists curl || \ ! check_command_exists flock ; then - echo "which, mktemp, flock or curl was not found, exiting." + echo "which, awk, mktemp, flock or curl was not found, exiting." 
exit 1 fi diff --git a/debian/changelog b/debian/changelog index fa0d8651..452ac0b4 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,35 @@ +patchman (3.0.19-1) stable; urgency=medium + + * switch default branch from master to main + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Sat, 01 Mar 2025 20:45:02 +0000 + +patchman (3.0.18-1) stable; urgency=medium + + * recognize https mirrors in mirrorlists + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Sat, 01 Mar 2025 20:36:58 +0000 + +patchman (3.0.17-1) stable; urgency=medium + + [ Hugo Deprez ] + * add python3-yaml dependency + * update readme for depenency + + [ Marcus Furlong ] + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Thu, 27 Feb 2025 16:07:15 +0000 + +patchman (3.0.16-1) stable; urgency=medium + + * change compression format to support older rpm versions + * auto-commit to update version skip-checks: true + + -- Marcus Furlong Wed, 26 Feb 2025 05:25:29 +0000 + patchman (3.0.15-1) stable; urgency=medium [ Vladimir Lettiev ] diff --git a/debian/control b/debian/control index ca0792ce..67026269 100644 --- a/debian/control +++ b/debian/control @@ -3,23 +3,25 @@ Section: python Priority: optional Maintainer: Marcus Furlong Uploaders: Marcus Furlong -Build-Depends: debhelper (>=13), python3 (>= 3.10), dh-python, dh-exec +Build-Depends: debhelper (>=13), python3 (>= 3.11), dh-python, dh-exec Standards-Version: 4.6.2 Homepage: https://github.com/furlongm/patchman Vcs-Git: git://github.com/furlongm/patchman Vcs-Browser: https://github.com/furlongm/patchman -X-Python3-Version: >= 3.10 +X-Python3-Version: >= 3.11 Package: python3-patchman Architecture: all Homepage: https://github.com/furlongm/patchman -Depends: ${misc:Depends}, python3 (>= 3.10), python3-django (>= 3.2), - python3-django-tagging, python3-django-extensions, python3-django-bootstrap3, +Depends: ${misc:Depends}, python3 (>= 3.11), 
python3-django (>= 4.2), + python3-django-extensions, python3-django-bootstrap3, python3-cvss, python3-djangorestframework, python3-django-filters, python3-debian, - python3-rpm, python3-progressbar, python3-lxml, python3-defusedxml, + python3-rpm, python3-tqdm, python3-defusedxml, python3-pip, python3-tenacity, python3-requests, python3-colorama, python3-magic, python3-humanize, - python3-pip, python3-pymemcache, memcached, libapache2-mod-wsgi-py3, apache2 -Suggests: python3-django-celery, python3-mysqldb, python3-psycopg2 + python3-yaml, libapache2-mod-wsgi-py3, apache2, sqlite3, + celery, python3-celery, python3-django-celery-beat, redis-server, + python3-redis, python3-git, python3-django-taggit +Suggests: python3-mysqldb, python3-psycopg2, python3-pymemcache, memcached Description: Django-based patch status monitoring tool for linux systems. . Patchman provides a web interface for monitoring host package updates. @@ -41,7 +43,7 @@ Description: Django-based patch status monitoring tool for linux systems. Package: patchman-client Architecture: all Homepage: https://github.com/furlongm/patchman -Depends: ${misc:Depends}, curl, debianutils, util-linux, coreutils +Depends: ${misc:Depends}, curl, debianutils, util-linux, coreutils, mawk Description: Client for the patchman monitoring system. . 
The client will send a list of packages and repositories to the upstream diff --git a/debian/python3-patchman.install b/debian/python3-patchman.install index ededd6ec..e13b11ca 100755 --- a/debian/python3-patchman.install +++ b/debian/python3-patchman.install @@ -1,3 +1,4 @@ #!/usr/bin/dh-exec etc/patchman/apache.conf.example => etc/apache2/conf-available/patchman.conf etc/patchman/local_settings.py etc/patchman +etc/systemd/system/patchman-celery.service => lib/systemd/system/patchman-celery.service diff --git a/debian/python3-patchman.postinst b/debian/python3-patchman.postinst index 015a9e27..b64cb816 100644 --- a/debian/python3-patchman.postinst +++ b/debian/python3-patchman.postinst @@ -20,8 +20,12 @@ if [ "$1" = "configure" ] ; then patchman-manage makemigrations patchman-manage migrate --run-syncdb --fake-initial + sqlite3 /var/lib/patchman/db/patchman.db 'PRAGMA journal_mode=WAL;' chown -R www-data:www-data /var/lib/patchman + adduser --system --group patchman-celery + usermod -a -G www-data patchman-celery + chmod g+w /var/lib/patchman /var/lib/patchman/db /var/lib/patchman/db/patchman.db echo echo "Remember to run 'patchman-manage createsuperuser' to create a user." diff --git a/debian/rules b/debian/rules index 63a1916b..c1612816 100755 --- a/debian/rules +++ b/debian/rules @@ -9,7 +9,7 @@ clean:: export PYBUILD_NAME=patchman %: - dh $@ --with python3 --buildsystem=pybuild + dh $@ --with=python3 --buildsystem=pybuild --with=systemd override_dh_auto_test: true diff --git a/errata/__init__.py b/errata/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/errata/admin.py b/errata/admin.py new file mode 100644 index 00000000..88190ff6 --- /dev/null +++ b/errata/admin.py @@ -0,0 +1,25 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.contrib import admin +from errata.models import Erratum + + +class ErratumAdmin(admin.ModelAdmin): + readonly_fields = ('affected_packages', 'fixed_packages', 'references') + + +admin.site.register(Erratum, ErratumAdmin) diff --git a/errata/apps.py b/errata/apps.py new file mode 100644 index 00000000..9411f035 --- /dev/null +++ b/errata/apps.py @@ -0,0 +1,21 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.apps import AppConfig + + +class ErrataConfig(AppConfig): + name = 'errata' diff --git a/errata/managers.py b/errata/managers.py new file mode 100644 index 00000000..e39147be --- /dev/null +++ b/errata/managers.py @@ -0,0 +1,22 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import models + + +class ErratumManager(models.Manager): + def get_queryset(self): + return super().get_queryset().select_related() diff --git a/errata/migrations/0001_initial.py b/errata/migrations/0001_initial.py new file mode 100644 index 00000000..d02a7dc8 --- /dev/null +++ b/errata/migrations/0001_initial.py @@ -0,0 +1,43 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ('packages', '0002_delete_erratum_delete_erratumreference'), + ('operatingsystems', '0005_rename_osgroup_osrelease_rename_os_osvariant_and_more'), + ('security', '0001_initial'), + ] + + operations = [ + migrations.CreateModel( + name='ErratumReference', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('er_type', models.CharField(max_length=255)), + ('url', models.URLField(max_length=765)), + ], + ), + migrations.CreateModel( + name='Erratum', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255, unique=True)), + ('e_type', models.CharField(max_length=255)), + ('issue_date', models.DateTimeField()), + ('synopsis', models.CharField(max_length=255)), + ('cves', models.ManyToManyField(blank=True, to='security.cve')), + ('osreleases', 
models.ManyToManyField(blank=True, to='operatingsystems.osrelease')), + ('packages', models.ManyToManyField(blank=True, to='packages.package')), + ('references', models.ManyToManyField(blank=True, to='errata.erratumreference')), + ], + options={ + 'verbose_name': 'Erratum', + 'verbose_name_plural': 'Errata', + }, + ), + ] diff --git a/errata/migrations/0002_alter_erratumreference_unique_together.py b/errata/migrations/0002_alter_erratumreference_unique_together.py new file mode 100644 index 00000000..f88ff86d --- /dev/null +++ b/errata/migrations/0002_alter_erratumreference_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-05 01:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0001_initial'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='erratumreference', + unique_together={('er_type', 'url')}, + ), + ] diff --git a/errata/migrations/0003_delete_erratumreference_alter_erratum_references.py b/errata/migrations/0003_delete_erratumreference_alter_erratum_references.py new file mode 100644 index 00000000..aebfd7ed --- /dev/null +++ b/errata/migrations/0003_delete_erratumreference_alter_erratum_references.py @@ -0,0 +1,26 @@ +# Generated by Django 4.2.19 on 2025-03-05 19:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0005_reference_cve_references'), + ('errata', '0002_alter_erratumreference_unique_together'), + ] + + operations = [ + migrations.RemoveField( + model_name='erratum', + name='references', + ), + migrations.DeleteModel( + name='ErratumReference', + ), + migrations.AddField( + model_name='erratum', + name='references', + field=models.ManyToManyField(blank=True, to='security.Reference'), + ), + ] diff --git a/errata/migrations/0004_rename_packages_erratum_fixed_packages.py b/errata/migrations/0004_rename_packages_erratum_fixed_packages.py new file mode 100644 index 
00000000..770ed814 --- /dev/null +++ b/errata/migrations/0004_rename_packages_erratum_fixed_packages.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-06 04:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0003_delete_erratumreference_alter_erratum_references'), + ] + + operations = [ + migrations.RenameField( + model_name='erratum', + old_name='packages', + new_name='fixed_packages', + ), + ] diff --git a/errata/migrations/0005_erratum_affected_packages_and_more.py b/errata/migrations/0005_erratum_affected_packages_and_more.py new file mode 100644 index 00000000..f5cc9571 --- /dev/null +++ b/errata/migrations/0005_erratum_affected_packages_and_more.py @@ -0,0 +1,24 @@ +# Generated by Django 4.2.19 on 2025-03-06 05:23 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0004_alter_package_options_alter_packagecategory_options_and_more'), + ('errata', '0004_rename_packages_erratum_fixed_packages'), + ] + + operations = [ + migrations.AddField( + model_name='erratum', + name='affected_packages', + field=models.ManyToManyField(blank=True, related_name='affected_by_erratum', to='packages.package'), + ), + migrations.AlterField( + model_name='erratum', + name='fixed_packages', + field=models.ManyToManyField(blank=True, related_name='fixed_by_erratum', to='packages.package'), + ), + ] diff --git a/errata/migrations/0006_alter_erratum_options.py b/errata/migrations/0006_alter_erratum_options.py new file mode 100644 index 00000000..22cad601 --- /dev/null +++ b/errata/migrations/0006_alter_erratum_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-07 03:06 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0005_erratum_affected_packages_and_more'), + ] + + operations = [ + migrations.AlterModelOptions( + name='erratum', + options={'ordering': 
['-issue_date', 'name'], 'verbose_name': 'Erratum', 'verbose_name_plural': 'Errata'}, + ), + ] diff --git a/errata/migrations/0007_alter_erratum_fixed_packages.py b/errata/migrations/0007_alter_erratum_fixed_packages.py new file mode 100644 index 00000000..5cf9ec56 --- /dev/null +++ b/errata/migrations/0007_alter_erratum_fixed_packages.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.19 on 2025-03-10 23:49 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0005_alter_package_packagetype'), + ('errata', '0006_alter_erratum_options'), + ] + + operations = [ + migrations.AlterField( + model_name='erratum', + name='fixed_packages', + field=models.ManyToManyField(blank=True, related_name='provides_fix_in_erratum', to='packages.package'), + ), + ] diff --git a/errata/migrations/__init__.py b/errata/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/errata/models.py b/errata/models.py new file mode 100644 index 00000000..b10daf4d --- /dev/null +++ b/errata/models.py @@ -0,0 +1,166 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import json + +from django.db import models +from django.urls import reverse +from django.db import IntegrityError + +from packages.models import Package, PackageUpdate +from packages.utils import find_evr, get_matching_packages +from errata.managers import ErratumManager +from security.models import CVE, Reference +from security.utils import get_or_create_cve, get_or_create_reference +from patchman.signals import error_message +from util import get_url + + +class Erratum(models.Model): + + name = models.CharField(max_length=255, unique=True) + e_type = models.CharField(max_length=255) + issue_date = models.DateTimeField() + synopsis = models.CharField(max_length=255) + affected_packages = models.ManyToManyField(Package, blank=True, related_name='affected_by_erratum') + fixed_packages = models.ManyToManyField(Package, blank=True, related_name='provides_fix_in_erratum') + from operatingsystems.models import OSRelease + osreleases = models.ManyToManyField(OSRelease, blank=True) + cves = models.ManyToManyField(CVE, blank=True) + references = models.ManyToManyField(Reference, blank=True) + + objects = ErratumManager() + + class Meta: + verbose_name = 'Erratum' + verbose_name_plural = 'Errata' + ordering = ['-issue_date', 'name'] + + def __str__(self): + text = f'{self.name} ({self.e_type}), {self.cves.count()} related CVEs, ' + text += f'affecting {self.osreleases.count()} OS Releases, ' + text += f'providing {self.fixed_packages.count()} fixed Packages' + return text + + def get_absolute_url(self): + return reverse('errata:erratum_detail', args=[self.name]) + + def scan_for_security_updates(self): + if self.e_type == 'security': + for package in self.fixed_packages.all(): + affected_updates = PackageUpdate.objects.filter( + newpackage=package, + security=False, + ) + for affected_update in affected_updates: + affected_update.security = True + try: + affected_update.save() + except IntegrityError as e: + error_message.send(sender=None, text=e) + # a 
version of this update already exists that is + # marked as a security update, so delete this one + affected_update.delete() + for package in self.affected_packages.all(): + affected_updates = PackageUpdate.objects.filter( + oldpackage=package, + security=False, + ) + for affected_update in affected_updates: + affected_update.security = True + try: + affected_update.save() + except IntegrityError as e: + error_message.send(sender=None, text=e) + # a version of this update already exists that is + # marked as a security update, so delete this one + affected_update.delete() + + def fetch_osv_dev_data(self): + osv_dev_url = f'https://api.osv.dev/v1/vulns/{self.name}' + res = get_url(osv_dev_url) + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.name} - {osv_dev_url}') + return + data = res.content + osv_dev_json = json.loads(data) + self.parse_osv_dev_data(osv_dev_json) + + def parse_osv_dev_data(self, osv_dev_json): + name = osv_dev_json.get('id') + if name != self.name: + error_message.send(sender=None, text=f'Erratum name mismatch - {self.name} != {name}') + return + related = osv_dev_json.get('related') + if related: + for vuln in related: + if vuln.startswith('CVE'): + self.add_cve(vuln) + affected = osv_dev_json.get('affected') + if not affected: + return + affected_packages = set() + for package in affected: + fixed_packages = set() + ranges = package.get('ranges') + for affected_range in ranges: + for event in affected_range.get('events'): + fixed_version = event.get('fixed') + if fixed_version: + epoch, ver, rel = find_evr(fixed_version) + matching_packages = self.fixed_packages.filter(epoch=epoch, version=ver, release=rel).all() + for match in matching_packages: + fixed_packages.add(match) + affected_versions = package.get('versions') + if not affected_versions: + continue + for package in fixed_packages: + for version in affected_versions: + epoch, ver, rel = find_evr(version) + matching_packages = 
get_matching_packages( + name=package.name, + epoch=epoch, + version=ver, + release=rel, + arch=package.arch, + p_type=package.packagetype, + ) + for match in matching_packages: + affected_packages.add(match) + self.add_affected_packages(affected_packages) + + def add_fixed_packages(self, packages): + for package in packages: + self.fixed_packages.add(package) + self.save() + + def add_affected_packages(self, packages): + for package in packages: + self.affected_packages.add(package) + + def add_cve(self, cve_id): + """ Add a CVE to an Erratum object + """ + if not cve_id.startswith('CVE') or not cve_id.split('-')[1].isdigit(): + error_message.send(sender=None, text=f'Not a CVE ID: {cve_id}') + return + self.cves.add(get_or_create_cve(cve_id)) + + def add_reference(self, ref_type, url): + """ Add a reference to an Erratum object + """ + reference = get_or_create_reference(ref_type=ref_type, url=url) + self.references.add(reference) diff --git a/errata/serializers.py b/errata/serializers.py new file mode 100644 index 00000000..c559a422 --- /dev/null +++ b/errata/serializers.py @@ -0,0 +1,25 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from rest_framework import serializers + +from errata.models import Erratum + + +class ErratumSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = Erratum + fields = ('id', 'name', 'e_type', 'issue_date', 'synopsis', 'cves', 'releases', 'references') diff --git a/errata/sources/__init__.py b/errata/sources/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/errata/sources/distros/__init__.py b/errata/sources/distros/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/errata/sources/distros/alma.py b/errata/sources/distros/alma.py new file mode 100644 index 00000000..e0f2d4ae --- /dev/null +++ b/errata/sources/distros/alma.py @@ -0,0 +1,165 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import concurrent.futures +import json + +from django.db import connections + +from operatingsystems.utils import get_or_create_osrelease +from packages.models import Package +from packages.utils import get_or_create_package, parse_package_string +from util import get_url, fetch_content, get_setting_of_type +from patchman.signals import pbar_start, pbar_update + + +def update_alma_errata(concurrent_processing=True): + """ Update Alma Linux advisories from errata.almalinux.org: + https://errata.almalinux.org/8/errata.full.json + https://errata.almalinux.org/9/errata.full.json + and process advisories + """ + default_alma_releases = [8, 9] + alma_releases = get_setting_of_type( + setting_name='ALMA_RELEASES', + setting_type=list, + default=default_alma_releases, + ) + for release in alma_releases: + advisories = fetch_alma_advisories(release) + process_alma_errata(release, advisories, concurrent_processing) + + +def fetch_alma_advisories(release): + """ Fetch Alma Linux advisories + """ + alma_errata_url = f'https://errata.almalinux.org/{release}/errata.full.json' + headers = {'Accept': 'application/json', 'Cache-Control': 'no-cache, no-tranform'} + res = get_url(alma_errata_url, headers=headers) + data = fetch_content(res, f'Fetching Alma {release} Errata') + advisories = json.loads(data).get('data') + return advisories + + +def process_alma_errata(release, advisories, concurrent_processing): + """ Process Alma Linux Errata + """ + if concurrent_processing: + process_alma_errata_concurrently(release, advisories) + else: + process_alma_errata_serially(release, advisories) + + +def process_alma_errata_serially(release, advisories): + """ Process Alma Linux Errata serially + """ + elen = len(advisories) + pbar_start.send(sender=None, ptext=f'Processing {elen} Alma {release} Errata', plen=elen) + for i, advisory in enumerate(advisories): + process_alma_erratum(release, advisory) + pbar_update.send(sender=None, index=i + 1) + + +def 
process_alma_errata_concurrently(release, advisories): + """ Process Alma Linux Errata concurrently + """ + connections.close_all() + elen = len(advisories) + pbar_start.send(sender=None, ptext=f'Processing {elen} Alma {release} Errata', plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: + futures = [executor.submit(process_alma_erratum, release, advisory) for advisory in advisories] + for future in concurrent.futures.as_completed(futures): + i += 1 + pbar_update.send(sender=None, index=i + 1) + + +def process_alma_erratum(release, advisory): + """ Process a single Alma Linux Erratum + """ + from errata.utils import get_or_create_erratum + erratum_name = advisory.get('id') + issue_date = advisory.get('issued_date') + synopsis = advisory.get('title') + e_type = advisory.get('type') + e, created = get_or_create_erratum( + name=erratum_name, + e_type=e_type, + issue_date=issue_date, + synopsis=synopsis, + ) + add_alma_erratum_osreleases(e, release) + add_alma_erratum_references(e, advisory) + add_alma_erratum_packages(e, advisory) + add_alma_erratum_modules(e, advisory) + + +def add_alma_erratum_osreleases(e, release): + """ Update OS Release for Alma Linux errata + """ + osrelease = get_or_create_osrelease(name=f'Alma Linux {release}') + e.osreleases.add(osrelease) + + +def add_alma_erratum_references(e, advisory): + """ Add references for Alma Linux errata + """ + references = advisory.get('references') + for reference in references: + ref_id = reference.get('id') + ref_type = reference.get('type') + er_url = reference.get('href') + if ref_type == 'cve': + e.add_cve(ref_id) + continue + if ref_type == 'self': + ref_type = 'Alma Advisory' + e.add_reference(ref_type, er_url) + + +def add_alma_erratum_packages(e, advisory): + """ Parse and add packages for Alma Linux errata + """ + fixed_packages = set() + packages = advisory.get('packages') + for package in packages: + package_name = package.get('filename') + if package_name: 
+ name, epoch, ver, rel, dist, arch = parse_package_string(package_name) + p_type = Package.RPM + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) + + +def add_alma_erratum_modules(e, advisory): + """ Parse and add modules for Alma Linux errata + """ + from modules.utils import get_matching_modules + fixed_packages = set() + modules = advisory.get('modules') + for module in modules: + name = module.get('name') + arch = module.get('arch') + context = module.get('context') + stream = module.get('stream') + version = module.get('version') + matching_modules = get_matching_modules(name, stream, version, context, arch) + for match in matching_modules: + for fixed_package in match.packages.all(): + match.packages.add(fixed_package) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) diff --git a/errata/sources/distros/arch.py b/errata/sources/distros/arch.py new file mode 100644 index 00000000..40d0dada --- /dev/null +++ b/errata/sources/distros/arch.py @@ -0,0 +1,216 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import concurrent.futures +import json + +from django.db import connections + +from operatingsystems.utils import get_or_create_osrelease +from patchman.signals import error_message, pbar_start, pbar_update +from packages.models import Package +from packages.utils import find_evr, get_matching_packages, get_or_create_package +from util import get_url, fetch_content + + +def update_arch_errata(concurrent_processing=False): + """ Update Arch Linux Errata from the following sources: + https://security.archlinux.org/advisories.json + """ + add_arch_linux_osrelease() + advisories = fetch_arch_errata() + parse_arch_errata(advisories, concurrent_processing) + + +def fetch_arch_errata(): + """ Fetch Arch Linux Errata Advisories + https://security.archlinux.org/advisories.json + """ + res = get_url('https://security.archlinux.org/advisories.json') + advisories = fetch_content(res, 'Fetching Arch Advisories') + return json.loads(advisories) + + +def parse_arch_errata(advisories, concurrent_processing): + """ Parse Arch Linux Errata Advisories + """ + if concurrent_processing: + parse_arch_errata_concurrently(advisories) + else: + parse_arch_errata_serially(advisories) + + +def parse_arch_errata_serially(advisories): + """ Parse Arch Linux Errata Advisories serially + """ + osrelease = get_or_create_osrelease(name='Arch Linux') + elen = len(advisories) + pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) + for i, advisory in enumerate(advisories): + process_arch_erratum(advisory, osrelease) + pbar_update.send(sender=None, index=i + 1) + + +def parse_arch_errata_concurrently(advisories): + """ Parse Arch Linux Errata Advisories concurrently + """ + osrelease = get_or_create_osrelease(name='Arch Linux') + connections.close_all() + elen = len(advisories) + pbar_start.send(sender=None, ptext=f'Processing {elen} Arch Advisories', plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: + futures = 
[executor.submit(process_arch_erratum, advisory, osrelease) for advisory in advisories] + for future in concurrent.futures.as_completed(futures): + i += 1 + pbar_update.send(sender=None, index=i + 1) + + +def process_arch_erratum(advisory, osrelease): + """ Process a single Arch Linux Erratum + """ + from errata.utils import get_or_create_erratum + try: + name = advisory.get('name') + issue_date = advisory.get('date') + package = advisory.get('package') + issue_type = advisory.get('type') + synopsis = f'{package} - {issue_type}' + e, created = get_or_create_erratum( + name=name, + e_type='security', + issue_date=issue_date, + synopsis=synopsis, + ) + e.osreleases.add(osrelease) + add_arch_erratum_references(e, advisory) + add_arch_erratum_packages(e, advisory) + except Exception as exc: + error_message.send(sender=None, text=exc) + + +def add_arch_linux_osrelease(): + """ Add Arch Linux OSRelease and link existing OSVariants + """ + get_or_create_osrelease(name='Arch Linux') + + +def add_arch_erratum_references(e, advisory): + """ Add Arch Linux Erratum References + """ + reference = advisory.get('reference') + e.add_reference('Mailing List', reference) + asa_id = advisory.get('name') + url = f'https://security.archlinux.org/advisory/{asa_id}' + e.add_reference('ASA', url) + raw_url = f'{url}/raw' + res = get_url(raw_url) + data = res.content + parse_arch_erratum_raw(e, data.decode()) + + +def parse_arch_erratum_raw(e, data): + """ Parse Arch Linux Erratum Raw Data for CVEs and References + """ + in_reference_section = False + for line in data.splitlines(): + if line.startswith('CVE-ID'): + cve_ids = line.split(':')[1].strip().split() + for cve_id in cve_ids: + e.add_cve(cve_id) + elif line.startswith('References'): + in_reference_section = True + continue + if in_reference_section: + if line.startswith('='): + continue + else: + reference = line.strip() + if reference: + e.add_reference('Link', reference) + + +def add_arch_erratum_packages(e, advisory): + """ Add 
Arch Linux Erratum Packages + """ + group_id = advisory.get('group') + group_url = f'https://security.archlinux.org/group/{group_id}.json' + res = get_url(group_url) + data = res.content + group = json.loads(data) + packages = group.get('packages') + + affected = group.get('affected') + affected_packages = find_arch_affected_packages(affected, packages) + e.add_affected_packages(affected_packages) + + fixed = group.get('fixed') + fixed_packages = find_arch_fixed_packages(fixed, packages) + e.add_fixed_packages(fixed_packages) + + add_arch_erratum_group_references(e, group) + add_arch_erratum_group_cves(e, group) + + +def find_arch_affected_packages(affected, packages): + """ Find Arch Linux Erratum Affected Packages + This checks existing packages for matches and does not + require an architecture + """ + package_type = Package.ARCH + epoch, version, release = find_evr(affected) + affected_packages = set() + for package in packages: + matching_packages = get_matching_packages(package, epoch, version, release, package_type) + for match in matching_packages: + affected_packages.add(match) + return affected_packages + + +def find_arch_fixed_packages(fixed, packages): + """ Find Arch Linux Erratum Fixed Packages + This adds new packages with arch x86_64 only + """ + package_type = Package.ARCH + epoch, version, release = find_evr(fixed) + fixed_packages = set() + for package in packages: + fixed_package = get_or_create_package( + name=package, + epoch=epoch, + version=version, + release=release, + arch='x86_64', + p_type=package_type + ) + fixed_packages.add(fixed_package) + return fixed_packages + + +def add_arch_erratum_group_references(e, group): + """ Add Arch Linux Erratum References + """ + references = group.get('references') + for reference in references: + e.add_reference('Link', reference) + + +def add_arch_erratum_group_cves(e, group): + """ Add Arch Linux Erratum CVEs + """ + cve_ids = group.get('issues') + for cve_id in cve_ids: + e.add_cve(cve_id) diff 
--git a/errata/sources/distros/centos.py b/errata/sources/distros/centos.py new file mode 100644 index 00000000..eefb2b88 --- /dev/null +++ b/errata/sources/distros/centos.py @@ -0,0 +1,161 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import re +from defusedxml import ElementTree + +from operatingsystems.utils import get_or_create_osrelease +from packages.models import Package +from packages.utils import parse_package_string, get_or_create_package +from patchman.signals import error_message, pbar_start, pbar_update +from util import bunzip2, get_url, fetch_content, get_sha1, get_setting_of_type + + +def update_centos_errata(): + """ Update CentOS errata from https://cefs.steve-meier.de/ + """ + data = fetch_centos_errata_checksum() + expected_checksum = parse_centos_errata_checksum(data) + data = fetch_centos_errata() + actual_checksum = get_sha1(data) + if actual_checksum != expected_checksum: + e = 'CEFS checksum mismatch, skipping CentOS errata parsing\n' + e += f'{actual_checksum} (actual) != {expected_checksum} (expected)' + error_message.send(sender=None, text=e) + else: + if data: + parse_centos_errata(bunzip2(data)) + + +def fetch_centos_errata_checksum(): + """ Fetch CentOS errata checksum from https://cefs.steve-meier.de/ + """ + res = get_url('https://cefs.steve-meier.de/errata.latest.sha1') + return fetch_content(res, 'Fetching CentOS Errata Checksum') + + +def fetch_centos_errata(): + """ 
Fetch CentOS errata from https://cefs.steve-meier.de/ + """ + res = get_url('https://cefs.steve-meier.de/errata.latest.xml.bz2') + return fetch_content(res, 'Fetching CentOS Errata') + + +def parse_centos_errata_checksum(data): + """ Parse the errata checksum and return the bz2 checksum + """ + for line in data.decode('utf-8').splitlines(): + if line.endswith('errata.latest.xml.bz2'): + return line.split()[0] + + +def parse_centos_errata(data): + """ Parse CentOS errata from https://cefs.steve-meier.de/ + """ + result = ElementTree.XML(data) + errata_xml = result.findall('*') + elen = len(errata_xml) + pbar_start.send(sender=None, ptext=f'Processing {elen} CentOS Errata', plen=elen) + for i, child in enumerate(errata_xml): + pbar_update.send(sender=None, index=i + 1) + releases = get_centos_erratum_releases(child.findall('os_release')) + if not accepted_centos_release(releases): + continue + e = parse_centos_errata_tag(child.tag, child.attrib) + if e is not None: + parse_centos_errata_children(e, child.iter()) + + +def parse_centos_errata_tag(name, attribs): + """ Parse all tags that contain errata. If the erratum already exists, + we assume that it already has all refs, packages, releases and arches. 
+ """ + from errata.utils import get_or_create_erratum + e = None + if name.startswith('CE'): + issue_date = attribs['issue_date'] + references = attribs['references'] + synopsis = attribs['synopsis'] + if name.startswith('CEBA'): + e_type = 'bugfix' + elif name.startswith('CESA'): + e_type = 'security' + elif name.startswith('CEEA'): + e_type = 'enhancement' + e, created = get_or_create_erratum( + name=name.replace('--', ':'), + e_type=e_type, + issue_date=issue_date, + synopsis=synopsis, + ) + add_centos_erratum_references(e, references) + return e + + +def add_centos_erratum_references(e, references): + """ Add references for CentOS errata + """ + for reference in references.split(' '): + e.add_reference('Link', reference) + + +def parse_centos_errata_children(e, children): + """ Parse errata children to obtain architecture, release and packages + """ + fixed_packages = set() + for c in children: + if c.tag == 'os_arch': + pass + elif c.tag == 'os_release': + if accepted_centos_release([c.text]): + osrelease_name = f'CentOS {c.text}' + osrelease = get_or_create_osrelease(name=osrelease_name) + e.osreleases.add(osrelease) + elif c.tag == 'packages': + name, epoch, ver, rel, dist, arch = parse_package_string(c.text) + match = re.match(r'.*el([0-9]+).*', rel) + if match: + release = match.group(1) + if accepted_centos_release([release]): + p_type = Package.RPM + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) + + +def get_centos_erratum_releases(releases_xml): + """ Collect the releases a given erratum pertains to + """ + releases = set() + for release in releases_xml: + releases.add(int(release.text)) + return releases + + +def accepted_centos_release(releases): + """ Check if we accept the releases that the erratum pertains to + If any release is accepted we return True, else False + """ + min_release = get_setting_of_type( + setting_name='MIN_CENTOS_RELEASE', + 
setting_type=int, + default=7, + ) + acceptable_release = False + for release in releases: + if int(release) >= min_release: + acceptable_release = True + return acceptable_release diff --git a/errata/sources/distros/debian.py b/errata/sources/distros/debian.py new file mode 100644 index 00000000..93ae2bd5 --- /dev/null +++ b/errata/sources/distros/debian.py @@ -0,0 +1,354 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import concurrent.futures +import csv +import re +from datetime import datetime +from debian.deb822 import Dsc +from io import StringIO + +from django.db import connections + +from operatingsystems.models import OSRelease +from operatingsystems.utils import get_or_create_osrelease +from packages.models import Package +from packages.utils import get_or_create_package, find_evr +from patchman.signals import error_message, pbar_start, pbar_update, warning_message +from util import get_url, fetch_content, get_setting_of_type, extract + +DSCs = {} + + +def update_debian_errata(concurrent_processing=True): + """ Update Debian errata using: + https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list + https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list + """ + codenames = retrieve_debian_codenames() + create_debian_os_releases(codenames) + dsas = fetch_debian_dsa_advisories() + dlas = fetch_debian_dla_advisories() + advisories = 
dsas + dlas + fetch_dscs_from_debian_package_file_maps() + accepted_codenames = get_accepted_debian_codenames() + errata = parse_debian_errata(advisories, accepted_codenames) + create_debian_errata(errata, accepted_codenames, concurrent_processing) + + +def fetch_debian_dsa_advisories(): + """ Fetch the current Debian DLA file + """ + debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DSA/list' + res = get_url(debian_dsa_url) + data = fetch_content(res, 'Fetching Debian DSAs') + return data.decode() + + +def fetch_debian_dla_advisories(): + """ Fetch the current Debian DSA file + """ + debian_dsa_url = 'https://salsa.debian.org/security-tracker-team/security-tracker/raw/master/data/DLA/list' + res = get_url(debian_dsa_url) + data = fetch_content(res, 'Fetching Debian DLAs') + return data.decode() + + +def fetch_dscs_from_debian_package_file_maps(): + """ Fetch the current Debian package file maps + """ + repos = ['debian', 'debian-security'] + for repo in repos: + file_map_url = f'https://deb.debian.org/{repo}/indices/package-file.map.bz2' + res = get_url(file_map_url) + data = fetch_content(res, f'Fetching `{repo}` package file map') + file_map_data = extract(data, file_map_url).decode() + parse_debian_package_file_map(file_map_data, repo) + + +def parse_debian_package_file_map(data, repo): + """ Parse the a Debian package file map + Format: + Path: ./pool/updates/main/3/389-ds-base/389-ds-base_1.4.0.21-1+deb10u1.dsc + Source: 389-ds-base + Source-Version: 1.4.0.21-1+deb10u1 + """ + parsing_dsc = False + for line in data.splitlines(): + if line.startswith('Path:'): + if line.endswith('.dsc'): + parsing_dsc = True + path = line.split(' ')[1].lstrip('./') + url = f'https://deb.debian.org/{repo}/{path}' + else: + parsing_dsc = False + elif line.startswith('Source:') and parsing_dsc: + source = line.split(' ')[1] + elif line.startswith('Source-Version:') and parsing_dsc: + version = line.split(' ')[1] + if not 
DSCs.get(source): + DSCs[source] = {} + if not DSCs[source].get(version): + DSCs[source][version] = {} + DSCs[source][version] = {'url': url} + parsing_dsc = False + + +def parse_debian_errata(advisories, accepted_codenames): + """ Parse Debian DSA/DLA files for security advisories + """ + distro_pattern = re.compile(r'^\t\[(.+?)\] - .*') + title_pattern = re.compile(r'^\[(.+?)\] (.+?) (.+?)[ ]+[-]+ (.*)') + errata = [] + e = {'packages': {}, 'cve_ids': [], 'releases': []} + for line in advisories.splitlines(): + if line.startswith('['): + errata = add_errata_by_codename(errata, e, accepted_codenames) + e = {'packages': {}, 'cve_ids': [], 'releases': []} + match = re.match(title_pattern, line) + if match: + e = parse_debian_erratum_advisory(e, match) + elif line.startswith('\t{'): + for cve_id in line.strip('\t{}').split(): + e['cve_ids'].append(cve_id) + elif line.startswith('\t['): + match = re.match(distro_pattern, line) + if match: + release = match.group(1) + e['releases'].append(release) + if not e.get('packages').get(release): + e['packages'][release] = [] + e['packages'][release].append(parse_debian_erratum_package(line, accepted_codenames)) + # add the last one + errata = add_errata_by_codename(errata, e, accepted_codenames) + return errata + + +def add_errata_by_codename(errata, e, accepted_codenames): + """ Get errata by codename and add to errata + """ + if e: + for release in e.get('releases'): + if release in accepted_codenames: + errata.append(e) + return errata + + +def parse_debian_erratum_advisory(e, match): + """ Parse the initial details for an erratum in a DSA/DLA file + Returns the updated dictionary + """ + date = match.group(1) + issue_date = int(datetime.strptime(date, '%d %b %Y').strftime('%s')) + erratum_name = match.group(2) + synopsis = match.group(4) + e['name'] = erratum_name + e['issue_date'] = issue_date + e['synopsis'] = synopsis + return e + + +def create_debian_errata(errata, accepted_codenames, concurrent_processing): + """ 
Create Debian Errata + """ + if concurrent_processing: + create_debian_errata_concurrently(errata, accepted_codenames) + else: + create_debian_errata_serially(errata, accepted_codenames) + + +def create_debian_errata_serially(errata, accepted_codenames): + """ Create Debian Errata Serially + """ + elen = len(errata) + pbar_start.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) + for i, erratum in enumerate(errata): + process_debian_erratum(erratum, accepted_codenames) + pbar_update.send(sender=None, index=i + 1) + + +def create_debian_errata_concurrently(errata, accepted_codenames): + """ Create Debian Errata concurrently + """ + connections.close_all() + elen = len(errata) + pbar_start.send(sender=None, ptext=f'Processing {elen} Debian Errata', plen=elen) + i = 0 + with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor: + futures = [executor.submit(process_debian_erratum, erratum, accepted_codenames) for erratum in errata] + for future in concurrent.futures.as_completed(futures): + i += 1 + pbar_update.send(sender=None, index=i + 1) + + +def process_debian_erratum(erratum, accepted_codenames): + """ Process a single Debian Erratum + """ + try: + from errata.utils import get_or_create_erratum + erratum_name = erratum.get('name') + e, created = get_or_create_erratum( + name=erratum_name, + e_type='security', + issue_date=erratum.get('issue_date'), + synopsis=erratum.get('synopsis'), + ) + e.add_reference('Link', f'https://security-tracker.debian.org/tracker/{erratum_name}') + for cve_id in erratum.get('cve_ids'): + e.add_cve(cve_id) + for codename, packages in erratum.get('packages').items(): + if codename not in accepted_codenames: + continue + osrelease = OSRelease.objects.get(codename=codename) + e.osreleases.add(osrelease) + for package in packages: + process_debian_erratum_fixed_packages(e, package) + except Exception as exc: + error_message.send(sender=None, text=exc) + + +def parse_debian_erratum_package(line, 
accepted_codenames): + """ Parse the codename and source package from a DSA/DLA file + Returns the source package and source version + """ + distro_package_pattern = re.compile(r'^\t\[(.+?)\] - (.+?) (.*)') + match = re.match(distro_package_pattern, line) + if match: + codename = match.group(1) + if codename in accepted_codenames: + source_package = match.group(2) + source_version = match.group(3) + fetch_debian_dsc_package_list(source_package, source_version) + return source_package, source_version + + +def get_debian_dsc_package_list(package, version): + """ Get the package list from a DSC file for a given source package/version + """ + if not DSCs.get(package) or not DSCs[package].get(version): + return + package_list = DSCs[package][version].get('package_list') + if package_list: + return package_list + + +def fetch_debian_dsc_package_list(package, version): + """ Fetch the package list from a DSC file for a given source package/version + """ + if not DSCs.get(package) or not DSCs[package].get(version): + warning_message.send(sender=None, text=f'No DSC found for {package} {version}') + return + source_url = DSCs[package][version]['url'] + res = get_url(source_url) + data = res.content + dsc = Dsc(data.decode()) + package_list = dsc.get('package-list') + DSCs[package][version]['package_list'] = package_list + + +def get_accepted_debian_codenames(): + """ Get acceptable Debian OS codenames + Can be overridden by specifying DEBIAN_CODENAMES in settings + """ + default_codenames = ['bookworm', 'bullseye'] + accepted_codenames = get_setting_of_type( + setting_name='DEBIAN_CODENAMES', + setting_type=list, + default=default_codenames, + ) + return accepted_codenames + + +def retrieve_debian_codenames(): + """ Returns the codename to version mapping + """ + distro_info_url = 'https://debian.pages.debian.net/distro-info-data/debian.csv' + res = get_url(distro_info_url) + debian_csv = fetch_content(res, 'Fetching Debian distro data') + reader = 
csv.DictReader(StringIO(debian_csv.decode())) + codename_to_version = {} + for row in reader: + version = row.get('version') + series = row.get('series') + codename_to_version[series] = version + return codename_to_version + + +def create_debian_os_releases(codename_to_version): + """ Create OSReleases for acceptable Debian codenames + """ + accepted_codenames = get_accepted_debian_codenames() + for codename, version in codename_to_version.items(): + if codename in accepted_codenames: + osrelease_name = f'Debian {version}' + get_or_create_osrelease(name=osrelease_name, codename=codename) + + +def process_debian_erratum_fixed_packages(e, package_data): + """ Process packages fixed in a Debian errata + """ + source_package, source_version = package_data + epoch, ver, rel = find_evr(source_version) + package_list = get_debian_dsc_package_list(source_package, source_version) + if not package_list: + return + fixed_packages = set() + for package in package_list: + if package.get('package-type') != 'deb': + continue + name = package.get('package') + arches = process_debian_dsc_arches(package.get('_other')) + for arch in arches: + fixed_package = get_or_create_package(name, epoch, ver, rel, arch, Package.DEB) + fixed_packages.add(fixed_package) + e.add_fixed_packages(fixed_packages) + + +def process_debian_dsc_arches(arches): + """ Process arches for dsc files + Return a list of arches for a given package in a dsc file + """ + arches = arches.replace('arch=', '') + accepted_arches = [] + # https://www.debian.org/ports/ + official_ports = [ + 'amd64', + 'arm64', + 'armel', + 'armhf', + 'i386', + 'mips64el', + 'ppc64el', + 'riscv64', + 's390x', + ] + for arch in arches.split(','): + if arch == 'any': + return official_ports + elif arch == 'all': + return ['all'] # architecture-independent packages + elif arch in official_ports: + accepted_arches.append(arch) + continue + elif arch.startswith('any-'): + real_arch = arch.split('-')[1] + if real_arch in official_ports: + 
accepted_arches.append(real_arch) + continue + elif arch.endswith('-any'): + if arch.startswith('linux'): + return official_ports + return accepted_arches diff --git a/errata/sources/distros/rocky.py b/errata/sources/distros/rocky.py new file mode 100644 index 00000000..693d7b0c --- /dev/null +++ b/errata/sources/distros/rocky.py @@ -0,0 +1,254 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import json +import concurrent.futures +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential + +from django.db import connections +from django.db.utils import OperationalError + +from operatingsystems.utils import get_or_create_osrelease +from packages.models import Package +from packages.utils import parse_package_string, get_or_create_package +from patchman.signals import pbar_start, pbar_update +from util import get_url, fetch_content, info_message, error_message + + +def update_rocky_errata(concurrent_processing=True): + """ Update Rocky Linux errata + """ + rocky_errata_api_host = 'https://apollo.build.resf.org' + rocky_errata_api_url = '/api/v3/' + if check_rocky_errata_endpoint_health(rocky_errata_api_host): + advisories = fetch_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, concurrent_processing) + process_rocky_errata(advisories, concurrent_processing) + + +def check_rocky_errata_endpoint_health(rocky_errata_api_host): + """ Check Rocky Linux errata 
def fetch_rocky_advisories(rocky_errata_api_host, rocky_errata_api_url, concurrent_processing):
    """ Fetch Rocky Linux advisories and return the list

    concurrent_processing selects between the threaded and the serial fetcher.
    """
    if concurrent_processing:
        return fetch_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url)
    else:
        return fetch_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url)


def fetch_rocky_advisories_serially(rocky_errata_api_host, rocky_errata_api_url):
    """ Fetch Rocky Linux advisories one page at a time and return the list
    """
    rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/'
    headers = {'Accept': 'application/json'}
    page = 1
    pages = None
    advisories = []
    params = {'page': 1, 'size': 100}
    while True:
        res = get_url(rocky_errata_advisories_url, headers=headers, params=params)
        data = fetch_content(res, f'Rocky Advisories {page}{"/"+pages if pages else ""}')
        advisories_dict = json.loads(data)
        advisories += advisories_dict.get('advisories')
        links = advisories_dict.get('links')
        if page == 1:
            # the total page count is only available from the `last` link;
            # record it once for progress reporting
            last_link = links.get('last')
            pages = last_link.split('=')[-1]
        next_link = links.get('next')
        if next_link:
            # the `next` link already carries the page/size query string
            rocky_errata_advisories_url = rocky_errata_api_host + next_link
            params = {}
            page += 1
        else:
            break
    return advisories


def fetch_rocky_advisories_concurrently(rocky_errata_api_host, rocky_errata_api_url):
    """ Fetch Rocky Linux advisories with a thread pool and return the list
    """
    rocky_errata_advisories_url = rocky_errata_api_host + rocky_errata_api_url + 'advisories/'
    headers = {'Accept': 'application/json'}
    params = {'page': 1, 'size': 100}
    # fetch the first page to learn the total page count from the `last` link
    res = get_url(rocky_errata_advisories_url, headers=headers, params=params)
    data = fetch_content(res, 'Rocky Advisories Page 1')
    advisories_dict = json.loads(data)
    advisories = advisories_dict.get('advisories')
    links = advisories_dict.get('links')
    last_link = links.get('last')
    pages = int(last_link.split('=')[-1])
    ptext = 'Fetching Rocky Advisories'
    pbar_start.send(sender=None, ptext=ptext, plen=pages)
    pbar_update.send(sender=None, index=1)
    i = 1
    with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor:
        # page 1 was already fetched above (previously it was fetched twice),
        # so only pages 2..pages are submitted to the pool
        futures = [executor.submit(get_rocky_advisory, rocky_errata_advisories_url, page)
                   for page in range(2, pages + 1)]
        for future in concurrent.futures.as_completed(futures):
            advisories += future.result()
            i += 1
            # bugfix: index previously overshot plen (i was incremented and
            # then index=i + 1 was sent)
            pbar_update.send(sender=None, index=i)
    return advisories


def get_rocky_advisory(rocky_errata_advisories_url, page):
    """ Fetch a single page of Rocky Linux advisories
    """
    headers = {'Accept': 'application/json'}
    params = {'page': page, 'size': 100}
    res = get_url(rocky_errata_advisories_url, headers=headers, params=params)
    advisories_dict = json.loads(res.content)
    return advisories_dict.get('advisories')


def process_rocky_errata(advisories, concurrent_processing):
    """ Process Rocky Linux Errata
    """
    if concurrent_processing:
        process_rocky_errata_concurrently(advisories)
    else:
        process_rocky_errata_serially(advisories)


def process_rocky_errata_serially(advisories):
    """ Process Rocky Linux errata serially
    """
    elen = len(advisories)
    pbar_start.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen)
    for i, advisory in enumerate(advisories):
        process_rocky_erratum(advisory)
        pbar_update.send(sender=None, index=i + 1)


def process_rocky_errata_concurrently(advisories):
    """ Process Rocky Linux errata with a process pool
    """
    # close inherited db connections so each forked worker opens its own
    connections.close_all()
    elen = len(advisories)
    pbar_start.send(sender=None, ptext=f'Processing {elen} Rocky Errata', plen=elen)
    i = 0
    with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor:
        futures = [executor.submit(process_rocky_erratum, advisory) for advisory in advisories]
        for future in concurrent.futures.as_completed(futures):
            i += 1
            # bugfix: was index=i + 1 after the increment, overshooting plen
            pbar_update.send(sender=None, index=i)


@retry(
    retry=retry_if_exception_type(OperationalError),
    stop=stop_after_attempt(5),
    wait=wait_exponential(multiplier=1, min=2, max=15),
)
def process_rocky_erratum(advisory):
    """ Process a single Rocky Linux erratum

    Retried on database OperationalError (e.g. lock contention when running
    in the process pool) with exponential backoff.
    """
    from errata.utils import get_or_create_erratum  # local import avoids a circular import
    try:
        erratum_name = advisory.get('name')
        e_type = advisory.get('kind').lower().replace(' ', '')
        issue_date = advisory.get('published_at')
        synopsis = advisory.get('synopsis')
        e, created = get_or_create_erratum(
            name=erratum_name,
            e_type=e_type,
            issue_date=issue_date,
            synopsis=synopsis,
        )
        add_rocky_erratum_references(e, advisory)
        add_rocky_erratum_oses(e, advisory)
        add_rocky_erratum_packages(e, advisory)
    except Exception as exc:
        error_message.send(sender=None, text=exc)


def add_rocky_erratum_references(e, advisory):
    """ Add Rocky Linux errata references and CVEs
    """
    # bugfix: these were plain strings containing a literal `{e.name}`;
    # they must be f-strings to interpolate the erratum name
    e.add_reference('Rocky Advisory', f'https://apollo.build.resf.org/{e.name}')
    e.add_reference('Rocky Advisory', f'https://errata.rockylinux.org/{e.name}')
    advisory_cves = advisory.get('cves')
    for a_cve in advisory_cves:
        cve_id = a_cve.get('cve')
        e.add_cve(cve_id)
    fixes = advisory.get('fixes')
    for fix in fixes:
        url = fix.get('source')
        e.add_reference('Bug Report', url)


def add_rocky_erratum_oses(e, advisory):
    """ Update OS Variant, OS Release and MachineArch for Rocky Linux errata
    """
    affected_oses = advisory.get('affected_products')
    for affected_os in affected_oses:
        variant = affected_os.get('variant')
        major_version = affected_os.get('major_version')
        osrelease_name = f'{variant} {major_version}'
        osrelease = get_or_create_osrelease(name=osrelease_name)
        e.osreleases.add(osrelease)


def add_rocky_erratum_packages(e, advisory):
    """ Parse and add packages for Rocky Linux errata
    """
    from modules.utils import get_matching_modules  # local import avoids a circular import
    packages = advisory.get('packages')
    fixed_packages = set()
    for package in packages:
        package_name = package.get('nevra')
        if package_name:
            name, epoch, ver, rel, dist, arch = parse_package_string(package_name)
            p_type = Package.RPM
            fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type)
            fixed_packages.add(fixed_package)
            # module matching stays inside this branch: it needs `arch` and
            # `fixed_package`, which only exist when a nevra was parsed
            module_name = package.get('module_name')
            module_context = package.get('module_context')
            module_stream = package.get('module_stream')
            module_version = package.get('module_version')
            if module_name and module_context and module_stream and module_version:
                matching_modules = get_matching_modules(
                    module_name,
                    module_stream,
                    module_version,
                    module_context,
                    arch,
                )
                for match in matching_modules:
                    match.packages.add(fixed_package)
    e.add_fixed_packages(fixed_packages)
import concurrent.futures
import csv
import os
import json
from io import StringIO
from urllib.parse import urlparse

from django.db import connections

from operatingsystems.models import OSRelease, OSVariant
from operatingsystems.utils import get_or_create_osrelease
from packages.models import Package
from packages.utils import get_or_create_package, parse_package_string, find_evr, get_matching_packages
from util import get_url, fetch_content, get_sha256, bunzip2, get_setting_of_type
from patchman.signals import error_message, pbar_start, pbar_update


def update_ubuntu_errata(concurrent_processing=False):
    """ Update Ubuntu errata

    Fetches the USN database, verifies its sha256 checksum and parses it.
    """
    codenames = retrieve_ubuntu_codenames()
    create_ubuntu_os_releases(codenames)
    data = fetch_ubuntu_usn_db()
    if data:
        expected_checksum = fetch_ubuntu_usn_db_checksum()
        actual_checksum = get_sha256(data)
        if actual_checksum == expected_checksum:
            parse_usn_data(data, concurrent_processing)
        else:
            e = 'Ubuntu USN DB checksum mismatch, skipping Ubuntu errata parsing\n'
            e += f'{actual_checksum} (actual) != {expected_checksum} (expected)'
            error_message.send(sender=None, text=e)


def fetch_ubuntu_usn_db():
    """ Fetch the Ubuntu USN database (bzip2-compressed json)
    """
    ubuntu_usn_db_json_url = 'https://usn.ubuntu.com/usn-db/database.json.bz2'
    res = get_url(ubuntu_usn_db_json_url)
    return fetch_content(res, 'Fetching Ubuntu Errata')


def fetch_ubuntu_usn_db_checksum():
    """ Fetch the sha256 checksum of the Ubuntu USN database
    """
    ubuntu_usn_db_checksum_url = 'https://usn.ubuntu.com/usn-db/database.json.bz2.sha256'
    res = get_url(ubuntu_usn_db_checksum_url)
    # checksum file format is `<hash>  <filename>`; only the hash is needed
    return fetch_content(res, 'Fetching Ubuntu Errata Checksum').decode().split()[0]


def parse_usn_data(data, concurrent_processing):
    """ Parse the Ubuntu USN data
    """
    accepted_releases = get_accepted_ubuntu_codenames()
    extracted = bunzip2(data).decode()
    advisories = json.loads(extracted)
    if concurrent_processing:
        parse_usn_data_concurrently(advisories, accepted_releases)
    else:
        parse_usn_data_serially(advisories, accepted_releases)


def parse_usn_data_serially(advisories, accepted_releases):
    """ Parse the Ubuntu USN data serially
    """
    elen = len(advisories)
    pbar_start.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen)
    for i, (usn_id, advisory) in enumerate(advisories.items()):
        process_usn(usn_id, advisory, accepted_releases)
        pbar_update.send(sender=None, index=i + 1)


def parse_usn_data_concurrently(advisories, accepted_releases):
    """ Parse the Ubuntu USN data with a process pool
    """
    # close inherited db connections so each forked worker opens its own
    connections.close_all()
    elen = len(advisories)
    pbar_start.send(sender=None, ptext=f'Processing {elen} Ubuntu Errata', plen=elen)
    i = 0
    with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor:
        futures = [executor.submit(process_usn, usn_id, advisory, accepted_releases)
                   for usn_id, advisory in advisories.items()]
        for future in concurrent.futures.as_completed(futures):
            i += 1
            # bugfix: was index=i + 1 after the increment, overshooting plen
            pbar_update.send(sender=None, index=i)


def process_usn(usn_id, advisory, accepted_releases):
    """ Process a single USN advisory
    """
    from errata.utils import get_or_create_erratum  # local import avoids a circular import
    try:
        affected_releases = advisory.get('releases', {}).keys()
        if not release_is_affected(affected_releases, accepted_releases):
            return
        name = f'USN-{usn_id}'
        issue_date = int(advisory.get('timestamp'))
        synopsis = advisory.get('title')
        e, created = get_or_create_erratum(
            name=name,
            e_type='security',
            issue_date=issue_date,
            synopsis=synopsis,
        )
        add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases)
        add_ubuntu_erratum_references(e, usn_id, advisory)
        add_ubuntu_erratum_packages(e, advisory)
    except Exception as exc:
        error_message.send(sender=None, text=exc)


def add_ubuntu_erratum_osreleases(e, affected_releases, accepted_releases):
    """ Add Ubuntu erratum OSReleases
    """
    for release in affected_releases:
        if release in accepted_releases:
            osrelease = OSRelease.objects.get(codename=release)
            e.osreleases.add(osrelease)


def release_is_affected(affected_releases, accepted_releases):
    """ Check if any accepted release is affected by the erratum
    """
    for release in affected_releases:
        if release in accepted_releases:
            return True
    return False


def add_ubuntu_erratum_references(e, usn_id, advisory):
    """ Add Ubuntu erratum references and CVEs
    """
    usn_url = f'https://ubuntu.com/security/notices/USN-{usn_id}'
    e.add_reference('USN', usn_url)
    cve_ids = advisory.get('cves')
    if cve_ids:
        for cve_id in cve_ids:
            if cve_id.startswith('CVE'):
                e.add_cve(cve_id)
            else:
                # the `cves` list also contains plain URLs (e.g. launchpad bugs)
                e.add_reference('Link', cve_id)


def add_ubuntu_erratum_packages(e, advisory):
    """ Add Ubuntu erratum packages
    """
    affected_releases = advisory.get('releases')
    p_type = Package.DEB
    fixed_packages = set()
    # hoisted out of the loop: the accepted codenames do not change per release
    accepted_codenames = get_accepted_ubuntu_codenames()
    for release, packages in affected_releases.items():
        if release not in accepted_codenames:
            continue
        arches = packages.get('archs')
        if arches:
            for arch, urls in arches.items():
                for url in urls.get('urls'):
                    path = urlparse(url).path
                    package_name = os.path.basename(path)
                    if package_name.endswith('.deb'):
                        name, epoch, ver, rel, dist, arch = parse_package_string(package_name)
                        fixed_package = get_or_create_package(name, epoch, ver, rel, arch, p_type)
                        fixed_packages.add(fixed_package)
        else:
            # bugfix: guard against either key being absent/None, which made
            # `binaries | allbinaries` raise a TypeError
            binaries = packages.get('binaries') or {}
            allbinaries = packages.get('allbinaries') or {}
            for package_name, package_data in (binaries | allbinaries).items():
                # we don't know the architecture so this requires the packages to
                # exist (e.g. on a host or a mirror) to be captured
                epoch, ver, rel = find_evr(package_data.get('version'))
                matching_packages = get_matching_packages(
                    name=package_name,
                    epoch=epoch,
                    version=ver,
                    release=rel,
                    p_type=p_type,
                )
                for fixed_package in matching_packages:
                    fixed_packages.add(fixed_package)
    e.add_fixed_packages(fixed_packages)


def get_accepted_ubuntu_codenames():
    """ Get acceptable Ubuntu OS codenames
    Can be overridden by specifying UBUNTU_CODENAMES in settings
    """
    default_codenames = ['focal', 'jammy', 'noble']
    accepted_codenames = get_setting_of_type(
        setting_name='UBUNTU_CODENAMES',
        setting_type=list,
        default=default_codenames,
    )
    return accepted_codenames


def retrieve_ubuntu_codenames():
    """ Returns the codename to version mapping
    """
    distro_info_url = 'https://debian.pages.debian.net/distro-info-data/ubuntu.csv'
    res = get_url(distro_info_url)
    ubuntu_csv = fetch_content(res, 'Fetching Ubuntu distro data')
    reader = csv.DictReader(StringIO(ubuntu_csv.decode()))
    codename_to_version = {}
    for row in reader:
        version = row.get('version')
        series = row.get('series')
        codename_to_version[series] = version
    return codename_to_version


def create_ubuntu_os_releases(codename_to_version):
    """ Create OSReleases for acceptable Ubuntu codenames and attach any
    matching OSVariants to them
    """
    accepted_codenames = get_accepted_ubuntu_codenames()
    for codename, version in codename_to_version.items():
        if codename in accepted_codenames:
            osrelease_name = f'Ubuntu {version}'
            osrelease = get_or_create_osrelease(name=osrelease_name, codename=codename)
            # variant names do not carry the ` LTS` suffix, so strip it for matching
            for osvariant in OSVariant.objects.filter(name__startswith=osrelease_name.replace(' LTS', '')):
                osvariant.osrelease = osrelease
                osvariant.save()
--- /dev/null +++ b/errata/sources/repos/yum.py @@ -0,0 +1,231 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
from celery import shared_task

from errata.sources.distros.arch import update_arch_errata
from errata.sources.distros.alma import update_alma_errata
from errata.sources.distros.debian import update_debian_errata
from errata.sources.distros.centos import update_centos_errata
from errata.sources.distros.rocky import update_rocky_errata
from errata.sources.distros.ubuntu import update_ubuntu_errata
from patchman.signals import error_message
from repos.models import Repository
from security.tasks import update_cves, update_cwes
from util import get_setting_of_type


@shared_task
def update_yum_repo_errata(repo_id=None, force=False):
    """ Update yum repo errata

    repo_id: refresh a single Repository by id, otherwise all RPM repos
    force: passed through to Repository.refresh_errata
    """
    if repo_id:
        repo = Repository.objects.get(id=repo_id)
        repo.refresh_errata(force)
    else:
        for repo in Repository.objects.filter(repotype=Repository.RPM):
            repo.refresh_errata(force)


@shared_task
def update_errata(erratum_type=None, force=False, repo=None):
    """ Update all distros errata

    erratum_type: restrict the run to a single source type; otherwise the
    ERRATA_OS_UPDATES setting (or its default) selects the sources.
    An unknown erratum_type is reported and nothing is updated.
    """
    errata_os_updates = []
    erratum_types = ['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian', 'centos']
    erratum_type_defaults = ['yum', 'rocky', 'alma', 'arch', 'ubuntu', 'debian']
    if erratum_type:
        if erratum_type not in erratum_types:
            error_message.send(sender=None, text=f'Erratum type `{erratum_type}` not in {erratum_types}')
        else:
            # bugfix: this was `errata_os_updates = erratum_type` (a plain
            # string), which turned the membership checks below into
            # substring matches instead of list membership
            errata_os_updates = [erratum_type]
    else:
        errata_os_updates = get_setting_of_type(
            setting_name='ERRATA_OS_UPDATES',
            setting_type=list,
            default=erratum_type_defaults,
        )
    if 'yum' in errata_os_updates:
        update_yum_repo_errata(repo_id=repo, force=force)
    if 'arch' in errata_os_updates:
        update_arch_errata()
    if 'alma' in errata_os_updates:
        update_alma_errata()
    if 'rocky' in errata_os_updates:
        update_rocky_errata()
    if 'debian' in errata_os_updates:
        update_debian_errata()
    if 'ubuntu' in errata_os_updates:
        update_ubuntu_errata()
    if 'centos' in errata_os_updates:
        update_centos_errata()
+@shared_task +def update_errata_and_cves(): + """ Task to update all errata + """ + update_errata.delay() + update_cves.delay() + update_cwes.delay() diff --git a/errata/templates/errata/erratum_detail.html b/errata/templates/errata/erratum_detail.html new file mode 100644 index 00000000..4738154e --- /dev/null +++ b/errata/templates/errata/erratum_detail.html @@ -0,0 +1,86 @@ +{% extends "base.html" %} + +{% block page_title %}Erratum - {{ erratum }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Errata
  • {{ erratum }}
  • {% endblock %} + +{% block content_title %} Erratum - {{ erratum }} {% endblock %} + +{% block content %} + + + +
    +
    +
    + + + + + + + + + + + + + + + + + + + +
    Name {{ erratum.name }}
    Type {{ erratum.e_type }}
    Published Date{{ erratum.issue_date|date|default_if_none:'' }}
    Synopsis {{ erratum.synopsis }}
    Packages Affected {{ erratum.affected_packages.count }}
    Packages Fixed {{ erratum.fixed_packages.count }}
    OS Releases Affected + {% for osrelease in erratum.osreleases.all %} + {{ osrelease }}
    + {% endfor %} +
    CVEs + + {% for cve in erratum.cves.all %} + + {% endfor %} +
    {{ cve }}
    +
    References + + + {% for reference in erratum.references.all %} + + + + + {% endfor %} +
    osv.devhttps://osv.dev/vulnerability/{{ erratum.name }}
    {{ reference.ref_type }}{{reference.url}}
    +
    +
    +
    +
    +
    +
    + {% for package in erratum.affected_packages.all %} + + {{ package }} + + {% endfor %} +
    +
    +
    +
    +
    +
    + {% for package in erratum.fixed_packages.all %} + + {{ package }} + + {% endfor %} +
    +
    +
    +
    + +{% endblock %} diff --git a/errata/templates/errata/erratum_list.html b/errata/templates/errata/erratum_list.html new file mode 100644 index 00000000..ef732386 --- /dev/null +++ b/errata/templates/errata/erratum_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}Errata{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Errata
  • {% endblock %} + +{% block content_title %} Errata {% endblock %} diff --git a/errata/templates/errata/erratum_table.html b/errata/templates/errata/erratum_table.html new file mode 100644 index 00000000..c319cbb5 --- /dev/null +++ b/errata/templates/errata/erratum_table.html @@ -0,0 +1,31 @@ +{% load common %} + + + + + + + + + + + + + + + + {% for erratum in object_list %} + + + + + + + + + + + + {% endfor %} + +
    IDTypePublished DateSynopsisPackages AffectedPackages FixedOS Releases AffectedCVEsReferences
    {{ erratum.name }}{{ erratum.e_type }}{{ erratum.issue_date|date|default_if_none:'' }}{{ erratum.synopsis }}{% with count=erratum.affected_packages.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.fixed_packages.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.osreleases.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.cves.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=erratum.references.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}
    diff --git a/errata/urls.py b/errata/urls.py new file mode 100644 index 00000000..6ec1cac0 --- /dev/null +++ b/errata/urls.py @@ -0,0 +1,26 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see <https://www.gnu.org/licenses/> + +from django.urls import path + +from errata import views + +app_name = 'errata' + +urlpatterns = [ + path('', views.erratum_list, name='erratum_list'), + path('errata/<str:erratum_name>/', views.erratum_detail, name='erratum_detail'), +] diff --git a/errata/utils.py b/errata/utils.py new file mode 100644 index 00000000..d8099db4 --- /dev/null +++ b/errata/utils.py @@ -0,0 +1,101 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
import concurrent.futures

from django.db import connections

from util import tz_aware_datetime
from errata.models import Erratum
from packages.models import PackageUpdate
from patchman.signals import pbar_start, pbar_update, warning_message


def get_or_create_erratum(name, e_type, issue_date, synopsis):
    """ Get or create an Erratum object. Returns the object and created
    """
    try:
        erratum = Erratum.objects.get(name=name)
    except Erratum.DoesNotExist:
        return Erratum.objects.get_or_create(
            name=name,
            e_type=e_type,
            issue_date=tz_aware_datetime(issue_date),
            synopsis=synopsis,
        )
    aware_date = tz_aware_datetime(issue_date)
    dirty = False
    if erratum.e_type != e_type:
        warning_message.send(sender=None, text=f'Updating {name} type `{erratum.e_type}` -> `{e_type}`')
        erratum.e_type = e_type
        dirty = True
    # if it's +/- 1 day we don't update it, just use whichever was the first one
    # different sources are generated at different times
    # e.g. yum updateinfo vs website errata info
    if abs(erratum.issue_date.date() - aware_date.date()).days > 1:
        text = f'Updating {name} issue date `{erratum.issue_date.date()}` -> `{aware_date.date()}`'
        warning_message.send(sender=None, text=text)
        erratum.issue_date = aware_date
        dirty = True
    if erratum.synopsis != synopsis:
        warning_message.send(sender=None, text=f'Updating {name} synopsis `{erratum.synopsis}` -> `{synopsis}`')
        erratum.synopsis = synopsis
        dirty = True
    if dirty:
        erratum.save()
    return erratum, False


def mark_errata_security_updates():
    """ For each set of erratum packages, modify any PackageUpdate that
    should be marked as a security update.
    """
    # close inherited db connections so each forked worker opens its own
    connections.close_all()
    elen = Erratum.objects.count()
    pbar_start.send(sender=None, ptext=f'Scanning {elen} Errata for security updates', plen=elen)
    with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor:
        futures = [executor.submit(e.scan_for_security_updates) for e in Erratum.objects.all()]
        for done, future in enumerate(concurrent.futures.as_completed(futures)):
            pbar_update.send(sender=None, index=done + 1)


def scan_package_updates_for_affected_packages():
    """ Scan PackageUpdates for packages affected by errata
    """
    plen = PackageUpdate.objects.count()
    pbar_start.send(sender=None, ptext=f'Scanning {plen} Updates for affected packages', plen=plen)
    index = 0
    for update in PackageUpdate.objects.all():
        index += 1
        pbar_update.send(sender=None, index=index)
        for erratum in update.newpackage.provides_fix_in_erratum.all():
            erratum.affected_packages.add(update.oldpackage)


def enrich_errata():
    """ Enrich Errata with data from osv.dev
    """
    # close inherited db connections so each forked worker opens its own
    connections.close_all()
    elen = Erratum.objects.count()
    pbar_start.send(sender=None, ptext=f'Adding osv.dev data to {elen} Errata', plen=elen)
    with concurrent.futures.ProcessPoolExecutor(max_workers=25) as executor:
        futures = [executor.submit(e.fetch_osv_dev_data) for e in Erratum.objects.all()]
        for done, future in enumerate(concurrent.futures.as_completed(futures)):
            pbar_update.send(sender=None, index=done + 1)
from django.shortcuts import get_object_or_404, render
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db.models import Q

from rest_framework import viewsets

from operatingsystems.models import OSRelease
from errata.models import Erratum
from errata.serializers import ErratumSerializer
from util.filterspecs import Filter, FilterBar


@login_required
def erratum_list(request):
    """ Paginated, filterable list of errata

    Supported GET filters: e_type, reference_id, cve_id,
    package_id (+ type=affected|fixed), osrelease_id, host, search
    """
    errata = Erratum.objects.select_related()

    if 'e_type' in request.GET:
        errata = errata.filter(e_type=request.GET['e_type']).distinct()

    if 'reference_id' in request.GET:
        errata = errata.filter(references=request.GET['reference_id'])

    if 'cve_id' in request.GET:
        errata = errata.filter(cves__cve_id=request.GET['cve_id'])

    if 'package_id' in request.GET:
        # bugfix: use .get() so a missing `type` parameter does not raise
        # MultiValueDictKeyError when only package_id is supplied
        package_filter = request.GET.get('type')
        if package_filter == 'affected':
            errata = errata.filter(affected_packages=request.GET['package_id'])
        elif package_filter == 'fixed':
            errata = errata.filter(fixed_packages=request.GET['package_id'])

    if 'osrelease_id' in request.GET:
        errata = errata.filter(osreleases=request.GET['osrelease_id'])

    if 'host' in request.GET:
        errata = errata.filter(host__hostname=request.GET['host'])

    if 'search' in request.GET:
        terms = request.GET['search'].lower()
        query = Q()
        # every whitespace-separated term must match name or synopsis
        for term in terms.split(' '):
            q = Q(name__icontains=term) | Q(synopsis__icontains=term)
            query = query & q
        errata = errata.filter(query)
    else:
        terms = ''

    page_no = request.GET.get('page')
    paginator = Paginator(errata, 50)

    try:
        page = paginator.page(page_no)
    except PageNotAnInteger:
        page = paginator.page(1)
    except EmptyPage:
        page = paginator.page(paginator.num_pages)

    filter_list = []
    filter_list.append(Filter(request, 'Erratum Type', 'e_type',
                              Erratum.objects.values_list('e_type', flat=True).distinct()))
    filter_list.append(Filter(request, 'OS Release', 'osrelease_id',
                              OSRelease.objects.filter(erratum__in=errata)))
    filter_bar = FilterBar(request, filter_list)

    return render(request,
                  'errata/erratum_list.html',
                  {'page': page,
                   'filter_bar': filter_bar,
                   'terms': terms})


@login_required
def erratum_detail(request, erratum_name):
    """ Detail view for a single erratum, looked up by name """
    erratum = get_object_or_404(Erratum, name=erratum_name)
    return render(request,
                  'errata/erratum_detail.html',
                  {'erratum': erratum})


class ErratumViewSet(viewsets.ModelViewSet):
    """ API endpoint that allows errata to be viewed or edited.
    """
    queryset = Erratum.objects.all()
    serializer_class = ErratumSerializer
False -# Enable memcached CACHES = { 'default': { - 'BACKEND': 'django.core.cache.backends.memcached.PyMemcacheCache', - 'LOCATION': '127.0.0.1:11211', - 'OPTIONS': { - 'ignore_exc': True, - }, + 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', } } + +# Uncomment to enable redis caching for e.g. 30 seconds +# Note that the UI results may be out of date for this amount of time +# CACHES = { +# 'default': { +# 'BACKEND': 'django.core.cache.backends.redis.RedisCache', +# 'LOCATION': 'redis://127.0.0.1:6379', +# 'TIMEOUT': 30, +# } +# } + +from datetime import timedelta # noqa +from celery.schedules import crontab # noqa +CELERY_BEAT_SCHEDULE = { 'process_all_unprocessed_reports': { 'task': 'reports.tasks.process_reports', 'schedule': crontab(minute='*/5'), }, 'refresh_repos_daily': { 'task': 'repos.tasks.refresh_repos', 'schedule': crontab(hour=4, minute=00), }, 'update_errata_cves_cwes_every_12_hours': { 'task': 'errata.tasks.update_errata_and_cves', 'schedule': timedelta(hours=12), }, 'run_database_maintenance_daily': { 'task': 'util.tasks.clean_database', 'schedule': crontab(hour=6, minute=00), }, 'remove_old_reports': { 'task': 'reports.tasks.remove_reports_with_no_hosts', 'schedule': timedelta(days=7), }, 'find_host_updates': { 'task': 'hosts.tasks.find_all_host_updates_homogenous', 'schedule': timedelta(hours=24), }, +} diff --git a/etc/systemd/system/patchman-celery.service b/etc/systemd/system/patchman-celery.service new file mode 100644 index 00000000..6408d818 --- /dev/null +++ b/etc/systemd/system/patchman-celery.service @@ -0,0 +1,14 @@ +[Unit] +Description=Patchman Celery Service +Requires=network-online.target +After=network-online.target + +[Service] +Type=simple +User=patchman-celery +Group=patchman-celery +EnvironmentFile=/etc/patchman/celery.conf +ExecStart=/usr/bin/celery --broker redis://${REDIS_HOST}:${REDIS_PORT}/0 --app patchman worker --loglevel info --beat --scheduler 
django_celery_beat.schedulers:DatabaseScheduler --task-events --pool threads + +[Install] +WantedBy=multi-user.target diff --git a/hooks/yum/patchman.py b/hooks/yum/patchman.py index c59af372..343144eb 100644 --- a/hooks/yum/patchman.py +++ b/hooks/yum/patchman.py @@ -27,5 +27,5 @@ def posttrans_hook(conduit): 'servicecmd', '/usr/sbin/patchman-client') args = '-n' - command = f'{servicecmd!s} {args!s}> /dev/null' + command = f'{servicecmd} {args}> /dev/null' os.system(command) diff --git a/hooks/zypper/patchman.py b/hooks/zypper/patchman.py index 3d8f5da9..14781565 100755 --- a/hooks/zypper/patchman.py +++ b/hooks/zypper/patchman.py @@ -25,24 +25,24 @@ class MyPlugin(Plugin): - def PLUGINBEGIN(self, headers, body): + def PLUGINBEGIN(self, headers, body): # noqa logging.info('PLUGINBEGIN') - logging.debug(f'headers: {headers!s}') + logging.debug(f'headers: {headers}') self.ack() - def PACKAGESETCHANGED(self, headers, body): + def PACKAGESETCHANGED(self, headers, body): # noqa logging.info('PACKAGESETCHANGED') - logging.debug(f'headers: {headers!s}') + logging.debug(f'headers: {headers}') print('Sending report to patchman server...') servicecmd = '/usr/sbin/patchman-client' args = '-n' - command = f'{servicecmd!s} {args!s}> /dev/null' + command = f'{servicecmd} {args}> /dev/null' os.system(command) self.ack() - def PLUGINEND(self, headers, body): + def PLUGINEND(self, headers, body): # noqa logging.info('PLUGINEND') - logging.debug(f'headers: {headers!s}') + logging.debug(f'headers: {headers}') self.ack() diff --git a/hosts/forms.py b/hosts/forms.py index 115e23b1..931de03c 100644 --- a/hosts/forms.py +++ b/hosts/forms.py @@ -33,7 +33,7 @@ class Meta: fields = ('hostname', 'reversedns', 'ipaddress', - 'os', + 'osvariant', 'kernel', 'arch', 'reboot_required', diff --git a/hosts/migrations/0001_initial.py b/hosts/migrations/0001_initial.py index f8f8d45c..43366684 100644 --- a/hosts/migrations/0001_initial.py +++ b/hosts/migrations/0001_initial.py @@ -3,7 +3,11 @@ from 
django.db import migrations, models import django.db.models.deletion import django.utils.timezone -import tagging.fields +try: + import tagging.fields + has_tagging = True +except ImportError: + has_tagging = False class Migration(migrations.Migration): @@ -13,22 +17,25 @@ class Migration(migrations.Migration): dependencies = [ ] + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('hostname', models.CharField(max_length=255, unique=True)), + ('ipaddress', models.GenericIPAddressField()), + ('reversedns', models.CharField(blank=True, max_length=255, null=True)), + ('check_dns', models.BooleanField(default=False)), + ('kernel', models.CharField(max_length=255)), + ('lastreport', models.DateTimeField()), + ('reboot_required', models.BooleanField(default=False)), + ('host_repos_only', models.BooleanField(default=True)), + ('updated_at', models.DateTimeField(default=django.utils.timezone.now)), + ] + if has_tagging: + fields.append(('tags', tagging.fields.TagField(blank=True, max_length=255))) + operations = [ migrations.CreateModel( name='Host', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('hostname', models.CharField(max_length=255, unique=True)), - ('ipaddress', models.GenericIPAddressField()), - ('reversedns', models.CharField(blank=True, max_length=255, null=True)), - ('check_dns', models.BooleanField(default=False)), - ('kernel', models.CharField(max_length=255)), - ('lastreport', models.DateTimeField()), - ('reboot_required', models.BooleanField(default=False)), - ('host_repos_only', models.BooleanField(default=True)), - ('tags', tagging.fields.TagField(blank=True, max_length=255)), - ('updated_at', models.DateTimeField(default=django.utils.timezone.now)), - ], + fields=fields, options={ 'verbose_name': 'Host', 'verbose_name_plural': 'Hosts', diff --git a/hosts/migrations/0004_remove_host_tags_host_tags.py 
b/hosts/migrations/0004_remove_host_tags_host_tags.py new file mode 100644 index 00000000..84e7affe --- /dev/null +++ b/hosts/migrations/0004_remove_host_tags_host_tags.py @@ -0,0 +1,44 @@ +# Generated by Django 4.2.18 on 2025-02-04 23:37 + +from django.apps import apps +from django.db import migrations +import taggit.managers +try: + import tagging # noqa +except ImportError: + pass + + +def check_tagging_tag_field_exists(app_label, model_name, field_name): + Model = apps.get_model(app_label, model_name) + fields = Model._meta.get_fields() + for field in fields: + if field.name == field_name and 'tagging' in str(field.related_model): + return True + return False + + +class Migration(migrations.Migration): + + dependencies = [ + ('taggit', '0005_auto_20220424_2025'), + ('hosts', '0003_host_modules'), + ] + + if check_tagging_tag_field_exists('hosts', 'Host', 'tags'): + operations = [ + migrations.RemoveField( + model_name='host', + name='tags', + ) + ] + else: + operations = [] + + operations.append( + migrations.AddField( + model_name='host', + name='tags', + field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'), + ) + ) diff --git a/hosts/migrations/0005_rename_os_host_osvariant.py b/hosts/migrations/0005_rename_os_host_osvariant.py new file mode 100644 index 00000000..821c3224 --- /dev/null +++ b/hosts/migrations/0005_rename_os_host_osvariant.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:36 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('hosts', '0004_remove_host_tags_host_tags'), + ] + + operations = [ + migrations.RenameField( + model_name='host', + old_name='os', + new_name='osvariant', + ), + ] diff --git a/hosts/migrations/0006_migrate_to_tz_aware.py b/hosts/migrations/0006_migrate_to_tz_aware.py new file mode 100644 index 00000000..e36bbf1f --- /dev/null +++ 
b/hosts/migrations/0006_migrate_to_tz_aware.py @@ -0,0 +1,21 @@ +from django.db import migrations +from django.utils import timezone + +def make_datetimes_tz_aware(apps, schema_editor): + Host = apps.get_model('hosts', 'Host') + for host in Host.objects.all(): + if host.lastreport and timezone.is_naive(host.lastreport): + host.lastreport = timezone.make_aware(host.lastreport, timezone=timezone.get_default_timezone()) + host.save() + if host.updated_at and timezone.is_naive(host.updated_at): + host.updated_at = timezone.make_aware(host.updated_at, timezone=timezone.get_default_timezone()) + host.save() + +class Migration(migrations.Migration): + dependencies = [ + ('hosts', '0005_rename_os_host_osvariant'), + ] + + operations = [ + migrations.RunPython(make_datetimes_tz_aware, migrations.RunPython.noop), + ] diff --git a/hosts/migrations/0007_alter_host_tags.py b/hosts/migrations/0007_alter_host_tags.py new file mode 100644 index 00000000..3858b847 --- /dev/null +++ b/hosts/migrations/0007_alter_host_tags.py @@ -0,0 +1,20 @@ +# Generated by Django 4.2.19 on 2025-02-28 19:53 + +from django.db import migrations +import taggit.managers + + +class Migration(migrations.Migration): + + dependencies = [ + ('taggit', '0005_auto_20220424_2025'), + ('hosts', '0006_migrate_to_tz_aware'), + ] + + operations = [ + migrations.AlterField( + model_name='host', + name='tags', + field=taggit.managers.TaggableManager(blank=True, help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'), + ), + ] diff --git a/hosts/migrations/0008_alter_host_options.py b/hosts/migrations/0008_alter_host_options.py new file mode 100644 index 00000000..0952cf19 --- /dev/null +++ b/hosts/migrations/0008_alter_host_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('hosts', '0007_alter_host_tags'), + ] + + operations = [ + 
migrations.AlterModelOptions( + name='host', + options={'ordering': ['hostname'], 'verbose_name': 'Host', 'verbose_name_plural': 'Hosts'}, + ), + ] diff --git a/hosts/migrations/0009_host_errata.py b/hosts/migrations/0009_host_errata.py new file mode 100644 index 00000000..0f6dd1d4 --- /dev/null +++ b/hosts/migrations/0009_host_errata.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.19 on 2025-03-10 19:52 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('errata', '0006_alter_erratum_options'), + ('hosts', '0008_alter_host_options'), + ] + + operations = [ + migrations.AddField( + model_name='host', + name='errata', + field=models.ManyToManyField(blank=True, to='errata.erratum'), + ), + ] diff --git a/hosts/models.py b/hosts/models.py index b474619b..e207348b 100644 --- a/hosts/models.py +++ b/hosts/models.py @@ -15,7 +15,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -from django.db import models, IntegrityError, DatabaseError, transaction +from django.db import models from django.db.models import Q from django.urls import reverse from django.utils import timezone @@ -24,29 +24,29 @@ from version_utils.rpm import labelCompare except ImportError: from rpm import labelCompare -from tagging.fields import TagField +from taggit.managers import TaggableManager -from packages.models import Package, PackageUpdate -from domains.models import Domain -from repos.models import Repository -from operatingsystems.models import OS from arch.models import MachineArchitecture +from domains.models import Domain +from errata.models import Erratum +from hosts.utils import update_rdns from modules.models import Module -from patchman.signals import info_message, error_message +from operatingsystems.models import OSVariant +from packages.models import Package, PackageUpdate from packages.utils import get_or_create_package_update +from patchman.signals import 
info_message +from repos.models import Repository from repos.utils import find_best_repo from hosts.managers import HostManager -from hosts.utils import update_rdns, remove_reports class Host(models.Model): - objects = HostManager() hostname = models.CharField(max_length=255, unique=True) ipaddress = models.GenericIPAddressField() reversedns = models.CharField(max_length=255, blank=True, null=True) check_dns = models.BooleanField(default=False) - os = models.ForeignKey(OS, on_delete=models.CASCADE) + osvariant = models.ForeignKey(OSVariant, on_delete=models.CASCADE) kernel = models.CharField(max_length=255) arch = models.ForeignKey(MachineArchitecture, on_delete=models.CASCADE) domain = models.ForeignKey(Domain, on_delete=models.CASCADE) @@ -57,13 +57,16 @@ class Host(models.Model): updates = models.ManyToManyField(PackageUpdate, blank=True) reboot_required = models.BooleanField(default=False) host_repos_only = models.BooleanField(default=True) - tags = TagField() + tags = TaggableManager(blank=True) updated_at = models.DateTimeField(default=timezone.now) + errata = models.ManyToManyField(Erratum, blank=True) + + objects = HostManager() class Meta: verbose_name = 'Host' verbose_name_plural = 'Hosts' - ordering = ('hostname',) + ordering = ['hostname'] def __str__(self): return self.hostname @@ -71,21 +74,21 @@ def __str__(self): def show(self): """ Show info about this host """ - text = f'{self!s}:\n' - text += f'IP address : {self.ipaddress!s}\n' - text += f'Reverse DNS : {self.reversedns!s}\n' - text += f'Domain : {self.domain!s}\n' - text += f'OS : {self.os!s}\n' - text += f'Kernel : {self.kernel!s}\n' - text += f'Architecture : {self.arch!s}\n' - text += f'Last report : {self.lastreport!s}\n' - text += f'Packages : {self.get_num_packages()!s}\n' - text += f'Repos : {self.get_num_repos()!s}\n' - text += f'Updates : {self.get_num_updates()!s}\n' - text += f'Tags : {self.tags!s}\n' - text += f'Needs reboot : {self.reboot_required!s}\n' - text += f'Updated at : 
{self.updated_at!s}\n' - text += f'Host repos : {self.host_repos_only!s}\n' + text = f'{self}:\n' + text += f'IP address : {self.ipaddress}\n' + text += f'Reverse DNS : {self.reversedns}\n' + text += f'Domain : {self.domain}\n' + text += f'OS Variant : {self.osvariant}\n' + text += f'Kernel : {self.kernel}\n' + text += f'Architecture : {self.arch}\n' + text += f'Last report : {self.lastreport}\n' + text += f'Packages : {self.get_num_packages()}\n' + text += f'Repos : {self.get_num_repos()}\n' + text += f'Updates : {self.get_num_updates()}\n' + text += f'Tags : {self.tags}\n' + text += f'Needs reboot : {self.reboot_required}\n' + text += f'Updated at : {self.updated_at}\n' + text += f'Host repos : {self.host_repos_only}\n' info_message.send(sender=None, text=text) @@ -114,14 +117,21 @@ def check_rdns(self): info_message.send(sender=None, text='Reverse DNS matches') else: text = 'Reverse DNS mismatch found: ' - text += f'{self.hostname!s} != {self.reversedns!s}' + text += f'{self.hostname} != {self.reversedns}' info_message.send(sender=None, text=text) else: - info_message.send(sender=None, - text='Reverse DNS check disabled') + info_message.send(sender=None, text='Reverse DNS check disabled') - def clean_reports(self, timestamp): - remove_reports(self, timestamp) + def clean_reports(self): + """ Remove all but the last 3 reports for a host + """ + from reports.models import Report + reports = Report.objects.filter(host=self).order_by('-created')[3:] + rlen = reports.count() + for report in Report.objects.filter(host=self).order_by('-created')[3:]: + report.delete() + if rlen > 0: + info_message.send(sender=None, text=f'{self.hostname}: removed {rlen} old reports') def get_host_repo_packages(self): if self.host_repos_only: @@ -131,7 +141,7 @@ def get_host_repo_packages(self): mirror__repo__hostrepo__enabled=True) else: hostrepos_q = \ - Q(mirror__repo__osgroup__os__host=self, + Q(mirror__repo__osrelease__osvariant__host=self, mirror__repo__arch=self.arch, 
mirror__enabled=True, mirror__repo__enabled=True) | \ @@ -144,28 +154,17 @@ def process_update(self, package, highest_package): if self.host_repos_only: host_repos = Q(repo__host=self) else: - host_repos = \ - Q(repo__osgroup__os__host=self, repo__arch=self.arch) | \ - Q(repo__host=self) + host_repos = Q(repo__osrelease__osvariant__host=self, repo__arch=self.arch) | Q(repo__host=self) mirrors = highest_package.mirror_set.filter(host_repos) security = False - # if any of the containing repos are security, mark the update as - # security + # if any of the containing repos are security, mark the update as a security update for mirror in mirrors: if mirror.repo.security: security = True - update = get_or_create_package_update(oldpackage=package, - newpackage=highest_package, - security=security) - try: - with transaction.atomic(): - self.updates.add(update) - info_message.send(sender=None, text=f'{update!s}') - return update.id - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + update = get_or_create_package_update(oldpackage=package, newpackage=highest_package, security=security) + self.updates.add(update) + info_message.send(sender=None, text=f'{update}') + return update.id def find_updates(self): @@ -196,14 +195,11 @@ def find_updates(self): kernel_packages = self.packages.filter(kernels_q) if self.host_repos_only: - update_ids = self.find_host_repo_updates(host_packages, - repo_packages) + update_ids = self.find_host_repo_updates(host_packages, repo_packages) else: - update_ids = self.find_osgroup_repo_updates(host_packages, - repo_packages) + update_ids = self.find_osrelease_repo_updates(host_packages, repo_packages) - kernel_update_ids = self.find_kernel_updates(kernel_packages, - repo_packages) + kernel_update_ids = self.find_kernel_updates(kernel_packages, repo_packages) for ku_id in kernel_update_ids: update_ids.append(ku_id) @@ -228,10 +224,13 @@ def 
find_host_repo_updates(self, host_packages, repo_packages): priority = best_repo.priority # find the packages that are potential updates - pu_q = Q(name=package.name, - arch=package.arch, - packagetype=package.packagetype) - potential_updates = repo_packages.filter(pu_q) + pu_q = Q( + name=package.name, + arch=package.arch, + packagetype=package.packagetype, + category=package.category, + ) + potential_updates = repo_packages.filter(pu_q).exclude(version__startswith='9999') for pu in potential_updates: pu_is_module_package = False pu_in_enabled_modules = False @@ -243,36 +242,39 @@ def find_host_repo_updates(self, host_packages, repo_packages): if pu_is_module_package: if not pu_in_enabled_modules: continue - if highest_package.compare_version(pu) == -1 \ - and package.compare_version(pu) == -1: - - if priority is not None: - # proceed only if the package is from a repo with a - # priority and that priority is >= the repo priority - pu_best_repo = find_best_repo(pu, hostrepos) - if pu_best_repo: - pu_priority = pu_best_repo.priority - if pu_priority >= priority: - highest_package = pu - else: - highest_package = pu + if package.compare_version(pu) == -1: + # package updates that are fixed by erratum (may already be superceded by another update) + errata = pu.provides_fix_in_erratum.all() + if errata: + for erratum in errata: + self.errata.add(erratum) + if highest_package.compare_version(pu) == -1: + if priority is not None: + # proceed only if the package is from a repo with a + # priority and that priority is >= the repo priority + pu_best_repo = find_best_repo(pu, hostrepos) + if pu_best_repo: + pu_priority = pu_best_repo.priority + if pu_priority >= priority: + highest_package = pu + else: + highest_package = pu if highest_package != package: uid = self.process_update(package, highest_package) if uid is not None: update_ids.append(uid) - return update_ids - def find_osgroup_repo_updates(self, host_packages, repo_packages): + def 
find_osrelease_repo_updates(self, host_packages, repo_packages): update_ids = [] - for package in host_packages: highest_package = package # find the packages that are potential updates - pu_q = Q(name=package.name, arch=package.arch, + pu_q = Q(name=package.name, + arch=package.arch, packagetype=package.packagetype) potential_updates = repo_packages.filter(pu_q) for pu in potential_updates: @@ -286,15 +288,19 @@ def find_osgroup_repo_updates(self, host_packages, repo_packages): if pu_is_module_package: if not pu_in_enabled_modules: continue - if highest_package.compare_version(pu) == -1 \ - and package.compare_version(pu) == -1: - highest_package = pu + if package.compare_version(pu) == -1: + # package updates that are fixed by erratum (may already be superceded by another update) + errata = pu.provides_fix_in_erratum.all() + if errata: + for erratum in errata: + self.errata.add(erratum) + if highest_package.compare_version(pu) == -1: + highest_package = pu if highest_package != package: uid = self.process_update(package, highest_package) if uid is not None: update_ids.append(uid) - return update_ids def check_if_reboot_required(self, host_highest): @@ -306,11 +312,11 @@ def check_if_reboot_required(self, host_highest): self.reboot_required = True else: self.reboot_required = False + self.save() def find_kernel_updates(self, kernel_packages, repo_packages): update_ids = [] - for package in kernel_packages: host_highest = package repo_highest = package @@ -334,13 +340,6 @@ def find_kernel_updates(self, kernel_packages, repo_packages): update_ids.append(uid) self.check_if_reboot_required(host_highest) - - try: - with transaction.atomic(): - self.save() - except DatabaseError as e: - error_message.send(sender=None, text=e) - return update_ids @@ -351,7 +350,7 @@ class HostRepo(models.Model): priority = models.IntegerField(default=0) class Meta: - unique_together = ('host', 'repo') + unique_together = ['host', 'repo'] def __str__(self): - return 
f'{self.host!s}-{self.repo!s}' + return f'{self.host}-{self.repo}' diff --git a/hosts/tasks.py b/hosts/tasks.py new file mode 100755 index 00000000..2fdce96f --- /dev/null +++ b/hosts/tasks.py @@ -0,0 +1,81 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from celery import shared_task + +from django.db.models import Count + +from hosts.models import Host +from util import get_datetime_now +from patchman.signals import info_message + + +@shared_task +def find_host_updates(host_id): + """ Task to find updates for a host + """ + host = Host.objects.get(id=host_id) + host.find_updates() + + +@shared_task +def find_all_host_updates(): + """ Task to find updates for all hosts + """ + for host in Host.objects.all(): + find_host_updates.delay(host.id) + + +@shared_task +def find_all_host_updates_homogenous(): + """ Task to find updates for all hosts where hosts are expected to be homogenous + """ + updated_hosts = [] + ts = get_datetime_now() + for host in Host.objects.all(): + if host not in updated_hosts: + host.find_updates() + host.updated_at = ts + host.save() + + # only include hosts with the exact same number of packages + filtered_hosts = Host.objects.annotate( + packages_count=Count('packages')).filter( + packages_count=host.packages.count() + ) + # and exclude hosts with the current timestamp + filtered_hosts = filtered_hosts.exclude(updated_at=ts) + + packages = set(host.packages.all()) + repos = 
set(host.repos.all()) + updates = host.updates.all() + + phosts = [] + for fhost in filtered_hosts: + frepos = set(fhost.repos.all()) + if repos != frepos: + continue + fpackages = set(fhost.packages.all()) + if packages != fpackages: + continue + phosts.append(fhost) + + for phost in phosts: + phost.updates.set(updates) + phost.updated_at = ts + phost.save() + updated_hosts.append(phost) + info_message.send(sender=None, text=f'Added the same updates to {phost}') diff --git a/hosts/templates/hosts/host_delete.html b/hosts/templates/hosts/host_delete.html index 13d367f8..5f37d8ab 100644 --- a/hosts/templates/hosts/host_delete.html +++ b/hosts/templates/hosts/host_delete.html @@ -13,19 +13,17 @@
    - + - - + + @@ -35,7 +33,7 @@ - + - + {% endfor %} diff --git a/repos/templates/repos/mirror_table.html b/repos/templates/repos/mirror_table.html index 85ea4011..e5b40129 100644 --- a/repos/templates/repos/mirror_table.html +++ b/repos/templates/repos/mirror_table.html @@ -3,13 +3,13 @@ - + - + @@ -20,13 +20,17 @@ - + - + diff --git a/repos/templates/repos/repo_delete.html b/repos/templates/repos/repo_delete.html index ebe74fe7..7c09cf45 100644 --- a/repos/templates/repos/repo_delete.html +++ b/repos/templates/repos/repo_delete.html @@ -12,8 +12,8 @@
    Hostname {{ host.hostname }}
    Domain {{ host.domain }}
    Domain {{ host.domain }}
    Reporting IP Address {{ host.ipaddress }}
    Reverse DNS {{ host.reversedns }}
    OS {{ host.os }}
    OS Group {{ host.os.osgroup }}
    OS Release {{ host.osvariant.osrelease }}
    OS Variant {{ host.osvariant }}
    Kernel {{ host.kernel }}
    Architecture {{ host.arch }}
    Tags - {% load tagging_tags %} - {% tags_for_object host as tags %} - {% for tag in tags %} + {% for tag in host.tags.all %} {{ tag }} {% endfor %}
    Updates Available {{ host.updates.count }}
    Reboot Required {{ host.reboot_required }}
    Packages Installed {{ host.packages.count}}
    Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Group Repos{% endif %}
    Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Release Repos{% endif %}
    Last 3 reports diff --git a/hosts/templates/hosts/host_detail.html b/hosts/templates/hosts/host_detail.html index 57d7afae..f12bf22b 100644 --- a/hosts/templates/hosts/host_detail.html +++ b/hosts/templates/hosts/host_detail.html @@ -23,29 +23,28 @@
    - + - - + + + + - - + {% endfor %} {% endif %} - {% with osrepos=host.os.osgroup.repos.select_related %} - {% if osrepos and not host.host_repos_only %} - {% for osrepo in osrepos %} - {% if osrepo.arch == host.arch %} + {% with osrelease_repos=host.osvariant.osrelease.repos.select_related %} + {% if osrelease_repos and not host.host_repos_only %} + {% for osrelease_repo in osrelease_repos %} + {% if osrelease_repo.arch == host.arch or osrelease_repo.arch == 'any' %} - - + + - - + + {% endif %} {% endfor %} diff --git a/hosts/templates/hosts/host_table.html b/hosts/templates/hosts/host_table.html index a1777944..bebb7723 100644 --- a/hosts/templates/hosts/host_table.html +++ b/hosts/templates/hosts/host_table.html @@ -4,8 +4,9 @@ + - + @@ -16,8 +17,9 @@ + - + diff --git a/hosts/templatetags/report_alert.py b/hosts/templatetags/report_alert.py index 025e2cde..3a3e3a9a 100644 --- a/hosts/templatetags/report_alert.py +++ b/hosts/templatetags/report_alert.py @@ -17,13 +17,13 @@ from datetime import timedelta -from django.conf import settings - from django.template import Library from django.utils.html import format_html from django.templatetags.static import static from django.utils import timezone +from util import get_setting_of_type + register = Library() @@ -31,11 +31,11 @@ def report_alert(lastreport): html = '' alert_icon = static('img/icon-alert.gif') - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): - days = settings.DAYS_WITHOUT_REPORT - else: - days = 14 + days = get_setting_of_type( + setting_name='DAYS_WITHOUT_REPORT', + setting_type=int, + default=14, + ) if lastreport < (timezone.now() - timedelta(days=days)): - html = f'Outdated Report' + html = f'Outdated Report' return format_html(html) diff --git a/hosts/urls.py b/hosts/urls.py index 94954c48..b1521135 100644 --- a/hosts/urls.py +++ b/hosts/urls.py @@ -26,4 +26,5 @@ path('/', views.host_detail, name='host_detail'), path('/delete/', 
views.host_delete, name='host_delete'), path('/edit/', views.host_edit, name='host_edit'), + path('/updates/', views.host_find_updates, name='host_updates'), ] diff --git a/hosts/utils.py b/hosts/utils.py index 6dae1d77..b328129f 100644 --- a/hosts/utils.py +++ b/hosts/utils.py @@ -17,46 +17,59 @@ from socket import gethostbyaddr, gaierror, herror -from django.db import DatabaseError +from django.db import transaction, IntegrityError -from patchman.signals import progress_info_s, progress_update_s, error_message +from patchman.signals import error_message def update_rdns(host): """ Update the reverse DNS for a host """ - try: reversedns = str(gethostbyaddr(host.ipaddress)[0]) except (gaierror, herror): reversedns = 'None' host.reversedns = reversedns.lower() - try: - host.save() - except DatabaseError as e: - error_message.send(sender=None, text=e) + host.save() -def remove_reports(host, timestamp): - """ Remove all but the last 3 reports for a host +def get_or_create_host(report, arch, osvariant, domain): + """ Get or create a host from from a report """ - - from reports.models import Report - - reports = Report.objects.filter(host=host).order_by('-created')[:3] - report_ids = [] - - for report in reports: - report_ids.append(report.id) - report.accessed = timestamp + from hosts.models import Host + if not report.host: + try: + report.host = str(gethostbyaddr(report.report_ip)[0]) + except herror: + report.host = report.report_ip report.save() - - del_reports = Report.objects.filter(host=host).exclude(id__in=report_ids) - - rlen = del_reports.count() - ptext = f'Cleaning {rlen!s} old reports' - progress_info_s.send(sender=None, ptext=ptext, plen=rlen) - for i, report in enumerate(del_reports): - report.delete() - progress_update_s.send(sender=None, index=i + 1) + try: + with transaction.atomic(): + host, created = Host.objects.get_or_create( + hostname=report.host, + defaults={ + 'ipaddress': report.report_ip, + 'arch': arch, + 'osvariant': osvariant, + 'domain': 
domain, + 'lastreport': report.created, + } + ) + host.ipaddress = report.report_ip + host.kernel = report.kernel + host.arch = arch + host.osvariant = osvariant + host.domain = domain + host.lastreport = report.created + host.tags = report.tags + if report.reboot == 'True': + host.reboot_required = True + else: + host.reboot_required = False + host.save() + except IntegrityError as e: + error_message.send(sender=None, text=e) + if host: + host.check_rdns() + return host diff --git a/hosts/views.py b/hosts/views.py index 4ba28e71..faad410b 100644 --- a/hosts/views.py +++ b/hosts/views.py @@ -22,14 +22,14 @@ from django.db.models import Q from django.contrib import messages -from tagging.models import Tag, TaggedItem +from taggit.models import Tag from rest_framework import viewsets from util.filterspecs import Filter, FilterBar from hosts.models import Host, HostRepo from domains.models import Domain from arch.models import MachineArchitecture -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease from reports.models import Report from hosts.forms import EditHostForm from hosts.serializers import HostSerializer, HostRepoSerializer @@ -37,37 +37,36 @@ @login_required def host_list(request): - hosts = Host.objects.with_counts('get_num_security_updates', 'get_num_bugfix_updates') \ .select_related() - if 'domain' in request.GET: - hosts = hosts.filter(domain=int(request.GET['domain'])) + if 'domain_id' in request.GET: + hosts = hosts.filter(domain=request.GET['domain_id']) if 'package_id' in request.GET: - hosts = hosts.filter(packages=int(request.GET['package_id'])) + hosts = hosts.filter(packages=request.GET['package_id']) if 'package' in request.GET: hosts = hosts.filter(packages__name__name=request.GET['package']) - if 'repo' in request.GET: - hosts = hosts.filter(repos=int(request.GET['repo'])) + if 'repo_id' in request.GET: + hosts = hosts.filter(repos=request.GET['repo_id']) - if 'arch' in request.GET: - hosts 
= hosts.filter(arch=int(request.GET['arch'])) + if 'arch_id' in request.GET: + hosts = hosts.filter(arch=request.GET['arch_id']) - if 'os' in request.GET: - hosts = hosts.filter(os=int(request.GET['os'])) + if 'osvariant_id' in request.GET: + hosts = hosts.filter(osvariant=request.GET['osvariant_id']) - if 'osgroup' in request.GET: - hosts = hosts.filter(os__osgroup=int(request.GET['osgroup'])) + if 'osrelease_id' in request.GET: + hosts = hosts.filter(osvariant__osrelease=request.GET['osrelease_id']) if 'tag' in request.GET: - hosts = TaggedItem.objects.get_by_model(hosts, request.GET['tag']) + hosts = hosts.filter(tags__name__in=[request.GET['tag']]) if 'reboot_required' in request.GET: - reboot_required = request.GET['reboot_required'] == 'True' + reboot_required = request.GET['reboot_required'] == 'true' hosts = hosts.filter(reboot_required=reboot_required) if 'search' in request.GET: @@ -91,47 +90,40 @@ def host_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - mytags = {} - for tag in Tag.objects.usage_for_model(Host): - mytags[tag.name] = tag.name - filter_list.append(Filter(request, 'tag', mytags)) - filter_list.append(Filter(request, 'domain', Domain.objects.all())) - filter_list.append(Filter(request, 'os', OS.objects.all())) - filter_list.append(Filter(request, 'osgroup', OSGroup.objects.all())) - filter_list.append(Filter(request, 'arch', - MachineArchitecture.objects.all())) - filter_list.append(Filter(request, 'reboot_required', - {False: 'No', True: 'Yes'})) + tags = {} + for tag in Tag.objects.all(): + tags[tag.name] = tag.name + filter_list.append(Filter(request, 'Tag', 'tag', tags)) + filter_list.append(Filter(request, 'Domain', 'domain_id', Domain.objects.all())) + filter_list.append(Filter(request, 'OS Release', 'osrelease_id', + OSRelease.objects.filter(osvariant__host__in=hosts))) + filter_list.append(Filter(request, 'OS Variant', 'osvariant_id', OSVariant.objects.filter(host__in=hosts))) + 
filter_list.append(Filter(request, 'Architecture', 'arch_id', MachineArchitecture.objects.filter(host__in=hosts))) + filter_list.append(Filter(request, 'Reboot Required', 'reboot_required', {'true': 'Yes', 'false': 'No'})) filter_bar = FilterBar(request, filter_list) return render(request, 'hosts/host_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms}) @login_required def host_detail(request, hostname): - host = get_object_or_404(Host, hostname=hostname) - reports = Report.objects.filter(host=hostname).order_by('-created')[:3] - hostrepos = HostRepo.objects.filter(host=host).select_related('repo') - return render(request, 'hosts/host_detail.html', {'host': host, 'reports': reports, - 'hostrepos': hostrepos}, ) + 'hostrepos': hostrepos}) @login_required def host_edit(request, hostname): - host = get_object_or_404(Host, hostname=hostname) - reports = Report.objects.filter(host=hostname).order_by('-created')[:3] if request.method == 'POST': @@ -140,7 +132,7 @@ def host_edit(request, hostname): if edit_form.is_valid(): host = edit_form.save() host.save() - text = f'Saved changes to Host {host!s}' + text = f'Saved changes to Host {host}' messages.info(request, text) return redirect(host.get_absolute_url()) else: @@ -154,29 +146,39 @@ def host_edit(request, hostname): 'hosts/host_edit.html', {'host': host, 'reports': reports, - 'edit_form': edit_form}, ) + 'edit_form': edit_form}) @login_required def host_delete(request, hostname): - host = get_object_or_404(Host, hostname=hostname) if request.method == 'POST': if 'delete' in request.POST: host.delete() - text = f'Host {hostname!s} has been deleted' + text = f'Host {hostname} has been deleted' messages.info(request, text) return redirect(reverse('hosts:host_list')) elif 'cancel' in request.POST: return redirect(host.get_absolute_url()) - reports = Report.objects.filter(host=hostname).order_by('-created')[:3] return render(request, 'hosts/host_delete.html', {'host': host, - 
'reports': reports}, ) + 'reports': reports}) + + +@login_required +def host_find_updates(request, hostname): + """ Find updates using a celery task + """ + from hosts.tasks import find_host_updates + host = get_object_or_404(Host, hostname=hostname) + find_host_updates.delay(host.id) + text = f'Finding updates for Host {host}' + messages.info(request, text) + return redirect(host.get_absolute_url()) class HostViewSet(viewsets.ModelViewSet): diff --git a/manage.py b/manage.py index 47e0d31c..19bd04b4 100755 --- a/manage.py +++ b/manage.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2019-2021 Marcus Furlong +# Copyright 2019-2025 Marcus Furlong # # This file is part of Patchman. # @@ -19,21 +19,19 @@ import os import sys -if __name__ == '__main__': - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "patchman.settings") + +def main(): + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') try: from django.core.management import execute_from_command_line - except ImportError: - # The above import may fail for some other reason. Ensure that the - # issue is really that Django is missing to avoid masking other - # exceptions on Python 2. - try: - import django # noqa - except ImportError: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) - raise + except ImportError as exc: + raise ImportError( + 'Could not import Django. Are you sure it is installed and ' + 'available on your PYTHONPATH environment variable? Did you ' + 'forget to activate a virtual environment?' 
+ ) from exc execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/modules/migrations/0004_alter_module_options.py b/modules/migrations/0004_alter_module_options.py new file mode 100644 index 00000000..e999ec36 --- /dev/null +++ b/modules/migrations/0004_alter_module_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('modules', '0003_alter_module_options'), + ] + + operations = [ + migrations.AlterModelOptions( + name='module', + options={'ordering': ['name', 'stream'], 'verbose_name': 'Module', 'verbose_name_plural': 'Modules'}, + ), + ] diff --git a/modules/models.py b/modules/models.py index 1eb4d236..931a41c3 100644 --- a/modules/models.py +++ b/modules/models.py @@ -35,8 +35,8 @@ class Module(models.Model): class Meta: verbose_name = 'Module' verbose_name_plural = 'Modules' - unique_together = ('name', 'stream', 'version', 'context', 'arch',) - ordering = ('name', 'stream',) + unique_together = ['name', 'stream', 'version', 'context', 'arch'] + ordering = ['name', 'stream'] def __str__(self): return f'{self.name}-{self.stream}-{self.version}-{self.version}-{self.context}' diff --git a/modules/templates/modules/module_table.html b/modules/templates/modules/module_table.html index d01dcbc1..cda47ea3 100644 --- a/modules/templates/modules/module_table.html +++ b/modules/templates/modules/module_table.html @@ -19,7 +19,7 @@ - + {% endfor %} diff --git a/modules/utils.py b/modules/utils.py index 020052b3..f56a0f62 100644 --- a/modules/utils.py +++ b/modules/utils.py @@ -14,8 +14,8 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.db import IntegrityError, DatabaseError, transaction -from patchman.signals import error_message +from django.db import IntegrityError +from patchman.signals import error_message, info_message from modules.models import Module from arch.models import PackageArchitecture @@ -23,21 +23,19 @@ def get_or_create_module(name, stream, version, context, arch, repo): """ Get or create a module object - Returns the module and a boolean for created + Returns the module """ created = False - with transaction.atomic(): - m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) + m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) try: - with transaction.atomic(): - module, created = Module.objects.get_or_create( - name=name, - stream=stream, - version=version, - context=context, - arch=m_arch, - repo=repo, - ) + module, created = Module.objects.get_or_create( + name=name, + stream=stream, + version=version, + context=context, + arch=m_arch, + repo=repo, + ) except IntegrityError as e: error_message.send(sender=None, text=e) module = Module.objects.get( @@ -48,17 +46,14 @@ def get_or_create_module(name, stream, version, context, arch, repo): arch=m_arch, repo=repo, ) - except DatabaseError as e: - error_message.send(sender=None, text=e) - return module, created + return module def get_matching_modules(name, stream, version, context, arch): """ Return modules that match name, stream, version, context, and arch, regardless of repo """ - with transaction.atomic(): - m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) + m_arch, c = PackageArchitecture.objects.get_or_create(name=arch) modules = Module.objects.filter( name=name, stream=stream, @@ -67,3 +62,18 @@ def get_matching_modules(name, stream, version, context, arch): arch=m_arch, ) return modules + + +def clean_modules(): + """ Delete modules that have no host or no repo + """ + modules = Module.objects.filter( + host__isnull=True, + repo__isnull=True, + ) + mlen = 
modules.count() + if mlen == 0: + info_message.send(sender=None, text='No orphaned Modules found.') + else: + info_message.send(sender=None, text=f'{mlen} orphaned Modules found.') + modules.delete() diff --git a/modules/views.py b/modules/views.py index 87678e6e..b897a709 100644 --- a/modules/views.py +++ b/modules/views.py @@ -53,7 +53,7 @@ def module_list(request): return render(request, 'modules/module_list.html', {'page': page, - 'terms': terms}, ) + 'terms': terms}) @login_required @@ -62,7 +62,7 @@ def module_detail(request, module_id): module = get_object_or_404(Module, id=module_id) return render(request, 'modules/module_detail.html', - {'module': module}, ) + {'module': module}) class ModuleViewSet(viewsets.ModelViewSet): diff --git a/operatingsystems/admin.py b/operatingsystems/admin.py index 49851bc6..15f5e200 100644 --- a/operatingsystems/admin.py +++ b/operatingsystems/admin.py @@ -16,12 +16,12 @@ # along with Patchman. If not, see from django.contrib import admin -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease -class OSGroupAdmin(admin.ModelAdmin): +class OSReleaseAdmin(admin.ModelAdmin): filter_horizontal = ('repos',) -admin.site.register(OS) -admin.site.register(OSGroup, OSGroupAdmin) +admin.site.register(OSVariant) +admin.site.register(OSRelease, OSReleaseAdmin) diff --git a/operatingsystems/fixtures/os.json b/operatingsystems/fixtures/os.json new file mode 100644 index 00000000..5192cee5 --- /dev/null +++ b/operatingsystems/fixtures/os.json @@ -0,0 +1,66 @@ +[ + { + "model": "operatingsystems.osvariant", + "fields": { + "name": "Rocky Linux 9.3", + "osrelease": [ + "Rocky Linux 9", + "Blue Onyx" + ] + } + }, + { + "model": "operatingsystems.osvariant", + "fields": { + "name": "Rocky Linux 8.9", + "osrelease": [ + "Rocky Linux 8", + "Green Obsidian" + ] + } + }, + { + "model": "operatingsystems.osvariant", + "fields": { + "name": "Debian 12.5", + "osrelease": [ + "Debian 12", + "bookworm" 
+ ] + } + }, + { + "model": "operatingsystems.osvariant", + "fields": { + "name": "Arch Linux", + "osrelease": null + } + }, + { + "model": "operatingsystems.osvariant", + "fields": { + "name": "openSUSE Leap 15.6", + "osrelease": null + } + }, + { + "model": "operatingsystems.osvariant", + "fields": { + "name": "AlmaLinux 8.10", + "osrelease": [ + "AlmaLinux 8", + "Cerulean Leopard" + ] + } + }, + { + "model": "operatingsystems.osvariant", + "fields": { + "name": "AlmaLinux 9.5", + "osrelease": [ + "AlmaLinux 9", + "Teal Serval" + ] + } + } +] diff --git a/operatingsystems/fixtures/osgroup.json b/operatingsystems/fixtures/osgroup.json new file mode 100644 index 00000000..27a87a17 --- /dev/null +++ b/operatingsystems/fixtures/osgroup.json @@ -0,0 +1,58 @@ +[ + { + "model": "operatingsystems.osrelease", + "fields": { + "name": "CentOS 7", + "codename": "", + "repos": [] + } + }, + { + "model": "operatingsystems.osrelease", + "fields": { + "name": "CentOS 8", + "codename": "", + "repos": [] + } + }, + { + "model": "operatingsystems.osrelease", + "fields": { + "name": "Rocky Linux 8", + "codename": "Green Obsidian", + "repos": [] + } + }, + { + "model": "operatingsystems.osrelease", + "fields": { + "name": "Rocky Linux 9", + "codename": "Blue Onyx", + "repos": [] + } + }, + { + "model": "operatingsystems.osrelease", + "fields": { + "name": "AlmaLinux 8", + "codename": "Cerulean Leopard", + "repos": [] + } + }, + { + "model": "operatingsystems.osrelease", + "fields": { + "name": "AlmaLinux 9", + "codename": "Teal Serval", + "repos": [] + } + }, + { + "model": "operatingsystems.osrelease", + "fields": { + "name": "Debian 12", + "codename": "bookworm", + "repos": [] + } + } +] diff --git a/operatingsystems/forms.py b/operatingsystems/forms.py index d21b6e20..548a7d88 100644 --- a/operatingsystems/forms.py +++ b/operatingsystems/forms.py @@ -18,44 +18,44 @@ from django.forms import ModelForm, ModelMultipleChoiceField from django.contrib.admin.widgets import 
FilteredSelectMultiple -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease from repos.models import Repository -class AddOSToOSGroupForm(ModelForm): +class AddOSVariantToOSReleaseForm(ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.fields['osgroup'].label = 'OS Groups' + self.fields['osrelease'].label = 'OS Releases' class Meta: - model = OS - fields = ('osgroup',) + model = OSVariant + fields = ('osrelease',) -class CreateOSGroupForm(ModelForm): +class CreateOSReleaseForm(ModelForm): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) - self.fields['name'].label = 'New OS Group' + self.fields['name'].label = 'New OS Release' class Meta: - model = OSGroup + model = OSRelease fields = ('name',) -class AddReposToOSGroupForm(ModelForm): +class AddReposToOSReleaseForm(ModelForm): repos = ModelMultipleChoiceField( queryset=Repository.objects.select_related(), required=False, label=None, - widget=FilteredSelectMultiple('Repos', False)) + widget=FilteredSelectMultiple('Repos', False, attrs={'size': '30'})) def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['repos'].label = '' class Meta: - model = OSGroup + model = OSRelease fields = ('repos',) diff --git a/operatingsystems/managers.py b/operatingsystems/managers.py new file mode 100644 index 00000000..630484a1 --- /dev/null +++ b/operatingsystems/managers.py @@ -0,0 +1,22 @@ +# Copyright 2024 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import models + + +class OSReleaseManager(models.Manager): + def get_by_natural_key(self, name, codename): + return self.get(name=name, codename=codename) diff --git a/operatingsystems/migrations/0003_osgroup_codename.py b/operatingsystems/migrations/0003_osgroup_codename.py new file mode 100644 index 00000000..97496e01 --- /dev/null +++ b/operatingsystems/migrations/0003_osgroup_codename.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.15 on 2025-01-13 18:55 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0003_os_arch'), + ] + + operations = [ + migrations.AddField( + model_name='osgroup', + name='codename', + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/operatingsystems/migrations/0004_alter_osgroup_unique_together.py b/operatingsystems/migrations/0004_alter_osgroup_unique_together.py new file mode 100644 index 00000000..dbda97e6 --- /dev/null +++ b/operatingsystems/migrations/0004_alter_osgroup_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.15 on 2025-01-13 19:57 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0003_osgroup_codename'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='osgroup', + unique_together={('name', 'codename')}, + ), + ] diff --git a/operatingsystems/migrations/0005_rename_osgroup_osrelease_rename_os_osvariant_and_more.py b/operatingsystems/migrations/0005_rename_osgroup_osrelease_rename_os_osvariant_and_more.py new file mode 100644 index 00000000..b5381c1f --- /dev/null +++ b/operatingsystems/migrations/0005_rename_osgroup_osrelease_rename_os_osvariant_and_more.py @@ -0,0 +1,36 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:36 + +from django.db import migrations + + +class 
Migration(migrations.Migration): + + dependencies = [ + ('repos', '0001_initial'), + ('hosts', '0005_rename_os_host_osvariant'), + ('operatingsystems', '0004_alter_osgroup_unique_together'), + ] + + operations = [ + migrations.RenameModel( + old_name='OSGroup', + new_name='OSRelease', + ), + migrations.RenameModel( + old_name='OS', + new_name='OSVariant', + ), + migrations.AlterModelOptions( + name='osrelease', + options={'ordering': ('name',), 'verbose_name': 'Operating System Release', 'verbose_name_plural': 'Operating System Releases'}, + ), + migrations.AlterModelOptions( + name='osvariant', + options={'ordering': ('name',), 'verbose_name': 'Operating System Variant', 'verbose_name_plural': 'Operating System Variants'}, + ), + migrations.RenameField( + model_name='osvariant', + old_name='osgroup', + new_name='osrelease', + ), + ] diff --git a/operatingsystems/migrations/0006_osrelease_cpe_name_osvariant_codename.py b/operatingsystems/migrations/0006_osrelease_cpe_name_osvariant_codename.py new file mode 100644 index 00000000..bb68b2a5 --- /dev/null +++ b/operatingsystems/migrations/0006_osrelease_cpe_name_osvariant_codename.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.19 on 2025-02-12 20:04 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0005_rename_osgroup_osrelease_rename_os_osvariant_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='osrelease', + name='cpe_name', + field=models.CharField(blank=True, max_length=255, null=True, unique=True), + ), + migrations.AddField( + model_name='osvariant', + name='codename', + field=models.CharField(blank=True, max_length=255), + ), + ] diff --git a/operatingsystems/migrations/0007_alter_osrelease_unique_together.py b/operatingsystems/migrations/0007_alter_osrelease_unique_together.py new file mode 100644 index 00000000..862b6f06 --- /dev/null +++ 
b/operatingsystems/migrations/0007_alter_osrelease_unique_together.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-02-26 16:01 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0006_osrelease_cpe_name_osvariant_codename'), + ] + + operations = [ + migrations.AlterUniqueTogether( + name='osrelease', + unique_together={('name', 'codename', 'cpe_name')}, + ), + ] diff --git a/operatingsystems/migrations/0008_alter_osrelease_options_alter_osvariant_options.py b/operatingsystems/migrations/0008_alter_osrelease_options_alter_osvariant_options.py new file mode 100644 index 00000000..bcce94e4 --- /dev/null +++ b/operatingsystems/migrations/0008_alter_osrelease_options_alter_osvariant_options.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('operatingsystems', '0007_alter_osrelease_unique_together'), + ] + + operations = [ + migrations.AlterModelOptions( + name='osrelease', + options={'ordering': ['name'], 'verbose_name': 'Operating System Release', 'verbose_name_plural': 'Operating System Releases'}, + ), + migrations.AlterModelOptions( + name='osvariant', + options={'ordering': ['name'], 'verbose_name': 'Operating System Variant', 'verbose_name_plural': 'Operating System Variants'}, + ), + ] diff --git a/operatingsystems/models.py b/operatingsystems/models.py index 905dafda..234b8ab3 100644 --- a/operatingsystems/models.py +++ b/operatingsystems/models.py @@ -22,37 +22,50 @@ from repos.models import Repository -class OSGroup(models.Model): +class OSRelease(models.Model): - name = models.CharField(max_length=255, unique=True) + name = models.CharField(max_length=255, unique=True, blank=False, null=False) repos = models.ManyToManyField(Repository, blank=True) + codename = models.CharField(max_length=255, blank=True) + cpe_name = models.CharField(max_length=255, 
null=True, blank=True, unique=True) + + from operatingsystems.managers import OSReleaseManager + objects = OSReleaseManager() class Meta: - verbose_name = 'Operating System Group' - verbose_name_plural = 'Operating System Groups' - ordering = ('name',) + verbose_name = 'Operating System Release' + verbose_name_plural = 'Operating System Releases' + unique_together = ['name', 'codename', 'cpe_name'] + ordering = ['name'] def __str__(self): - return self.name + if self.codename: + return f'{self.name} ({self.codename})' + else: + return self.name def get_absolute_url(self): - return reverse('operatingsystems:osgroup_detail', args=[str(self.id)]) + return reverse('operatingsystems:osrelease_detail', args=[str(self.id)]) + + def natural_key(self): + return (self.name, self.codename, self.cpe_name) -class OS(models.Model): +class OSVariant(models.Model): name = models.CharField(max_length=255, unique=True) arch = models.ForeignKey(MachineArchitecture, blank=True, null=True, on_delete=models.CASCADE) - osgroup = models.ForeignKey(OSGroup, blank=True, null=True, - on_delete=models.SET_NULL) + osrelease = models.ForeignKey(OSRelease, blank=True, null=True, on_delete=models.SET_NULL) + codename = models.CharField(max_length=255, blank=True) class Meta: - verbose_name = 'Operating System' - verbose_name_plural = 'Operating Systems' - ordering = ('name',) + verbose_name = 'Operating System Variant' + verbose_name_plural = 'Operating System Variants' + ordering = ['name'] def __str__(self): - return self.name + osvariant_name = f'{self.name} {self.arch}' + return osvariant_name def get_absolute_url(self): - return reverse('operatingsystems:os_detail', args=[str(self.id)]) + return reverse('operatingsystems:osvariant_detail', args=[str(self.id)]) diff --git a/operatingsystems/serializers.py b/operatingsystems/serializers.py index 3edd33c3..8418c720 100644 --- a/operatingsystems/serializers.py +++ b/operatingsystems/serializers.py @@ -16,16 +16,16 @@ from rest_framework import 
serializers -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease -class OSSerializer(serializers.HyperlinkedModelSerializer): +class OSVariantSerializer(serializers.HyperlinkedModelSerializer): class Meta: - model = OS - fields = ('id', 'name', 'osgroup') + model = OSVariant + fields = ('id', 'name', 'osrelease', 'arch') -class OSGroupSerializer(serializers.HyperlinkedModelSerializer): +class OSReleaseSerializer(serializers.HyperlinkedModelSerializer): class Meta: - model = OSGroup - fields = ('id', 'name', 'repos') + model = OSRelease + fields = ('id', 'name', 'codename', 'repos') diff --git a/operatingsystems/templates/operatingsystems/operatingsystem_table.html b/operatingsystems/templates/operatingsystems/operatingsystem_table.html deleted file mode 100644 index f407e8a7..00000000 --- a/operatingsystems/templates/operatingsystems/operatingsystem_table.html +++ /dev/null @@ -1,21 +0,0 @@ -{% load common %} -
    Hostname {{ host.hostname }}
    Domain {{ host.domain }}
    Domain {{ host.domain }}
    Reporting IP Address {{ host.ipaddress }}
    Reverse DNS {{ host.reversedns }}
    OS {{ host.os }}
    OS Group {{ host.os.osgroup }}
    OS Release {{ host.osvariant.osrelease }}
    OS Variant {{ host.osvariant }}
    Kernel {{ host.kernel }}
    Architecture {{ host.arch }}
    Tags - {% load tagging_tags %} - {% tags_for_object host as tags %} - {% for tag in tags %} + {% for tag in host.tags.all %} {{ tag }} {% endfor %}
    Updated {{ host.updated_at }}
    Last Report {{ host.lastreport }}
    Packages Installed {{ host.packages.count}}
    Updates Available {{ host.updates.count }}
    Errata{{ host.errata.count }}
    Reboot Required {{ host.reboot_required }}
    Packages Installed {{ host.packages.count}}
    Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Group Repos{% endif %}
    Repos In Use{% if host.host_repos_only %}Host Repos{% else %}Host and OS Release Repos{% endif %}
    Last 3 reports @@ -60,6 +59,7 @@ {% if user.is_authenticated and perms.is_admin %} {% bootstrap_icon "trash" %} Delete this Host {% bootstrap_icon "edit" %} Edit this Host + {% bootstrap_icon "refresh" %} Find Updates for this Host {% endif %} @@ -116,16 +116,16 @@
    {{ osrepo.name }} OS Group {{ osrelease_repo.name }} OS Release N/A {% yes_no_img osrepo.enabled %} {% yes_no_img osrepo.security %} {% yes_no_img osrelease_repo.enabled %} {% yes_no_img osrelease_repo.security %}
    Hostname UpdatesAffected by Errata Running KernelOSOS Variant Last Report Reboot Status
    {{ host }} {% with count=host.get_num_security_updates %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %} {% with count=host.get_num_bugfix_updates %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %}{% with count=host.errata.count %}{% if count != 0 %}{{ count }}{% else %} {% endif %}{% endwith %} {{ host.kernel }}{{ host.os }}{{ host.osvariant }} {{ host.lastreport }}{% report_alert host.lastreport %} {% no_yes_img host.reboot_required %}
    {{ module.version }} {{ module.context }} {{ module.repo }}{{ module.packages.count }}{{ module.packages.count }} {{ module.host_set.count }}
    - - - - - - - - - - {% for os in object_list %} - - - - - - - {% endfor %} - -
    OS NameHostsOS GroupRepos (OS Group)
    {{ os }}{{ os.host_set.count }}{% if os.osgroup != None %}{{ os.osgroup }}{% endif %}{% if os.osgroup.repos.count != None %}{{ os.osgroup.repos.count }}{% else %}0{% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/operatingsystemgroup_table.html b/operatingsystems/templates/operatingsystems/operatingsystemgroup_table.html deleted file mode 100644 index b0153949..00000000 --- a/operatingsystems/templates/operatingsystems/operatingsystemgroup_table.html +++ /dev/null @@ -1,17 +0,0 @@ -{% load common %} - - - - - - - - - {% for osgroup in object_list %} - - - - - {% endfor %} - -
    OS GroupRepos
    {{ osgroup }}{% if osgroup.repos.count != None %}{{ osgroup.repos.count }}{% else %}0{% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html new file mode 100644 index 00000000..6a7eae13 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/operatingsystemrelease_table.html @@ -0,0 +1,27 @@ +{% load common %} + + + + + + + + + + + + + + {% for osrelease in object_list %} + + + + + + + + + + {% endfor %} + +
    OS ReleaseCPE NameCodenameReposOS VariantsHostsErrata
    {{ osrelease.name }}{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}{{ osrelease.repos.count }}{{ osrelease.osvariant_set.count }}{% host_count osrelease %}{{ osrelease.erratum_set.count }}
    diff --git a/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html new file mode 100644 index 00000000..3ef8403f --- /dev/null +++ b/operatingsystems/templates/operatingsystems/operatingsystemvariant_table.html @@ -0,0 +1,25 @@ +{% load common %} + + + + + + + + + + + + + {% for osvariant in object_list %} + + + + + + + + + {% endfor %} + +
    NameArchitectureCodenameHostsOS ReleaseRepos (OS Release)
    {{ osvariant }}{{ osvariant.arch }}{% if osvariant.codename %}{{ osvariant.codename }}{% else %}{% if osvariant.osrelease %}{{ osvariant.osrelease.codename }}{% endif %}{% endif %}{{ osvariant.host_set.count }}{% if osvariant.osrelease %}{{ osvariant.osrelease }}{% endif %}{% if osvariant.osrelease.repos.count != None %}{{ osvariant.osrelease.repos.count }}{% else %}0{% endif %}
    diff --git a/operatingsystems/templates/operatingsystems/os_delete.html b/operatingsystems/templates/operatingsystems/os_delete.html deleted file mode 100644 index 5367c9c2..00000000 --- a/operatingsystems/templates/operatingsystems/os_delete.html +++ /dev/null @@ -1,77 +0,0 @@ -{% extends "base.html" %} - -{% load common bootstrap3 %} - -{% block page_title %}Operating System - {{ os }} {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • - -{% if os %} - {{ os }} -{% endif %} - -{% if oses %} - Multiple -{% endif %} - -
  • {% endblock %} - -{% block content_title %}Operating System - {{ os }}{% endblock %} - -{% block content %} - -{% if os %} -
    - - - - -
    Name {{ os.name }}
    Hosts{% if os.host_set.count != None %} {{ os.host_set.count }} {% else %} 0 {% endif %}
    OS Group{% if os.osgroup != None %} {{ os.osgroup }} {% else %}No OS Group{% endif %}
    -{% endif %} - -{% if oses %} -
    - - - - - {% for os in oses %} - - - - - - {% endfor %} -
    NameHostsOS Group
    {{ os }} {% if os.host_set.count != None %} {{ os.host_set.count }} {% else %} 0 {% endif %}{% if os.osgroup != None %} {{ os.osgroup }} {% else %}No OS Group{% endif %}
    -{% endif %} - -
    - {% if user.is_authenticated and perms.is_admin %} -
    - Are you sure you want to delete - {% if os %} - this Operating System? - {% endif %} - {% if oses %} - these Operating Systems? - {% endif %} -
    -
    -
    - {% csrf_token %} - - -
    -
    - {% else %} -
    - You do not have permission to delete Operating Systems. -
    - - {% endif %} -
    -
    - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/os_detail.html b/operatingsystems/templates/operatingsystems/os_detail.html deleted file mode 100644 index dc743770..00000000 --- a/operatingsystems/templates/operatingsystems/os_detail.html +++ /dev/null @@ -1,63 +0,0 @@ -{% extends "base.html" %} - -{% load common bootstrap3 %} - -{% block page_title %}Operating System - {{ os }} {% endblock %} - -{% block content_title %} Operating System - {{ os }} {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • {{ os }}
  • {% endblock %} - -{% block content %} - - - -
    - -
    -
    - - - - -
    Name {{ os.name }}
    Hosts{% if os.host_set.count != None %} {{ os.host_set.count }} {% else %} 0 {% endif %}
    OS Group{% if os.osgroup != None %} {{ os.osgroup }} {% else %}No OS Group{% endif %}
    - {% if user.is_authenticated and perms.is_admin %} - {% bootstrap_icon "trash" %} Delete this Operating System - {% endif %} -
    -
    - -
    -
    - {% gen_table os.host_set.all %} -
    -
    - -
    -
    - {% if user.is_authenticated and perms.is_admin %} -
    -
    - {% csrf_token %} - {% bootstrap_form add_form size='small' %} - -
    -
    -
    -
    - {% csrf_token %} - {% bootstrap_form create_form size='small' %} - -
    -
    - {% endif %} -
    -
    - -
    - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/os_landing.html b/operatingsystems/templates/operatingsystems/os_landing.html new file mode 100644 index 00000000..1acf3dcb --- /dev/null +++ b/operatingsystems/templates/operatingsystems/os_landing.html @@ -0,0 +1,18 @@ +{% extends "base.html" %} + +{% block page_title %} Operating Systems {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • {% endblock %} + +{% block content_title %} Operating Systems {% endblock %} + +{% block content %} + + + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/os_list.html b/operatingsystems/templates/operatingsystems/os_list.html deleted file mode 100644 index 28ea5ae1..00000000 --- a/operatingsystems/templates/operatingsystems/os_list.html +++ /dev/null @@ -1,19 +0,0 @@ -{% extends "objectlist.html" %} - -{% load common bootstrap3 %} - -{% block page_title %}Operating Systems{% endblock %} - -{% block content_title %} Operating Systems {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • {% endblock %} - -{% block objectlist_actions %} - -{% if user.is_authenticated and perms.is_admin and empty_oses %} - -{% endif %} - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osgroup_delete.html b/operatingsystems/templates/operatingsystems/osgroup_delete.html deleted file mode 100644 index ded9292c..00000000 --- a/operatingsystems/templates/operatingsystems/osgroup_delete.html +++ /dev/null @@ -1,46 +0,0 @@ -{% extends "base.html" %} - -{% load common bootstrap3 %} - -{% block page_title %}OS Group - {{ osgroup }} {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Groups
  • {{ osgroup }}
  • {% endblock %} - -{% block content_title %} OS Group - {{ osgroup }} {% endblock %} - -{% block content %} - -
    - - - - - - - -
    Name{{ osgroup }}
    Operating Systems in this OS Group{{ osgroup.os_set.select_related.count }}
    - -
    - {% if user.is_authenticated and perms.is_admin %} -
    - Are you sure you want to delete this OS Group? -
    -
    -
    - {% csrf_token %} - - -
    -
    - {% else %} -
    - You do not have permission to delete this OS Group. -
    - - {% endif %} -
    -
    - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osgroup_detail.html b/operatingsystems/templates/operatingsystems/osgroup_detail.html deleted file mode 100644 index 881003a3..00000000 --- a/operatingsystems/templates/operatingsystems/osgroup_detail.html +++ /dev/null @@ -1,72 +0,0 @@ -{% extends "base.html" %} - -{% load common bootstrap3 %} - -{% block extrahead %} {{ repos_form.media }} {% endblock %} - -{% block page_title %}OS Group - {{ osgroup }} {% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Groups
  • {{ osgroup }}
  • {% endblock %} - -{% block content_title %} OS Group - {{ osgroup }}{% endblock %} - -{% block content %} - - - -
    - -
    -
    - - - - - - - -
    Name{{ osgroup }}
    Operating Systems in this OS Group{{ osgroup.os_set.select_related.count }}
    - {% if user.is_authenticated and perms.is_admin %} - {% bootstrap_icon "trash" %} Delete this OS Group - {% endif %} -
    -
    - -
    -
    - - {% for repo in osgroup.os_set.select_related %} - - - - {% endfor %} -
    {{ repo }}
    -
    -
    - -
    -
    - {% if osgroup.repos.count == 0 %} - {{ osgroup }} has no Repositories
    - {% else %} - {% gen_table osgroup.repos.select_related %} - {% endif %} - {% if user.is_authenticated and perms.is_admin %} -
    -
    - {% csrf_token %} - {% bootstrap_form repos_form size='small' %} - -
    -
    - {% endif %} -
    -
    - -
    - -{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osgroup_list.html b/operatingsystems/templates/operatingsystems/osgroup_list.html deleted file mode 100644 index 22f39938..00000000 --- a/operatingsystems/templates/operatingsystems/osgroup_list.html +++ /dev/null @@ -1,7 +0,0 @@ -{% extends "objectlist.html" %} - -{% block page_title %}OS Groups{% endblock %} - -{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Groups
  • {% endblock %} - -{% block content_title %} OS Groups {% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osrelease_delete.html b/operatingsystems/templates/operatingsystems/osrelease_delete.html new file mode 100644 index 00000000..e0c6f5bf --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osrelease_delete.html @@ -0,0 +1,47 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Release - {{ osrelease }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Releases
  • {{ osrelease }}
  • {% endblock %} + +{% block content_title %} OS Release - {{ osrelease }} {% endblock %} + +{% block content %} + +
    + + + + + + + + +
    Name{{ osrelease }}
    CPE Name{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}
    Codename{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    Errata{{ osrelease.erratum_set.count }}
    + +
    + {% if user.is_authenticated and perms.is_admin %} +
    + Are you sure you want to delete this OS Release? +
    +
    +
    + {% csrf_token %} + + +
    +
    + {% else %} +
    + You do not have permission to delete this OS Release. +
    + + {% endif %} +
    +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osrelease_detail.html b/operatingsystems/templates/operatingsystems/osrelease_detail.html new file mode 100644 index 00000000..740b9c4b --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osrelease_detail.html @@ -0,0 +1,72 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block extrahead %} {{ repos_form.media }} {% endblock %} + +{% block page_title %}OS Release - {{ osrelease }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Releases
  • {{ osrelease }}
  • {% endblock %} + +{% block content_title %} OS Release - {{ osrelease }}{% endblock %} + +{% block content %} + + + +
    + +
    +
    + + + + + + + + +
    Name{{ osrelease.name }}
    CPE Name{% if osrelease.cpe_name %}{{ osrelease.cpe_name }}{% endif %}
    Codename{% if osrelease.codename %}{{ osrelease.codename }}{% endif %}
    OS Variants{{ osrelease.osvariant_set.count }}
    Repositories{{ osrelease.repos.count }}
    Hosts{{ host_count }}
    Errata{{ osrelease.erratum_set.count }}
    + + {% if user.is_authenticated and perms.is_admin %} + {% bootstrap_icon "trash" %} Delete this OS Release + {% endif %} +
    +
    + +
    +
    + {% if osrelease.osvariant_set.count == 0 %} + {{ osrelease }} has no Variants + {% else %} + {% gen_table osrelease.osvariant_set.select_related %} + {% endif %} +
    +
    + +
    +
    + {% if osrelease.repos.count == 0 %} + {{ osrelease }} has no Repositories + {% else %} + {% gen_table osrelease.repos.select_related %} + {% endif %} + {% if user.is_authenticated and perms.is_admin %} +
    +
    + {% csrf_token %} + {% bootstrap_form repos_form size='large' %} + +
    +
    + {% endif %} +
    +
    + +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osrelease_list.html b/operatingsystems/templates/operatingsystems/osrelease_list.html new file mode 100644 index 00000000..1dfc80e1 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osrelease_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}OS Releases{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Releases
  • {% endblock %} + +{% block content_title %} OS Releases {% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_delete.html b/operatingsystems/templates/operatingsystems/osvariant_delete.html new file mode 100644 index 00000000..bea8a849 --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osvariant_delete.html @@ -0,0 +1,45 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Variant - {{ osvariant }} {% endblock %} + +{% block content_title %} OS Variant - {{ osvariant }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • OS Variants
  • {{ osvariant }}
  • {% endblock %} + +{% block content %} + +
    + + + + + + +
    Name {{ osvariant.name }}
    Architecture {{ osvariant.arch }}
    Codename {{ osvariant.codename }}
    Hosts{{ osvariant.host_set.count }}
    OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    + +
    + {% if user.is_authenticated and perms.is_admin %} +
    + Are you sure you want to delete this OS Variant? +
    +
    +
    + {% csrf_token %} + + +
    +
    + {% else %} +
    + You do not have permission to delete OS Variants. +
    + + {% endif %} +
    +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_delete_multiple.html b/operatingsystems/templates/operatingsystems/osvariant_delete_multiple.html new file mode 100644 index 00000000..f6d0373d --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osvariant_delete_multiple.html @@ -0,0 +1,56 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Variant - Multiple {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Variants
  • + +{% if osvariants %} + Multiple +{% endif %} + +
  • {% endblock %} + +{% block content_title %}OS Variant - Multiple{% endblock %} + +{% block content %} + +
    + + + + + {% for osvariant in osvariants %} + + + + + + {% endfor %} +
    NameHostsOS Release
    {{ osvariant }} {% if osvariant.host_set.count != None %} {{ osvariant.host_set.count }} {% else %} 0 {% endif %}{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    + +
    + {% if user.is_authenticated and perms.is_admin %} +
    + Are you sure you want to delete these OS Variants? +
    +
    +
    + {% csrf_token %} + + +
    +
    + {% else %} +
    + You do not have permission to delete OS Variants. +
    + + {% endif %} +
    +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_detail.html b/operatingsystems/templates/operatingsystems/osvariant_detail.html new file mode 100644 index 00000000..0c1d306b --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osvariant_detail.html @@ -0,0 +1,65 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Variant - {{ osvariant }} {% endblock %} + +{% block content_title %} OS Variant - {{ osvariant }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • OS Variants
  • {{ osvariant }}
  • {% endblock %} + +{% block content %} + + + +
    + +
    +
    + + + + + + +
    Name {{ osvariant.name }}
    Architecture {{ osvariant.arch }}
    Codename {{ osvariant.codename }}
    Hosts{{ osvariant.host_set.count }}
    OS Release{% if osvariant.osrelease != None %} {{ osvariant.osrelease }} {% else %}No OS Release{% endif %}
    + {% if user.is_authenticated and perms.is_admin %} + {% bootstrap_icon "trash" %} Delete this OS Variant + {% endif %} +
    +
    + +
    +
    + {% gen_table osvariant.host_set.all %} +
    +
    + +
    +
    + {% if user.is_authenticated and perms.is_admin %} +
    +
    + {% csrf_token %} + {% bootstrap_form add_form size='small' %} + +
    +
    +
    +
    + {% csrf_token %} + {% bootstrap_form create_form size='small' %} + +
    +
    + {% endif %} +
    +
    + +
    + +{% endblock %} diff --git a/operatingsystems/templates/operatingsystems/osvariant_list.html b/operatingsystems/templates/operatingsystems/osvariant_list.html new file mode 100644 index 00000000..b83ede5f --- /dev/null +++ b/operatingsystems/templates/operatingsystems/osvariant_list.html @@ -0,0 +1,19 @@ +{% extends "objectlist.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}OS Variants{% endblock %} + +{% block content_title %} OS Variants {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Operating Systems
  • OS Variants
  • {% endblock %} + +{% block objectlist_actions %} + +{% if user.is_authenticated and perms.is_admin and nohost_osvariants %} + +{% endif %} + +{% endblock %} diff --git a/operatingsystems/urls.py b/operatingsystems/urls.py index 24e9500a..df194c9d 100644 --- a/operatingsystems/urls.py +++ b/operatingsystems/urls.py @@ -22,10 +22,12 @@ app_name = 'operatingsystems' urlpatterns = [ - path('', views.os_list, name='os_list'), - path('/', views.os_detail, name='os_detail'), - path('/delete/', views.os_delete, name='os_delete'), - path('groups/', views.osgroup_list, name='osgroup_list'), - path('groups//', views.osgroup_detail, name='osgroup_detail'), # noqa - path('groups//delete/', views.osgroup_delete, name='osgroup_delete'), # noqa + path('', views.os_landing, name='os_landing'), + path('variants/', views.osvariant_list, name='osvariant_list'), + path('variants//', views.osvariant_detail, name='osvariant_detail'), + path('variants//delete/', views.osvariant_delete, name='osvariant_delete'), + path('variants/no_host/delete/', views.delete_nohost_osvariants, name='delete_nohost_osvariants'), + path('releases/', views.osrelease_list, name='osrelease_list'), + path('releases//', views.osrelease_detail, name='osrelease_detail'), + path('releases//delete/', views.osrelease_delete, name='osrelease_delete'), ] diff --git a/operatingsystems/utils.py b/operatingsystems/utils.py new file mode 100644 index 00000000..c66182be --- /dev/null +++ b/operatingsystems/utils.py @@ -0,0 +1,79 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db import IntegrityError + + +def get_or_create_osrelease(name, cpe_name=None, codename=None): + """ Get or create OSRelease from OS details + """ + from operatingsystems.models import OSRelease + osrelease = None + updated = False + if cpe_name: + try: + osrelease, created = OSRelease.objects.get_or_create(name=name, cpe_name=cpe_name) + except IntegrityError: + osreleases = OSRelease.objects.filter(cpe_name=cpe_name) + if osreleases.count() == 1: + osrelease = osreleases.first() + osrelease.name = name + if not osrelease and codename: + try: + osrelease, created = OSRelease.objects.get_or_create(name=name, codename=codename) + except IntegrityError: + osreleases = OSRelease.objects.filter(codename=codename) + if osreleases.count() == 1: + osrelease = osreleases.first() + osrelease.name = name + osrelease.save() + if not osrelease: + osrelease, created = OSRelease.objects.get_or_create(name=name) + if cpe_name and osrelease.cpe_name != cpe_name: + osrelease.cpe_name = cpe_name + updated = True + if codename and osrelease.codename != codename: + osrelease.codename = codename + updated = True + if updated: + osrelease.save() + return osrelease + + +def get_or_create_osvariant(name, osrelease, codename=None, arch=None): + """ Get or create OSVariant from OSRelease and os details + """ + from operatingsystems.models import OSVariant + osvariant = None + updated = False + try: + osvariant, created = OSVariant.objects.get_or_create(name=name, arch=arch) + except IntegrityError: + osvariants = OSVariant.objects.filter(name=name) + if osvariants.count() == 1: + osvariant = osvariants.first() + if osvariant.osrelease != osrelease: + osvariant.osrelease = osrelease + updated = True + if arch and osvariant.arch != arch: + osvariant.arch = arch + updated = True + if codename and osvariant.codename != 
codename: + osvariant.codename = codename + updated = True + if updated: + osvariant.save() + return osvariant diff --git a/operatingsystems/views.py b/operatingsystems/views.py index c73d1e41..2b696f92 100644 --- a/operatingsystems/views.py +++ b/operatingsystems/views.py @@ -18,25 +18,24 @@ from django.shortcuts import get_object_or_404, render, redirect from django.contrib.auth.decorators import login_required from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger -from django.db.models import Prefetch, Q +from django.db.models import Q from django.contrib import messages from django.urls import reverse from rest_framework import viewsets from hosts.models import Host -from operatingsystems.models import OS, OSGroup -from operatingsystems.forms import AddOSToOSGroupForm, \ - AddReposToOSGroupForm, CreateOSGroupForm -from operatingsystems.serializers import OSSerializer, \ - OSGroupSerializer +from operatingsystems.models import OSVariant, OSRelease +from operatingsystems.forms import AddOSVariantToOSReleaseForm, AddReposToOSReleaseForm, CreateOSReleaseForm +from operatingsystems.serializers import OSVariantSerializer, OSReleaseSerializer @login_required -def os_list(request): +def osvariant_list(request): + osvariants = OSVariant.objects.select_related() - oses = OS.objects.select_related() \ - .prefetch_related('host_set', 'osgroup__repos') + if 'osrelease_id' in request.GET: + osvariants = osvariants.filter(osrelease=request.GET['osrelease_id']) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -44,12 +43,12 @@ def os_list(request): for term in terms.split(' '): q = Q(name__icontains=term) query = query & q - oses = oses.filter(query) + osvariants = osvariants.filter(query) else: terms = '' page_no = request.GET.get('page') - paginator = Paginator(oses, 50) + paginator = Paginator(osvariants, 50) try: page = paginator.page(page_no) @@ -58,92 +57,86 @@ def os_list(request): except EmptyPage: page = 
paginator.page(paginator.num_pages) - empty_oses = list(OS.objects.filter(host__isnull=True)) + nohost_osvariants = OSVariant.objects.filter(host__isnull=True).exists() return render(request, - 'operatingsystems/os_list.html', - {'page': page, 'terms': terms, 'empty_oses': empty_oses}, ) + 'operatingsystems/osvariant_list.html', + {'page': page, + 'terms': terms, + 'nohost_osvariants': nohost_osvariants}) @login_required -def os_detail(request, os_id): - - os = get_object_or_404(OS, id=os_id) - - hosts = Host.objects.with_counts('get_num_security_updates', 'get_num_bugfix_updates') - hosts_prefetch = Prefetch('host_set', queryset=hosts) - os = OS.objects.select_related() \ - .prefetch_related(hosts_prefetch) \ - .get(id=os_id) +def osvariant_detail(request, osvariant_id): + osvariant = get_object_or_404(OSVariant, id=osvariant_id) if request.method == 'POST': - create_form = CreateOSGroupForm(request.POST, prefix='create') + create_form = CreateOSReleaseForm(request.POST, prefix='create') if create_form.is_valid(): - osgroup = create_form.save() - os.osgroup = osgroup - os.save() - text = f'Created OS Group {osgroup!s} ' - text += f'and added OS {os!s} to it' + osrelease = create_form.save() + osvariant.osrelease = osrelease + osvariant.save() + text = f'Created OS Release {osrelease} and added OS Variant {osvariant} to it' messages.info(request, text) - return redirect(os.get_absolute_url()) - add_form = AddOSToOSGroupForm(request.POST, instance=os, prefix='add') + return redirect(osvariant.get_absolute_url()) + add_form = AddOSVariantToOSReleaseForm(request.POST, instance=osvariant, prefix='add') if add_form.is_valid(): add_form.save() - text = f'OS {os!s} added to OS Group {os.osgroup!s}' + text = f'OS Variant {osvariant} added to OS Release {osvariant.osrelease}' messages.info(request, text) - return redirect(os.get_absolute_url()) + return redirect(osvariant.get_absolute_url()) else: - add_form = AddOSToOSGroupForm(instance=os, prefix='add') - create_form = 
CreateOSGroupForm(prefix='create') + add_form = AddOSVariantToOSReleaseForm(instance=osvariant, prefix='add') + create_form = CreateOSReleaseForm(prefix='create') return render(request, - 'operatingsystems/os_detail.html', - {'os': os, + 'operatingsystems/osvariant_detail.html', + {'osvariant': osvariant, 'add_form': add_form, - 'create_form': create_form}, ) + 'create_form': create_form}) @login_required -def os_delete(request, os_id): +def osvariant_delete(request, osvariant_id): + osvariant = get_object_or_404(OSVariant, id=osvariant_id) - if os_id == 'empty_oses': - os = False - oses = list(OS.objects.filter(host__isnull=True)) - else: - os = get_object_or_404(OS, id=os_id) - oses = False + if request.method == 'POST': + if 'delete' in request.POST: + osvariant.delete() + messages.info(request, f'OS Variant {osvariant} has been deleted') + return redirect(reverse('operatingsystems:osvariant_list')) + elif 'cancel' in request.POST: + return redirect(osvariant.get_absolute_url()) + + return render(request, 'operatingsystems/osvariant_delete.html', {'osvariant': osvariant}) + + +@login_required +def delete_nohost_osvariants(request): + osvariants = OSVariant.objects.filter(host__isnull=True) if request.method == 'POST': if 'delete' in request.POST: - if os: - os.delete() - messages.info(request, f'OS {os!s} has been deleted') - return redirect(reverse('operatingsystems:os_list')) - else: - if not oses: - text = 'There are no OS\'s with no Hosts' - messages.info(request, text) - return redirect(reverse('operatingsystems:os_list')) - for os in oses: - os.delete() - text = f'{len(oses)!s} OS\'s have been deleted' + if not osvariants: + text = 'There are no OS Variants with no Hosts' messages.info(request, text) - return redirect(reverse('operatingsystems:os_list')) + return redirect(reverse('operatingsystems:osvariant_list')) + text = f'{osvariants.count()} OS Variants have been deleted' + osvariants.delete() + messages.info(request, text) + return 
redirect(reverse('operatingsystems:osvariant_list')) elif 'cancel' in request.POST: - if os_id == 'empty_oses': - return redirect(reverse('operatingsystems:os_list')) - else: - return redirect(os.get_absolute_url()) + return redirect(reverse('operatingsystems:osvariant_list')) - return render(request, - 'operatingsystems/os_delete.html', - {'os': os, 'oses': oses}, ) + return render(request, 'operatingsystems/osvariant_delete_multiple.html', {'osvariants': osvariants}) @login_required -def osgroup_list(request): +def osrelease_list(request): + osreleases = OSRelease.objects.select_related() - osgroups = OSGroup.objects.select_related() + if 'erratum_id' in request.GET: + osreleases = osreleases.filter(erratum=request.GET['erratum_id']) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -151,12 +144,12 @@ def osgroup_list(request): for term in terms.split(' '): q = Q(name__icontains=term) query = query & q - osgroups = osgroups.filter(query) + osreleases = osreleases.filter(query) else: terms = '' page_no = request.GET.get('page') - paginator = Paginator(osgroups, 50) + paginator = Paginator(osreleases, 50) try: page = paginator.page(page_no) @@ -166,61 +159,71 @@ def osgroup_list(request): page = paginator.page(paginator.num_pages) return render(request, - 'operatingsystems/osgroup_list.html', - {'page': page, 'terms': terms}, ) + 'operatingsystems/osrelease_list.html', + {'page': page, + 'terms': terms}) @login_required -def osgroup_detail(request, osgroup_id): - - osgroup = get_object_or_404(OSGroup, id=osgroup_id) +def osrelease_detail(request, osrelease_id): + osrelease = get_object_or_404(OSRelease, id=osrelease_id) if request.method == 'POST': - repos_form = AddReposToOSGroupForm(request.POST, instance=osgroup) + repos_form = AddReposToOSReleaseForm(request.POST, instance=osrelease) if repos_form.is_valid(): repos_form.save() messages.info(request, 'Modified Repositories') - return redirect(osgroup.get_absolute_url()) + return 
redirect(osrelease.get_absolute_url()) - repos_form = AddReposToOSGroupForm(instance=osgroup) + repos_form = AddReposToOSReleaseForm(instance=osrelease) + host_count = Host.objects.filter(osvariant__osrelease=osrelease).count() return render(request, - 'operatingsystems/osgroup_detail.html', - {'osgroup': osgroup, 'repos_form': repos_form}, ) + 'operatingsystems/osrelease_detail.html', + {'osrelease': osrelease, + 'repos_form': repos_form, + 'host_count': host_count}) @login_required -def osgroup_delete(request, osgroup_id): - - osgroup = get_object_or_404(OSGroup, id=osgroup_id) +def osrelease_delete(request, osrelease_id): + osrelease = get_object_or_404(OSRelease, id=osrelease_id) if request.method == 'POST': if 'delete' in request.POST: - osgroup.delete() - text = f'OS Group {osgroup!s} has been deleted' + osrelease.delete() + text = f'OS Release {osrelease} has been deleted' messages.info(request, text) - return redirect(reverse('operatingsystems:os_list')) + return redirect(reverse('operatingsystems:osrelease_list')) elif 'cancel' in request.POST: - return redirect(osgroup.get_absolute_url()) + return redirect(osrelease.get_absolute_url()) + + host_count = Host.objects.filter(osvariant__osrelease=osrelease).count() return render(request, - 'operatingsystems/osgroup_delete.html', - {'osgroup': osgroup}, ) + 'operatingsystems/osrelease_delete.html', + {'osrelease': osrelease, + 'host_count': host_count}) + + +@login_required +def os_landing(request): + return render(request, 'operatingsystems/os_landing.html') -class OSViewSet(viewsets.ModelViewSet): +class OSVariantViewSet(viewsets.ModelViewSet): """ - API endpoint that allows operating systems to be viewed or edited. + API endpoint that allows operating system variants to be viewed or edited. 
""" - queryset = OS.objects.all() - serializer_class = OSSerializer + queryset = OSVariant.objects.all() + serializer_class = OSVariantSerializer filterset_fields = ['name'] -class OSGroupViewSet(viewsets.ModelViewSet): +class OSReleaseViewSet(viewsets.ModelViewSet): """ - API endpoint that allows operating system groups to be viewed or edited. + API endpoint that allows operating system releases to be viewed or edited. """ - queryset = OSGroup.objects.all() - serializer_class = OSGroupSerializer + queryset = OSRelease.objects.all() + serializer_class = OSReleaseSerializer filterset_fields = ['name'] diff --git a/packages/admin.py b/packages/admin.py index 4a782eb9..979ba779 100644 --- a/packages/admin.py +++ b/packages/admin.py @@ -16,16 +16,17 @@ # along with Patchman. If not, see from django.contrib import admin -from packages.models import Package, PackageName, \ - PackageUpdate, Erratum, ErratumReference +from packages.models import Package, PackageName, PackageUpdate -class ErratumAdmin(admin.ModelAdmin): - readonly_fields = ('packages',) +class PackageAdmin(admin.ModelAdmin): + readonly_fields = ('name',) -admin.site.register(Package) +class PackageUpdateAdmin(admin.ModelAdmin): + readonly_fields = ('oldpackage', 'newpackage') + + +admin.site.register(Package, PackageAdmin) admin.site.register(PackageName) -admin.site.register(PackageUpdate) -admin.site.register(Erratum, ErratumAdmin) -admin.site.register(ErratumReference) +admin.site.register(PackageUpdate, PackageUpdateAdmin) diff --git a/packages/migrations/0002_auto_20250207_1319.py b/packages/migrations/0002_auto_20250207_1319.py new file mode 100644 index 00000000..1563d139 --- /dev/null +++ b/packages/migrations/0002_auto_20250207_1319.py @@ -0,0 +1,36 @@ +# Generated by Django 3.2.25 on 2025-02-07 13:19 + +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', 
'0002_delete_erratum_delete_erratumreference'), + ] + + operations = [ + migrations.CreateModel( + name='PackageCategory', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('name', models.CharField(max_length=255, unique=True)), + ], + options={ + 'verbose_name': 'Package Category', + 'verbose_name_plural': 'Package Categories', + 'ordering': ('name',), + }, + ), + migrations.AlterField( + model_name='package', + name='packagetype', + field=models.CharField(blank=True, choices=[('R', 'rpm'), ('D', 'deb'), ('A', 'arch'), ('G', 'gentoo'), ('U', 'unknown')], max_length=1, null=True), + ), + migrations.AddField( + model_name='package', + name='category', + field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='packages.packagecategory'), + ), + ] diff --git a/packages/migrations/0002_delete_erratum_delete_erratumreference.py b/packages/migrations/0002_delete_erratum_delete_erratumreference.py new file mode 100644 index 00000000..418bf4e5 --- /dev/null +++ b/packages/migrations/0002_delete_erratum_delete_erratumreference.py @@ -0,0 +1,19 @@ +# Generated by Django 4.2.18 on 2025-02-04 23:41 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0001_initial'), + ] + + operations = [ + migrations.DeleteModel( + name='Erratum', + ), + migrations.DeleteModel( + name='ErratumReference', + ), + ] diff --git a/packages/migrations/0003_auto_20250207_1746.py b/packages/migrations/0003_auto_20250207_1746.py new file mode 100644 index 00000000..bf97bde7 --- /dev/null +++ b/packages/migrations/0003_auto_20250207_1746.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.25 on 2025-02-07 17:46 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('arch', '0001_initial'), + ('packages', '0002_auto_20250207_1319'), + ] + + operations = [ + migrations.AlterUniqueTogether( + 
name='package', + unique_together={('name', 'epoch', 'version', 'release', 'arch', 'packagetype', 'category')}, + ), + ] diff --git a/packages/migrations/0004_alter_package_options_alter_packagecategory_options_and_more.py b/packages/migrations/0004_alter_package_options_alter_packagecategory_options_and_more.py new file mode 100644 index 00000000..7ed04ab6 --- /dev/null +++ b/packages/migrations/0004_alter_package_options_alter_packagecategory_options_and_more.py @@ -0,0 +1,25 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0003_auto_20250207_1746'), + ] + + operations = [ + migrations.AlterModelOptions( + name='package', + options={'ordering': ['name', 'epoch', 'version', 'release', 'arch']}, + ), + migrations.AlterModelOptions( + name='packagecategory', + options={'ordering': ['name'], 'verbose_name': 'Package Category', 'verbose_name_plural': 'Package Categories'}, + ), + migrations.AlterModelOptions( + name='packagename', + options={'ordering': ['name'], 'verbose_name': 'Package', 'verbose_name_plural': 'Packages'}, + ), + ] diff --git a/packages/migrations/0005_alter_package_packagetype.py b/packages/migrations/0005_alter_package_packagetype.py new file mode 100644 index 00000000..06a0ecec --- /dev/null +++ b/packages/migrations/0005_alter_package_packagetype.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-10 17:36 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('packages', '0004_alter_package_options_alter_packagecategory_options_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='package', + name='packagetype', + field=models.CharField(blank=True, choices=[('R', 'rpm'), ('D', 'deb'), ('A', 'pkgbuild'), ('G', 'ebuild'), ('U', 'unknown')], max_length=1, null=True), + ), + ] diff --git a/packages/models.py b/packages/models.py index 
560d0efb..74a83c0c 100644 --- a/packages/models.py +++ b/packages/models.py @@ -24,7 +24,7 @@ from rpm import labelCompare from debian.debian_support import Version, version_compare -from arch.models import PackageArchitecture, MachineArchitecture +from arch.models import PackageArchitecture from packages.managers import PackageManager @@ -35,13 +35,26 @@ class PackageName(models.Model): class Meta: verbose_name = 'Package' verbose_name_plural = 'Packages' - ordering = ('name',) + ordering = ['name'] def __str__(self): return self.name def get_absolute_url(self): - return reverse('packages:package_detail', args=[self.name]) + return reverse('packages:package_name_detail', args=[self.name]) + + +class PackageCategory(models.Model): + + name = models.CharField(unique=True, max_length=255) + + class Meta: + verbose_name = 'Package Category' + verbose_name_plural = 'Package Categories' + ordering = ['name'] + + def __str__(self): + return self.name class Package(models.Model): @@ -49,12 +62,14 @@ class Package(models.Model): RPM = 'R' DEB = 'D' ARCH = 'A' + GENTOO = 'G' UNKNOWN = 'U' PACKAGE_TYPES = ( (RPM, 'rpm'), (DEB, 'deb'), - (ARCH, 'arch'), + (ARCH, 'pkgbuild'), + (GENTOO, 'ebuild'), (UNKNOWN, 'unknown'), ) @@ -63,37 +78,40 @@ class Package(models.Model): version = models.CharField(max_length=255) release = models.CharField(max_length=255, blank=True, null=True) arch = models.ForeignKey(PackageArchitecture, on_delete=models.CASCADE) - packagetype = models.CharField(max_length=1, - choices=PACKAGE_TYPES, - blank=True, - null=True) + packagetype = models.CharField(max_length=1, choices=PACKAGE_TYPES, blank=True, null=True) + category = models.ForeignKey(PackageCategory, blank=True, null=True, on_delete=models.SET_NULL) description = models.TextField(blank=True, null=True) url = models.URLField(max_length=255, blank=True, null=True) objects = PackageManager() class Meta: - ordering = ('name', 'epoch', 'version', 'release', 'arch') - unique_together = ( - 'name', 
'epoch', 'version', 'release', 'arch', 'packagetype',) + ordering = ['name', 'epoch', 'version', 'release', 'arch'] + unique_together = ['name', 'epoch', 'version', 'release', 'arch', 'packagetype', 'category'] def __str__(self): if self.epoch: - epo = f'{self.epoch!s}:' + epo = f'{self.epoch}:' else: epo = '' if self.release: - rel = f'-{self.release!s}' + rel = f'-{self.release}' else: rel = '' - return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + if self.packagetype == self.GENTOO: + return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype in [self.DEB, self.ARCH]: + return f'{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype == self.RPM: + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' + else: + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' def get_absolute_url(self): - return self.name.get_absolute_url() + return reverse('packages:package_detail', args=[self.id]) def __key(self): - return (self.name, self.epoch, self.version, self.release, self.arch, - self.packagetype) + return (self.name, self.epoch, self.version, self.release, self.arch, self.packagetype, self.category) def __eq__(self, other): return self.__key() == other.__key() @@ -122,7 +140,7 @@ def _version_string_deb_arch(self): return (epoch + version + release) def get_version_string(self): - if self.packagetype == 'R': + if self.packagetype == 'R' or self.packagetype == 'G': return self._version_string_rpm() elif self.packagetype == 'D' or self.packagetype == 'A': return self._version_string_deb_arch() @@ -143,6 +161,9 @@ def compare_version(self, other): vs = Version(self.get_version_string()) vo = Version(other.get_version_string()) return version_compare(vs, vo) + elif self.packagetype == 'G' and other.packagetype == 'G': + return 
labelCompare(self.get_version_string(), + other.get_version_string()) def repo_count(self): from repos.models import Repository @@ -152,32 +173,39 @@ def repo_count(self): class PackageString(models.Model): - class Meta: - managed = False - name = models.CharField(max_length=255) version = models.CharField(max_length=255) epoch = models.CharField(max_length=255, blank=True, null=True) release = models.CharField(max_length=255, blank=True, null=True) arch = models.CharField(max_length=255) packagetype = models.CharField(max_length=1, blank=True, null=True) + category = models.CharField(max_length=255, blank=True, null=True) description = models.TextField(blank=True, null=True) url = models.URLField(max_length=255, blank=True, null=True) + class Meta: + managed = False + def __str__(self): if self.epoch: - epo = f'{self.epoch!s}:' + epo = f'{self.epoch}:' else: epo = '' if self.release: - rel = f'-{self.release!s}' + rel = f'-{self.release}' else: rel = '' - return f'{self.name!s}-{epo!s}{self.version!s}{rel!s}-{self.arch!s}' + if self.packagetype == Package.GENTOO: + return f'{self.category}/{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype in [Package.DEB, Package.ARCH]: + return f'{self.name}_{epo}{self.version}{rel}_{self.arch}.{self.get_packagetype_display()}' + elif self.packagetype == Package.RPM: + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' + else: + return f'{self.name}-{epo}{self.version}{rel}-{self.arch}.{self.get_packagetype_display()}' def __key(self): - return (self.name, self.epoch, self.version, self.release, self.arch, - self.packagetype) + return (self.name, self.epoch, self.version, self.release, self.arch, self.packagetype, self.category) def __eq__(self, other): return self.__key() == other.__key() @@ -193,52 +221,16 @@ def __hash__(self): class PackageUpdate(models.Model): - oldpackage = models.ForeignKey(Package, - on_delete=models.CASCADE, 
- related_name='oldpackage') - newpackage = models.ForeignKey(Package, - on_delete=models.CASCADE, - related_name='newpackage') + oldpackage = models.ForeignKey(Package, on_delete=models.CASCADE, related_name='oldpackage') + newpackage = models.ForeignKey(Package, on_delete=models.CASCADE, related_name='newpackage') security = models.BooleanField(default=False) class Meta: - unique_together = (('oldpackage', 'newpackage', 'security')) + unique_together = ['oldpackage', 'newpackage', 'security'] def __str__(self): if self.security: update_type = 'Security' else: update_type = 'Bugfix' - return f'{self.oldpackage!s} -> {self.newpackage!s} ({update_type!s})' - - -class ErratumReference(models.Model): - - url = models.URLField(max_length=255) - - def __str__(self): - return self.url - - -class Erratum(models.Model): - - name = models.CharField(max_length=255) - etype = models.CharField(max_length=255) - issue_date = models.DateTimeField() - synopsis = models.CharField(max_length=255) - packages = models.ManyToManyField(Package, blank=True) - arches = models.ManyToManyField(MachineArchitecture, blank=True) - from operatingsystems.models import OSGroup - releases = models.ManyToManyField(OSGroup, blank=True) - references = models.ManyToManyField(ErratumReference, blank=True) - - class Meta: - verbose_name = 'Erratum' - verbose_name_plural = 'Errata' - - def __str__(self): - text = f'{self.name!s} {self.issue_date!s} ({self.etype!s}) : ' - text += f'{self.packages.count()!s} packages, ' - text += f'{self.arches.count()!s} arches, ' - text += f'{self.releases.count()!s} releases' - return text + return f'{self.oldpackage} -> {self.newpackage} ({update_type})' diff --git a/packages/serializers.py b/packages/serializers.py index 8664a40f..902cb3e0 100644 --- a/packages/serializers.py +++ b/packages/serializers.py @@ -16,8 +16,7 @@ from rest_framework import serializers -from packages.models import PackageName, Package, PackageUpdate, \ - Erratum, ErratumReference +from 
packages.models import PackageName, Package, PackageUpdate class PackageNameSerializer(serializers.HyperlinkedModelSerializer): @@ -36,16 +35,3 @@ class PackageUpdateSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = PackageUpdate fields = ('id', 'oldpackage', 'newpackage', 'security') - - -class ErratumSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = Erratum - fields = ('id', 'name', 'etype', 'issue_date', 'synopsis', 'arches', - 'releases', 'references') - - -class ErratumReferenceSerializer(serializers.HyperlinkedModelSerializer): - class Meta: - model = ErratumReference - fields = ('id', 'url') diff --git a/packages/templates/packages/package_detail.html b/packages/templates/packages/package_detail.html index 631ac113..a982c8af 100644 --- a/packages/templates/packages/package_detail.html +++ b/packages/templates/packages/package_detail.html @@ -9,30 +9,28 @@ {% block content %}
    - {% if allversions %} - - - - - - - - - - - {% for version in allversions %} - - - - - - - - {% endfor %} -
    EpochVersionReleaseArchTypeRepositoriesHosts
    {{ version.epoch }} {{ version.version }} {{ version.release }} {{ version.arch }} {{ version.get_packagetype_display }} Available from {{ version.repo_count }} Repositories Installed on {{ version.host_set.count }} Hosts
    - {% else %} - No versions of this Package exist. - {% endif %} + + + + + + + + + + + + + + + + + + + + + +
    EpochVersionReleaseArchTypeRepositoriesHostsErrata
    {{ package.epoch }} {{ package.version }} {{ package.release }} {{ package.arch }} {{ package.get_packagetype_display }} Available from {{ package.repo_count }} Repositories Installed on {{ package.host_set.count }} Hosts Affected by {{ package.affected_by_erratum.count }} Errata Provides fix in {{ package.provides_fix_in_erratum.count }} Errata
    See All Versions of this Package
    {% endblock %} diff --git a/packages/templates/packages/package_name_detail.html b/packages/templates/packages/package_name_detail.html new file mode 100644 index 00000000..5cef00ec --- /dev/null +++ b/packages/templates/packages/package_name_detail.html @@ -0,0 +1,43 @@ +{% extends "base.html" %} + +{% block page_title %}Package - {{ package }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Packages
  • {{ package }}
  • {% endblock %} + +{% block content_title %} Package - {{ package }} {% endblock %} + +{% block content %} + +
    + {% if allversions %} + + + + + + + + + + + + + {% for version in allversions %} + + + + + + + + + + + {% endfor %} +
    PackageEpochVersionReleaseArchTypeRepositoriesHostsErrata
    {{ version }} {{ version.epoch }} {{ version.version }} {{ version.release }} {{ version.arch }} {{ version.get_packagetype_display }} Available from {{ version.repo_count }} Repositories Installed on {{ version.host_set.count }} Hosts Affected by {{ version.affected_by_erratum.count }} Errata Provides fix in {{ version.provides_fix_in_erratum.count }} Errata
    + {% else %} + No versions of this Package exist. + {% endif %} +
    + +{% endblock %} diff --git a/packages/templates/packages/package_name_list.html b/packages/templates/packages/package_name_list.html new file mode 100644 index 00000000..7288b519 --- /dev/null +++ b/packages/templates/packages/package_name_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}Packages{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Packages
  • {% endblock %} + +{% block content_title %} Packages {% endblock %} diff --git a/packages/templates/packages/package_name_table.html b/packages/templates/packages/package_name_table.html new file mode 100644 index 00000000..39977d96 --- /dev/null +++ b/packages/templates/packages/package_name_table.html @@ -0,0 +1,17 @@ +{% load common %} + + + + + + + + + {% for packagename in object_list %} + + + + + {% endfor %} + +
    PackageVersions available
    {{ packagename }}{{ packagename.package_set.count }}
    diff --git a/packages/templates/packages/package_table.html b/packages/templates/packages/package_table.html index 39977d96..06316521 100644 --- a/packages/templates/packages/package_table.html +++ b/packages/templates/packages/package_table.html @@ -2,15 +2,28 @@ - - + + + + + + + + + - {% for packagename in object_list %} + {% for package in object_list %} - - + + + + + + + + {% endfor %} diff --git a/packages/urls.py b/packages/urls.py index c2f136dc..bc027807 100644 --- a/packages/urls.py +++ b/packages/urls.py @@ -22,6 +22,9 @@ app_name = 'packages' urlpatterns = [ - path('', views.package_list, name='package_list'), - path('/', views.package_detail, name='package_detail'), + path('', views.package_name_list, name='package_name_list'), + path('name/', views.package_name_list, name='package_name_list'), + path('name//', views.package_name_detail, name='package_name_detail'), + path('id/', views.package_list, name='package_list'), + path('id//', views.package_detail, name='package_detail'), ] diff --git a/packages/utils.py b/packages/utils.py index cb408993..9b098225 100644 --- a/packages/utils.py +++ b/packages/utils.py @@ -16,17 +16,62 @@ # along with Patchman. 
If not, see import re -from defusedxml.lxml import _etree as etree -from django.conf import settings from django.core.exceptions import MultipleObjectsReturned -from django.db import IntegrityError, DatabaseError, transaction +from django.db import IntegrityError, transaction -from util import bunzip2, get_url, download_url, get_sha1 -from packages.models import ErratumReference, PackageName, \ - Package, PackageUpdate -from arch.models import MachineArchitecture, PackageArchitecture -from patchman.signals import error_message, progress_info_s, progress_update_s +from arch.models import PackageArchitecture +from packages.models import PackageName, Package, PackageUpdate, PackageCategory, PackageString +from patchman.signals import error_message, info_message, warning_message + + +def convert_package_to_packagestring(package): + """ Convert a Package object to a PackageString object + """ + name = package.name.name + arch = package.arch.name + if package.category: + category = package.category.name + else: + category = None + + string_package = PackageString( + name=name, + epoch=package.epoch, + version=package.version, + release=package.release, + arch=arch, + packagetype=package.packagetype, + category=category, + ) + return string_package + + +def convert_packagestring_to_package(strpackage): + """ Convert a PackageString object to a Package object + """ + name, created = PackageName.objects.get_or_create(name=strpackage.name.lower()) + epoch = strpackage.epoch + version = strpackage.version + release = strpackage.release + arch, created = PackageArchitecture.objects.get_or_create(name=strpackage.arch) + packagetype = strpackage.packagetype + if strpackage.category: + category, created = PackageCategory.objects.get_or_create(name=strpackage.category) + else: + category = None + + with transaction.atomic(): + package, created = Package.objects.get_or_create( + name=name, + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype=packagetype, 
+ category=category, + ) + return package def find_evr(s): @@ -62,27 +107,36 @@ def find_version(s, epoch, release): """ Given a package version string, return the version """ try: - es = f'{epoch!s}:' + es = f'{epoch}:' e = s.index(es) + len(epoch) + 1 except ValueError: e = 0 try: - rs = f'-{release!s}' + rs = f'-{release}' r = s.index(rs) except ValueError: r = len(s) return s[e:r] -def parse_package_string(pkg_str): - """ Parse a package string and return - name, epoch, ver, release, dist, arch +def parse_debian_package_string(pkg_str): + """ Parse a debian package string and return + name, epoch, ver, release, arch """ + parts = pkg_str.split('_') + name = parts[0] + full_version = parts[1] + arch = parts[2] + epoch, ver, rel = find_evr(full_version) + return name, epoch, ver, rel, None, arch + - for suffix in ['rpm', 'deb']: - pkg_str = re.sub(f'.{suffix}$', '', pkg_str) - pkg_re = re.compile('(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa - m = pkg_re.match(pkg_str) +def parse_redhat_package_string(pkg_str): + """ Parse a redhat package string and return + name, epoch, ver, release, dist, arch + """ + rpm_pkg_re = re.compile(r'(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa + m = rpm_pkg_re.match(pkg_str) if m: name, epoch, ver, rel, dist, arch = m.groups() else: @@ -94,169 +148,22 @@ def parse_package_string(pkg_str): return name, epoch, ver, rel, dist, arch -def update_errata(force=False): - """ Update CentOS errata from https://cefs.steve-meier.de/ - and mark packages that are security updates - """ - data = download_errata_checksum() - expected_checksum = parse_errata_checksum(data) - data = download_errata() - actual_checksum = get_sha1(data) - if actual_checksum != expected_checksum: - e = 'CEFS checksum did not match, skipping errata parsing' - error_message.send(sender=None, text=e) - else: - if data: - parse_errata(bunzip2(data), force) - - -def download_errata_checksum(): - """ Download CentOS errata checksum from 
https://cefs.steve-meier.de/ - """ - res = get_url('https://cefs.steve-meier.de/errata.latest.sha1') - return download_url(res, 'Downloading Errata Checksum:') - - -def download_errata(): - """ Download CentOS errata from https://cefs.steve-meier.de/ - """ - res = get_url('https://cefs.steve-meier.de/errata.latest.xml.bz2') - return download_url(res, 'Downloading CentOS Errata:') - - -def parse_errata_checksum(data): - """ Parse the errata checksum and return the bz2 checksum - """ - for line in data.decode('utf-8').splitlines(): - if line.endswith('errata.latest.xml.bz2'): - return line.split()[0] - - -def parse_errata(data, force): - """ Parse CentOS errata from https://cefs.steve-meier.de/ - """ - result = etree.XML(data) - errata_xml = result.findall('*') - elen = len(errata_xml) - ptext = f'Processing {elen!s} Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) - for i, child in enumerate(errata_xml): - progress_update_s.send(sender=None, index=i + 1) - if not check_centos_release(child.findall('os_release')): - continue - e = parse_errata_tag(child.tag, child.attrib, force) - if e is not None: - parse_errata_children(e, child.getchildren()) - - -def parse_errata_tag(name, attribs, force): - """ Parse all tags that contain errata. If the erratum already exists, - we assume that it already has all refs, packages, releases and arches. 
- """ - e = None - if name.startswith('CE'): - issue_date = attribs['issue_date'] - references = attribs['references'] - synopsis = attribs['synopsis'] - if name.startswith('CEBA'): - etype = 'bugfix' - elif name.startswith('CESA'): - etype = 'security' - elif name.startswith('CEEA'): - etype = 'enhancement' - e = create_erratum(name=name, - etype=etype, - issue_date=issue_date, - synopsis=synopsis, - force=force) - if e is not None: - add_erratum_refs(e, references) - return e - - -def parse_errata_children(e, children): - """ Parse errata children to obtain architecture, release and packages - """ - for c in children: - if c.tag == 'os_arch': - m_arches = MachineArchitecture.objects.all() - with transaction.atomic(): - m_arch, c = m_arches.get_or_create(name=c.text) - e.arches.add(m_arch) - elif c.tag == 'os_release': - from operatingsystems.models import OSGroup - osgroups = OSGroup.objects.all() - osgroup_name = f'CentOS {c.text!s}' - with transaction.atomic(): - osgroup, c = osgroups.get_or_create(name=osgroup_name) - e.releases.add(osgroup) - elif c.tag == 'packages': - pkg_str = c.text.replace('.rpm', '') - pkg_re = re.compile('(\S+)-(?:(\d*):)?(.*)-(~?\w+)[.+]?(~?\S+)?\.(\S+)$') # noqa - m = pkg_re.match(pkg_str) - if m: - name, epoch, ver, rel, dist, arch = m.groups() - else: - e = 'Error parsing errata: ' - e += f'could not parse package "{pkg_str!s}"' - error_message.send(sender=None, text=e) - continue - if dist: - rel = f'{rel!s}.{dist!s}' - p_type = Package.RPM - pkg = get_or_create_package(name, epoch, ver, rel, arch, p_type) - e.packages.add(pkg) - - -def check_centos_release(releases_xml): - """ Check if we care about the release that the erratum affects +def parse_package_string(pkg_str): + """ Parse a package string and return + name, epoch, ver, release, dist, arch """ - releases = set() - for release in releases_xml: - releases.add(int(release.text)) - if hasattr(settings, 'MIN_CENTOS_RELEASE') and \ - isinstance(settings.MIN_CENTOS_RELEASE, 
int): - min_release = settings.MIN_CENTOS_RELEASE + if pkg_str.endswith('.deb'): + return parse_debian_package_string(pkg_str.removesuffix('.deb')) + elif pkg_str.endswith('.rpm'): + return parse_redhat_package_string(pkg_str.removesuffix('.rpm')) else: - # defaults to CentOS 6 - min_release = 6 - wanted_release = False - for release in releases: - if release >= min_release: - wanted_release = True - return wanted_release - - -def create_erratum(name, etype, issue_date, synopsis, force=False): - """ Create an Erratum object. Returns the object or None if it already - exists. To force update the erratum, set force=True - """ - from packages.models import Erratum - errata = Erratum.objects.all() - with transaction.atomic(): - e, c = errata.get_or_create(name=name, - etype=etype, - issue_date=issue_date, - synopsis=synopsis) - if c or force: - return e - - -def add_erratum_refs(e, references): - """ Add references to an Erratum object - """ - for reference in references.split(' '): - erratarefs = ErratumReference.objects.all() - with transaction.atomic(): - er, c = erratarefs.get_or_create(url=reference) - e.references.add(er) + return parse_redhat_package_string(pkg_str) def get_or_create_package(name, epoch, version, release, arch, p_type): """ Get or create a Package object. Returns the object. 
Returns None if the package is the pseudo package gpg-pubkey, or if it cannot create it """ - package = None name = name.lower() if name == 'gpg-pubkey': return @@ -264,45 +171,32 @@ def get_or_create_package(name, epoch, version, release, arch, p_type): if epoch in [None, 0, '0']: epoch = '' - try: - with transaction.atomic(): - package_names = PackageName.objects.all() - p_name, c = package_names.get_or_create(name=name) - except IntegrityError as e: - error_message.send(sender=None, text=e) - p_name = package_names.get(name=name) - except DatabaseError as e: - error_message.send(sender=None, text=e) - - package_arches = PackageArchitecture.objects.all() + package_name, c = PackageName.objects.get_or_create(name=name) + package_arch, c = PackageArchitecture.objects.get_or_create(name=arch) with transaction.atomic(): - p_arch, c = package_arches.get_or_create(name=arch) - - packages = Package.objects.all() - potential_packages = packages.filter( - name=p_name, - arch=p_arch, - version=version, - release=release, - packagetype=p_type, - ).order_by('-epoch') - if potential_packages.exists(): - package = potential_packages[0] - if epoch and package.epoch != epoch: - package.epoch = epoch - with transaction.atomic(): - package.save() - else: try: - with transaction.atomic(): - package = packages.create(name=p_name, - arch=p_arch, - epoch=epoch, - version=version, - release=release, - packagetype=p_type) - except DatabaseError as e: - error_message.send(sender=None, text=e) + package, c = Package.objects.get_or_create( + name=package_name, + arch=package_arch, + epoch=epoch, + version=version, + release=release, + packagetype=p_type, + ) + except MultipleObjectsReturned: + packages = Package.objects.filter( + name=package_name, + arch=package_arch, + epoch=epoch, + version=version, + release=release, + packagetype=p_type, + ) + package = packages.first() + # TODO this should handle gentoo package categories too, otherwise we may be deleting packages + # that should be 
kept + warning_message.send(sender=None, text=f'Deleting duplicate packages: {packages.exclude(id=package.id)}') + packages.exclude(id=package.id).delete() return package @@ -310,7 +204,6 @@ def get_or_create_package_update(oldpackage, newpackage, security): """ Get or create a PackageUpdate object. Returns the object. Returns None if it cannot be created """ - updates = PackageUpdate.objects.all() # see if any version of this update exists # if it's already marked as a security update, leave it that way # if not, mark it as a security update if security==True @@ -319,72 +212,131 @@ def get_or_create_package_update(oldpackage, newpackage, security): # very likely to happen. if it does, we err on the side of caution # and mark it as the security update try: - update = updates.get( - oldpackage=oldpackage, - newpackage=newpackage - ) + update = PackageUpdate.objects.get(oldpackage=oldpackage, newpackage=newpackage) except PackageUpdate.DoesNotExist: update = None except MultipleObjectsReturned: e = 'Error: MultipleObjectsReturned when attempting to add package \n' e += f'update with oldpackage={oldpackage} | newpackage={newpackage}:' error_message.send(sender=None, text=e) - updates = updates.filter( - oldpackage=oldpackage, - newpackage=newpackage - ) + updates = PackageUpdate.objects.filter(oldpackage=oldpackage, newpackage=newpackage) for update in updates: - e = str(update) - error_message.send(sender=None, text=e) + error_message.send(sender=None, text=str(update)) return try: if update: if security and not update.security: update.security = True - with transaction.atomic(): - update.save() + update.save() else: - with transaction.atomic(): - update, c = updates.get_or_create( - oldpackage=oldpackage, - newpackage=newpackage, - security=security) - except IntegrityError as e: - error_message.send(sender=None, text=e) - update = updates.get(oldpackage=oldpackage, - newpackage=newpackage, - security=security) - except DatabaseError as e: - 
error_message.send(sender=None, text=e) + update, c = PackageUpdate.objects.get_or_create( + oldpackage=oldpackage, + newpackage=newpackage, + security=security, + ) + except IntegrityError: + update = PackageUpdate.objects.get(oldpackage=oldpackage, newpackage=newpackage, security=security) return update -def mark_errata_security_updates(): - """ For each set of erratum packages, modify any PackageUpdate that - should be marked as a security update. +def get_matching_packages(name, epoch, version, release, p_type, arch=None): + """ Get packages matching the name, epoch, version, release, and package_type + Arch can be omitted if unknown + Returns the matching packages or an empty list """ - package_updates = PackageUpdate.objects.all() - from packages.models import Erratum - errata = Erratum.objects.all() - elen = Erratum.objects.count() - ptext = f'Scanning {elen!s} Errata:' - progress_info_s.send(sender=None, ptext=ptext, plen=elen) - for i, erratum in enumerate(errata): - progress_update_s.send(sender=None, index=i + 1) - if erratum.etype == 'security': - for package in erratum.packages.all(): - affected_updates = package_updates.filter( - newpackage=package, - security=False - ) - for affected_update in affected_updates: - if not affected_update.security: - affected_update.security = True - try: - with transaction.atomic(): - affected_update.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - # a version of this update already exists that is - # marked as a security update, so delete this one - affected_update.delete() + try: + package_name = PackageName.objects.get(name=name) + except PackageName.DoesNotExist: + return [] + if arch: + if not isinstance(arch, PackageArchitecture): + try: + arch = PackageArchitecture.objects.get_or_create(name=arch) + except PackageArchitecture.DoesNotExist: + return [] + packages = Package.objects.filter( + epoch=epoch, + name=package_name, + version=version, + release=release, + arch=arch, + 
packagetype=p_type, + ) + return packages + else: + packages = Package.objects.filter( + epoch=epoch, + name=package_name, + version=version, + release=release, + packagetype=p_type, + ) + return packages + + +def clean_packageupdates(): + """ Removes PackageUpdate objects that are no longer linked to any hosts + """ + package_updates = list(PackageUpdate.objects.all()) + for update in package_updates: + if update.host_set.count() == 0: + text = f'Removing unused PackageUpdate {update}' + info_message.send(sender=None, text=text) + update.delete() + for duplicate in package_updates: + if update.oldpackage == duplicate.oldpackage and update.newpackage == duplicate.newpackage and \ + update.security == duplicate.security and update.id != duplicate.id: + text = f'Removing duplicate PackageUpdate: {update}' + info_message.send(sender=None, text=text) + for host in duplicate.host_set.all(): + host.updates.remove(duplicate) + host.updates.add(update) + duplicate.delete() + + +def clean_packages(remove_duplicates=False): + """ Remove packages that are no longer in use + Optionally check for duplicate packages and remove those too + """ + packages = Package.objects.filter( + mirror__isnull=True, + host__isnull=True, + affected_by_erratum__isnull=True, + provides_fix_in_erratum__isnull=True, + module__isnull=True, + ) + plen = packages.count() + if plen == 0: + info_message.send(sender=None, text='No orphaned Packages found.') + else: + info_message.send(sender=None, text=f'Removing {plen} orphaned Packages') + packages.delete() + if remove_duplicates: + info_message.send(sender=None, text='Checking for duplicate Packages...') + for package in Package.objects.all(): + potential_duplicates = Package.objects.filter( + name=package.name, + arch=package.arch, + epoch=package.epoch, + version=package.version, + release=package.release, + packagetype=package.packagetype, + category=package.category, + ) + if potential_duplicates.count() > 1: + for dupe in potential_duplicates: + 
if dupe.id != package.id: + info_message.send(sender=None, text=f'Removing duplicate Package {dupe}') + dupe.delete() + + +def clean_packagenames(): + """ Remove package names that are no longer in use + """ + names = PackageName.objects.filter(package__isnull=True) + nlen = names.count() + if nlen == 0: + info_message.send(sender=None, text='No orphaned PackageNames found.') + else: + info_message.send(sender=None, text=f'Removing {nlen} orphaned PackageNames') + names.delete() diff --git a/packages/views.py b/packages/views.py index f0ff7ff0..cd53fa6e 100644 --- a/packages/views.py +++ b/packages/views.py @@ -23,26 +23,107 @@ from rest_framework import viewsets from util.filterspecs import Filter, FilterBar -from packages.models import PackageName, Package, PackageUpdate, \ - Erratum, ErratumReference +from packages.models import PackageName, Package, PackageUpdate from arch.models import PackageArchitecture -from packages.serializers import PackageNameSerializer, \ - PackageSerializer, PackageUpdateSerializer, ErratumSerializer, \ - ErratumReferenceSerializer +from packages.serializers import PackageNameSerializer, PackageSerializer, PackageUpdateSerializer @login_required def package_list(request): + packages = Package.objects.select_related() - packages = PackageName.objects.select_related().prefetch_related('package_set') + if 'arch_id' in request.GET: + packages = packages.filter(arch=request.GET['arch_id']).distinct() - if 'arch' in request.GET: - packages = packages.filter( - package__arch=int(request.GET['arch'])).distinct() + if 'packagetype' in request.GET: + packages = packages.filter(packagetype=request.GET['packagetype']).distinct() + + if 'erratum_id' in request.GET: + if request.GET['type'] == 'affected': + packages = packages.filter(affected_by_erratum=request.GET['erratum_id']).distinct() + elif request.GET['type'] == 'fixed': + packages = packages.filter(provides_fix_in_erratum=request.GET['erratum_id']).distinct() + + if 'host' in request.GET: 
+ packages = packages.filter(host__hostname=request.GET['host']).distinct() + + if 'cve_id' in request.GET: + if request.GET['type'] == 'affected': + packages = packages.filter(affected_by_erratum__cves__cve_id=request.GET['cve_id']).distinct() + elif request.GET['type'] == 'fixed': + packages = packages.filter(provides_fix_in_erratum__cves__cve_id=request.GET['cve_id']).distinct() + + if 'mirror_id' in request.GET: + packages = packages.filter(mirror=request.GET['mirror_id']).distinct() + + if 'module_id' in request.GET: + packages = packages.filter(module=request.GET['module_id']).distinct() + + if 'affected_by_errata' in request.GET: + affected_by_errata = request.GET['affected_by_errata'] == 'true' + if affected_by_errata: + packages = packages.filter(erratum__isnull=False) + else: + packages = packages.filter(erratum__isnull=True) + + if 'installed_on_hosts' in request.GET: + installed_on_hosts = request.GET['installed_on_hosts'] == 'true' + if installed_on_hosts: + packages = packages.filter(host__isnull=False) + else: + packages = packages.filter(host__isnull=True) + + if 'available_in_repos' in request.GET: + available_in_repos = request.GET['available_in_repos'] == 'true' + if available_in_repos: + packages = packages.filter(mirror__isnull=False) + else: + packages = packages.filter(mirror__isnull=True) + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(name__name__icontains=term) + query = query & q + packages = packages.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(packages, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + filter_list = [] + filter_list.append(Filter(request, 'Affected by Errata', 'affected_by_errata', {'true': 'Yes', 'false': 'No'})) + filter_list.append(Filter(request, 'Installed on Hosts', 
'installed_on_hosts', {'true': 'Yes', 'false': 'No'})) + filter_list.append(Filter(request, 'Available in Repos', 'available_in_repos', {'true': 'Yes', 'false': 'No'})) + filter_list.append(Filter(request, 'Package Type', 'packagetype', Package.PACKAGE_TYPES)) + filter_list.append(Filter(request, 'Architecture', 'arch_id', PackageArchitecture.objects.all())) + filter_bar = FilterBar(request, filter_list) + + return render(request, + 'packages/package_list.html', + {'page': page, + 'filter_bar': filter_bar, + 'terms': terms}) + + +@login_required +def package_name_list(request): + packages = PackageName.objects.select_related() + + if 'arch_id' in request.GET: + packages = packages.filter(package__arch=request.GET['arch_id']).distinct() if 'packagetype' in request.GET: - packages = packages.filter( - package__packagetype=request.GET['packagetype']).distinct() + packages = packages.filter(package__packagetype=request.GET['packagetype']).distinct() if 'search' in request.GET: terms = request.GET['search'].lower() @@ -65,32 +146,34 @@ def package_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append( - Filter(request, 'arch', PackageArchitecture.objects.all())) -# Disabled due to being a huge slowdown -# filter_list.append( -# Filter( -# request, 'packagetype', -# Package.objects.values_list('packagetype', flat=True).distinct())) + filter_list.append(Filter(request, 'Package Type', 'packagetype', Package.PACKAGE_TYPES)) + filter_list.append(Filter(request, 'Architecture', 'arch_id', PackageArchitecture.objects.all())) filter_bar = FilterBar(request, filter_list) return render(request, - 'packages/package_list.html', + 'packages/package_name_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms, + 'table_template': 'packages/package_name_table.html'}) @login_required -def package_detail(request, packagename): +def package_detail(request, package_id): + package = get_object_or_404(Package, 
id=package_id) + return render(request, + 'packages/package_detail.html', + {'package': package}) + +@login_required +def package_name_detail(request, packagename): package = get_object_or_404(PackageName, name=packagename) allversions = Package.objects.select_related().filter(name=package.id) - return render(request, - 'packages/package_detail.html', + 'packages/package_name_detail.html', {'package': package, - 'allversions': allversions}, ) + 'allversions': allversions}) class PackageNameViewSet(viewsets.ModelViewSet): @@ -125,19 +208,3 @@ class PackageUpdateViewSet(viewsets.ModelViewSet): queryset = PackageUpdate.objects.all() serializer_class = PackageUpdateSerializer filterset_fields = ['oldpackage', 'newpackage', 'security'] - - -class ErratumViewSet(viewsets.ModelViewSet): - """ - API endpoint that allows errata to be viewed or edited. - """ - queryset = Erratum.objects.all() - serializer_class = ErratumSerializer - - -class ErratumReferenceViewSet(viewsets.ModelViewSet): - """ - API endpoint that allows erratum references to be viewed or edited. - """ - queryset = ErratumReference.objects.all() - serializer_class = ErratumReferenceSerializer diff --git a/patchman-client.spec b/patchman-client.spec index 8aeef6fc..68736038 100644 --- a/patchman-client.spec +++ b/patchman-client.spec @@ -8,7 +8,9 @@ License: GPLv3 URL: http://patchman.openbytes.ie Source: %{expand:%%(pwd)} BuildArch: noarch -Requires: curl which coreutils util-linux +Requires: curl which coreutils util-linux gawk + +%define binary_payload w9.gzdio %description patchman-client provides a client that uploads reports to a patchman server diff --git a/patchman/__init__.py b/patchman/__init__.py index 34f6f97e..af122cc6 100644 --- a/patchman/__init__.py +++ b/patchman/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2013-2021 Marcus Furlong +# Copyright 2013-2025 Marcus Furlong # # This file is part of Patchman. # @@ -15,3 +15,9 @@ # along with Patchman. 
If not, see from .receivers import * # noqa + +# This will make sure the app is always imported when +# Django starts so that shared_task will use this app. +from .celery import app as celery_app + +__all__ = ('celery_app',) diff --git a/patchman/celery.py b/patchman/celery.py index f75dd1f0..3c58edc5 100644 --- a/patchman/celery.py +++ b/patchman/celery.py @@ -20,10 +20,6 @@ os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') # noqa from django.conf import settings # noqa - app = Celery('patchman') app.config_from_object('django.conf:settings', namespace='CELERY') -app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) - -if __name__ == '__main__': - app.start() +app.autodiscover_tasks() diff --git a/patchman/receivers.py b/patchman/receivers.py index 9b636017..5ec32cdd 100644 --- a/patchman/receivers.py +++ b/patchman/receivers.py @@ -16,20 +16,20 @@ # along with Patchman. If not, see from colorama import init, Fore, Style +from tqdm import tqdm from django.dispatch import receiver from util import create_pbar, update_pbar, get_verbosity -from patchman.signals import progress_info_s, progress_update_s, \ - info_message, warning_message, error_message, debug_message +from patchman.signals import pbar_start, pbar_update, info_message, warning_message, error_message, debug_message from django.conf import settings init(autoreset=True) -@receiver(progress_info_s) -def progress_info_r(**kwargs): +@receiver(pbar_start) +def pbar_start_receiver(**kwargs): """ Receiver to create a progressbar """ ptext = kwargs.get('ptext') @@ -38,8 +38,8 @@ def progress_info_r(**kwargs): create_pbar(ptext, plen) -@receiver(progress_update_s) -def progress_update_r(**kwargs): +@receiver(pbar_update) +def pbar_update_receiver(**kwargs): """ Receiver to update a progressbar """ index = kwargs.get('index') @@ -53,7 +53,7 @@ def print_info_message(sender=None, **kwargs): """ text = str(kwargs.get('text')) if get_verbosity(): - print(Style.RESET_ALL + Fore.RESET + text) + 
tqdm.write(Style.RESET_ALL + Fore.RESET + text) @receiver(warning_message) @@ -62,7 +62,7 @@ def print_warning_message(**kwargs): """ text = str(kwargs.get('text')) if get_verbosity(): - print(Style.BRIGHT + Fore.YELLOW + text) + tqdm.write(Style.BRIGHT + Fore.YELLOW + text) @receiver(error_message) @@ -71,7 +71,7 @@ def print_error_message(**kwargs): """ text = str(kwargs.get('text')) if text: - print(Style.BRIGHT + Fore.RED + text) + tqdm.write(Style.BRIGHT + Fore.RED + text) @receiver(debug_message) @@ -80,4 +80,4 @@ def print_debug_message(**kwargs): """ text = str(kwargs.get('text')) if get_verbosity() and settings.DEBUG and text: - print(Style.BRIGHT + Fore.BLUE + text) + tqdm.write(Style.BRIGHT + Fore.BLUE + text) diff --git a/patchman/settings.py b/patchman/settings.py index 4a943a2f..557e8c68 100644 --- a/patchman/settings.py +++ b/patchman/settings.py @@ -59,7 +59,7 @@ TIME_ZONE = 'America/NewYork' USE_I18N = True USE_L10N = True -USE_TZ = False +USE_TZ = True DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' @@ -77,20 +77,24 @@ THIRD_PARTY_APPS = [ 'django_extensions', - 'tagging', + 'taggit', 'bootstrap3', 'rest_framework', 'django_filters', + 'celery', + 'django_celery_beat', ] LOCAL_APPS = [ 'arch.apps.ArchConfig', 'domains.apps.DomainsConfig', + 'errata.apps.ErrataConfig', 'hosts.apps.HostsConfig', 'modules.apps.ModulesConfig', 'operatingsystems.apps.OperatingsystemsConfig', 'packages.apps.PackagesConfig', 'repos.apps.ReposConfig', + 'security.apps.SecurityConfig', 'reports.apps.ReportsConfig', 'util.apps.UtilConfig', ] @@ -102,15 +106,9 @@ 'PAGE_SIZE': 100, } -try: - from celery import Celery # noqa -except ImportError: - USE_ASYNC_PROCESSING = False -else: - THIRD_PARTY_APPS += ['celery'] - CELERY_IMPORTS = ['reports.tasks'] - USE_ASYNC_PROCESSING = True - CELERY_BROKER_URL = 'redis://127.0.0.1:6379/0' +TAGGIT_CASE_INSENSITIVE = True + +CELERY_BROKER_URL = 'redis://127.0.0.1:6379/0' LOGIN_REDIRECT_URL = '/patchman/' LOGOUT_REDIRECT_URL = 
'/patchman/login/' diff --git a/patchman/signals.py b/patchman/signals.py index e2f967e3..917a48e4 100644 --- a/patchman/signals.py +++ b/patchman/signals.py @@ -17,8 +17,8 @@ from django.dispatch import Signal -progress_info_s = Signal() -progress_update_s = Signal() +pbar_start = Signal() +pbar_update = Signal() info_message = Signal() warning_message = Signal() error_message = Signal() diff --git a/patchman/sqlite3/base.py b/patchman/sqlite3/base.py new file mode 100644 index 00000000..308e0563 --- /dev/null +++ b/patchman/sqlite3/base.py @@ -0,0 +1,11 @@ +# temporary fix for 'database is locked' error on sqlite3 +# can be removed when using django 5.1 and BEGIN IMMEDIATE in OPTIONS +# see https://blog.pecar.me/django-sqlite-dblock for more details + +from django.db.backends.sqlite3 import base + + +class DatabaseWrapper(base.DatabaseWrapper): + def _start_transaction_under_autocommit(self): + # Acquire a write lock immediately for transactions + self.cursor().execute('BEGIN IMMEDIATE') diff --git a/patchman/static/js/expandable-text.js b/patchman/static/js/expandable-text.js new file mode 100644 index 00000000..0f5861ce --- /dev/null +++ b/patchman/static/js/expandable-text.js @@ -0,0 +1,8 @@ +document.addEventListener('DOMContentLoaded', function() { + const expandableTexts = document.querySelectorAll('.expandable-text'); + expandableTexts.forEach(text => { + text.addEventListener('click', function() { + this.textContent = this.dataset.fullText; + }); + }); +}); diff --git a/patchman/urls.py b/patchman/urls.py index c5ffa847..337d6b63 100644 --- a/patchman/urls.py +++ b/patchman/urls.py @@ -25,10 +25,12 @@ from arch import views as arch_views from domains import views as domain_views +from errata import views as errata_views from hosts import views as host_views from operatingsystems import views as os_views from packages import views as package_views from repos import views as repo_views +from security import views as security_views router = 
routers.DefaultRouter() router.register(r'package-architecture', arch_views.PackageArchitectureViewSet) @@ -36,13 +38,14 @@ router.register(r'domain', domain_views.DomainViewSet) router.register(r'host', host_views.HostViewSet) router.register(r'host-repo', host_views.HostRepoViewSet) -router.register(r'os', os_views.OSViewSet) -router.register(r'os-group', os_views.OSGroupViewSet) +router.register(r'os-variant', os_views.OSVariantViewSet) +router.register(r'os-release', os_views.OSReleaseViewSet) router.register(r'package-name', package_views.PackageNameViewSet) router.register(r'package', package_views.PackageViewSet) router.register(r'package-update', package_views.PackageUpdateViewSet) -router.register(r'erratum', package_views.ErratumViewSet) -router.register(r'erratum-reference', package_views.ErratumReferenceViewSet) +router.register(r'cve', security_views.CVEViewSet) +router.register(r'reference', security_views.ReferenceViewSet), +router.register(r'erratum', errata_views.ErratumViewSet) router.register(r'repo', repo_views.RepositoryViewSet) router.register(r'mirror', repo_views.MirrorViewSet) router.register(r'mirror-package', repo_views.MirrorPackageViewSet) @@ -55,11 +58,13 @@ path('api/', include(router.urls)), path('api-auth/', include('rest_framework.urls', namespace='rest_framework')), # noqa path('', include('util.urls', namespace='util')), + path('errata/', include('errata.urls', namespace='errata')), path('reports/', include('reports.urls', namespace='reports')), path('hosts/', include('hosts.urls', namespace='hosts')), path('packages/', include('packages.urls', namespace='packages')), path('modules/', include('modules.urls', namespace='modules')), path('repos/', include('repos.urls', namespace='repos')), + path('security/', include('security.urls', namespace='security')), path('os/', include('operatingsystems.urls', namespace='operatingsystems')), # noqa ] diff --git a/reports/migrations/0003_remove_report_accessed.py 
b/reports/migrations/0003_remove_report_accessed.py new file mode 100644 index 00000000..406fc6a1 --- /dev/null +++ b/reports/migrations/0003_remove_report_accessed.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-02-27 04:16 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('reports', '0002_report_modules'), + ] + + operations = [ + migrations.RemoveField( + model_name='report', + name='accessed', + ), + ] diff --git a/reports/migrations/0004_migrate_to_tz_aware.py b/reports/migrations/0004_migrate_to_tz_aware.py new file mode 100644 index 00000000..98176510 --- /dev/null +++ b/reports/migrations/0004_migrate_to_tz_aware.py @@ -0,0 +1,18 @@ +from django.db import migrations +from django.utils import timezone + +def make_datetimes_tz_aware(apps, schema_editor): + Report = apps.get_model('reports', 'Report') + for report in Report.objects.all(): + if report.created and timezone.is_naive(report.created): + report.created = timezone.make_aware(report.created, timezone=timezone.get_default_timezone()) + report.save() + +class Migration(migrations.Migration): + dependencies = [ + ('reports', '0003_remove_report_accessed'), + ] + + operations = [ + migrations.RunPython(make_datetimes_tz_aware, migrations.RunPython.noop), + ] diff --git a/reports/migrations/0005_alter_report_options.py b/reports/migrations/0005_alter_report_options.py new file mode 100644 index 00000000..e2626984 --- /dev/null +++ b/reports/migrations/0005_alter_report_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('reports', '0004_migrate_to_tz_aware'), + ] + + operations = [ + migrations.AlterModelOptions( + name='report', + options={'ordering': ['-created'], 'verbose_name_plural': 'Reports'}, + ), + ] diff --git a/reports/models.py b/reports/models.py index b778f212..6818ea23 100644 --- a/reports/models.py 
+++ b/reports/models.py @@ -15,22 +15,16 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -from django.db import models, IntegrityError, DatabaseError, transaction +from django.db import models from django.urls import reverse -from hosts.models import Host -from arch.models import MachineArchitecture -from operatingsystems.models import OS -from domains.models import Domain +from hosts.utils import get_or_create_host from patchman.signals import error_message, info_message -from socket import gethostbyaddr, herror - class Report(models.Model): created = models.DateTimeField(auto_now_add=True) - accessed = models.DateTimeField(auto_now_add=True) host = models.CharField(max_length=255, null=True) domain = models.CharField(max_length=255, null=True) tags = models.CharField(max_length=255, null=True, default='') @@ -51,16 +45,17 @@ class Report(models.Model): class Meta: verbose_name_plural = 'Report' verbose_name_plural = 'Reports' - ordering = ('-created',) + ordering = ['-created'] def __str__(self): - return f"{self.host!s} {self.created.strftime('%c')!s}" + return f"{self.host} {self.created.strftime('%c')}" def get_absolute_url(self): return reverse('reports:report_detail', args=[str(self.id)]) def parse(self, data, meta): - + """ Parse a report and save the object + """ x_real_ip = meta.get('HTTP_X_REAL_IP') x_forwarded_for = meta.get('HTTP_X_FORWARDED_FOR') if x_forwarded_for: @@ -96,101 +91,38 @@ def parse(self, data, meta): fqdn = self.host.split('.', 1) if len(fqdn) == 2: self.domain = fqdn.pop() - - with transaction.atomic(): - self.save() + self.save() def process(self, find_updates=True, verbose=False): """ Process a report and extract os, arch, domain, packages, repos etc """ + if not self.os or not self.kernel or not self.arch: + error_message.send(sender=None, text=f'Error: OS, kernel or arch not sent with report {self.id}') + return - if self.os and self.kernel and self.arch and not 
self.processed: - - oses = OS.objects.all() - with transaction.atomic(): - os, c = oses.get_or_create(name=self.os) - - machine_arches = MachineArchitecture.objects.all() - with transaction.atomic(): - arch, c = machine_arches.get_or_create(name=self.arch) - os.arch = arch - - if not self.domain: - self.domain = 'unknown' - domains = Domain.objects.all() - with transaction.atomic(): - domain, c = domains.get_or_create(name=self.domain) - - if not self.host: - try: - self.host = str(gethostbyaddr(self.report_ip)[0]) - except herror: - self.host = self.report_ip - - hosts = Host.objects.all() - with transaction.atomic(): - host, c = hosts.get_or_create( - hostname=self.host, - defaults={ - 'ipaddress': self.report_ip, - 'arch': arch, - 'os': os, - 'domain': domain, - 'lastreport': self.created, - }) - - host.ipaddress = self.report_ip - host.kernel = self.kernel - host.arch = arch - host.os = os - host.domain = domain - host.lastreport = self.created - host.tags = self.tags - if self.reboot == 'True': - host.reboot_required = True - else: - host.reboot_required = False - try: - with transaction.atomic(): - host.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) - host.check_rdns() + if self.processed: + info_message.send(sender=None, text=f'Report {self.id} has already been processed') + return + + from reports.utils import get_arch, get_os, get_domain + arch = get_arch(self.arch) + osvariant = get_os(self.os, arch) + domain = get_domain(self.domain) + host = get_or_create_host(self, arch, osvariant, domain) + if verbose: + info_message.send(sender=None, text=f'Processing report {self.id} - {self.host}') + + from reports.utils import process_packages, process_repos, process_updates, process_modules + process_repos(report=self, host=host) + process_modules(report=self, host=host) + process_packages(report=self, host=host) + process_updates(report=self, host=host) + + 
self.processed = True + self.save() + + if find_updates: if verbose: - text = 'Processing report ' - text += f'{self.id!s} - {self.host!s}' - info_message.send(sender=None, text=text) - - from reports.utils import process_packages, \ - process_repos, process_updates, process_modules - with transaction.atomic(): - process_repos(report=self, host=host) - with transaction.atomic(): - process_modules(report=self, host=host) - with transaction.atomic(): - process_packages(report=self, host=host) - with transaction.atomic(): - process_updates(report=self, host=host) - - self.processed = True - with transaction.atomic(): - self.save() - - if find_updates: - if verbose: - text = 'Finding updates for report ' - text += f'{self.id!s} - {self.host!s}' - info_message.send(sender=None, text=text) - host.find_updates() - else: - if self.processed: - text = f'Report {self.id!s} ' - text += 'has already been processed' - info_message.send(sender=None, text=text) - else: - text = 'Error: OS, kernel or arch not sent ' - text += f'with report {self.id!s}' - error_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Finding updates for report {self.id} - {self.host}') + host.find_updates() diff --git a/reports/tasks.py b/reports/tasks.py index d205d7c5..db9e4103 100755 --- a/reports/tasks.py +++ b/reports/tasks.py @@ -15,15 +15,38 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -from django.conf import settings +from celery import shared_task +from django.db.utils import OperationalError + +from hosts.models import Host from reports.models import Report +from util import info_message + + +@shared_task(bind=True, autoretry_for=(OperationalError,), retry_backoff=True, retry_kwargs={'max_retries': 5}) +def process_report(self, report_id): + """ Task to process a single report + """ + report = Report.objects.get(id=report_id) + report.process() + + +@shared_task +def process_reports(): + """ Task to process all unprocessed reports + """ + reports = Report.objects.filter(processed=False) + for report in reports: + process_report.delay(report.id) -if settings.USE_ASYNC_PROCESSING: - from celery import shared_task - from patchman.celery import app # noqa - @shared_task - def process_report(report_id): - report = Report.objects.get(id=report_id) - report.process(verbose=True) +@shared_task +def clean_reports_with_no_hosts(): + """ Task to clean processed reports where the host no longer exists + """ + for report in Report.objects.filter(processed=True): + if not Host.objects.filter(hostname=report.host).exists(): + text = f'Deleting report {report.id} for Host `{report.host}` as the host no longer exists' + info_message.send(sender=None, text=text) + report.delete() diff --git a/reports/urls.py b/reports/urls.py index 56965f52..8826cc82 100644 --- a/reports/urls.py +++ b/reports/urls.py @@ -26,5 +26,5 @@ path('upload/', views.upload), path('/', views.report_detail, name='report_detail'), path('/delete/', views.report_delete, name='report_delete'), - path('/process/', views.report_process, name='report_process'), # noqa + path('/process/', views.report_process, name='report_process'), ] diff --git a/reports/utils.py b/reports/utils.py index 7b39aec5..641f90df 100644 --- a/reports/utils.py +++ b/reports/utils.py @@ -17,17 +17,18 @@ import re -from django.db import IntegrityError, DatabaseError, transaction +from django.db import 
IntegrityError -from hosts.models import HostRepo from arch.models import MachineArchitecture, PackageArchitecture +from domains.models import Domain +from hosts.models import HostRepo +from modules.utils import get_or_create_module +from operatingsystems.utils import get_or_create_osrelease, get_or_create_osvariant +from packages.models import Package, PackageCategory +from packages.utils import find_evr, get_or_create_package, get_or_create_package_update, parse_package_string +from patchman.signals import pbar_start, pbar_update, info_message from repos.models import Repository, Mirror, MirrorPackage -from modules.models import Module -from packages.models import Package -from packages.utils import find_evr, get_or_create_package, \ - get_or_create_package_update, parse_package_string -from patchman.signals import progress_info_s, progress_update_s, \ - error_message, info_message +from repos.utils import get_or_create_repo def process_repos(report, host): @@ -38,28 +39,19 @@ def process_repos(report, host): host_repos = HostRepo.objects.filter(host=host) repos = parse_repos(report.repos) - progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]!s} repos', - plen=len(repos)) + pbar_start.send(sender=None, ptext=f'{host} Repos', plen=len(repos)) for i, repo_str in enumerate(repos): repo, priority = process_repo(repo_str, report.arch) if repo: repo_ids.append(repo.id) try: - with transaction.atomic(): - hostrepo, c = host_repos.get_or_create(host=host, - repo=repo) - except IntegrityError as e: - error_message.send(sender=None, text=e) + hostrepo, c = host_repos.get_or_create(host=host, repo=repo) + except IntegrityError: hostrepo = host_repos.get(host=host, repo=repo) - try: - if hostrepo.priority != priority: - hostrepo.priority = priority - with transaction.atomic(): - hostrepo.save() - except IntegrityError as e: - error_message.send(sender=None, text=e) - progress_update_s.send(sender=None, index=i + 1) + if hostrepo.priority != priority: + 
hostrepo.priority = priority + hostrepo.save() + pbar_update.send(sender=None, index=i + 1) for hostrepo in host_repos: if hostrepo.repo_id not in repo_ids: @@ -73,21 +65,13 @@ def process_modules(report, host): module_ids = [] modules = parse_modules(report.modules) - progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]!s} modules', - plen=len(modules)) + pbar_start.send(sender=None, ptext=f'{host} Modules', plen=len(modules)) for i, module_str in enumerate(modules): module = process_module(module_str) if module: module_ids.append(module.id) - try: - with transaction.atomic(): - host.modules.add(module) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) - progress_update_s.send(sender=None, index=i + 1) + host.modules.add(module) + pbar_update.send(sender=None, index=i + 1) for module in host.modules.all(): if module.id not in module_ids: @@ -101,25 +85,16 @@ def process_packages(report, host): package_ids = [] packages = parse_packages(report.packages) - progress_info_s.send(sender=None, - ptext=f'{str(host)[0:25]!s} packages', - plen=len(packages)) + pbar_start.send(sender=None, ptext=f'{host} Packages', plen=len(packages)) for i, pkg_str in enumerate(packages): package = process_package(pkg_str, report.protocol) if package: package_ids.append(package.id) - try: - with transaction.atomic(): - host.packages.add(package) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + host.packages.add(package) else: if pkg_str[0].lower() != 'gpg-pubkey': - text = f'No package returned for {pkg_str!s}' - info_message.send(sender=None, text=text) - progress_update_s.send(sender=None, index=i + 1) + info_message.send(sender=None, text=f'No package returned for {pkg_str}') + pbar_update.send(sender=None, index=i + 1) for package in host.packages.all(): if package.id not in package_ids: @@ 
-157,14 +132,12 @@ def add_updates(updates, host): host.updates.remove(host_update) ulen = len(updates) if ulen > 0: - ptext = f'{str(host)[0:25]!s} updates' - progress_info_s.send(sender=None, ptext=ptext, plen=ulen) - + pbar_start.send(sender=None, ptext=f'{host} Updates', plen=ulen) for i, (u, sec) in enumerate(updates.items()): update = process_update(host, u, sec) if update: host.updates.add(update) - progress_update_s.send(sender=None, index=i + 1) + pbar_update.send(sender=None, index=i + 1) def parse_updates(updates_string, security): @@ -174,7 +147,7 @@ def parse_updates(updates_string, security): updates = {} ulist = updates_string.lower().split() while ulist: - name = f'{ulist[0]!s} {ulist[1]!s} {ulist[2]!s}\n' + name = f'{ulist[0]} {ulist[1]} {ulist[2]}\n' del ulist[:3] updates[name] = security return updates @@ -182,7 +155,7 @@ def parse_updates(updates_string, security): def process_update(host, update_string, security): """ Processes a single sanitized update string and converts to an update - object. Only works if the original package exists. Returns None otherwise + object. Only works if the original package exists. 
Returns None otherwise """ update_str = update_string.split() repo_id = update_str[2] @@ -192,29 +165,26 @@ def process_update(host, update_string, security): p_arch = parts[2] p_epoch, p_version, p_release = find_evr(update_str[1]) - package = get_or_create_package(name=p_name, - epoch=p_epoch, - version=p_version, - release=p_release, - arch=p_arch, - p_type='R') + package = get_or_create_package( + name=p_name, + epoch=p_epoch, + version=p_version, + release=p_release, + arch=p_arch, + p_type=Package.RPM + ) try: repo = Repository.objects.get(repo_id=repo_id) except Repository.DoesNotExist: repo = None if repo: for mirror in repo.mirror_set.all(): - with transaction.atomic(): - MirrorPackage.objects.create(mirror=mirror, package=package) + MirrorPackage.objects.create(mirror=mirror, package=package) - installed_packages = host.packages.filter(name=package.name, - arch=package.arch, - packagetype='R') + installed_packages = host.packages.filter(name=package.name, arch=package.arch, packagetype=Package.RPM) if installed_packages: installed_package = installed_packages[0] - update = get_or_create_package_update(oldpackage=installed_package, - newpackage=package, - security=security) + update = get_or_create_package_update(oldpackage=installed_package, newpackage=package, security=security) return update @@ -223,9 +193,9 @@ def parse_repos(repos_string): """ repos = [] for r in [s for s in repos_string.splitlines() if s]: - repodata = re.findall('\'.*?\'', r) + repodata = re.findall(r"'.*?'", r) for i, rs in enumerate(repodata): - repodata[i] = rs.replace('\'', '') + repodata[i] = rs.replace("'", '') repos.append(repodata) return repos @@ -246,59 +216,50 @@ def process_repo(repo, arch): r_type = Repository.ARCH r_id = repo[2] r_priority = 0 + elif repo[0] == 'gentoo': + r_type = Repository.GENTOO + r_id = repo.pop(2) + r_priority = repo[2] + arch = 'any' if repo[1]: r_name = repo[1] - machine_arches = MachineArchitecture.objects.all() - with transaction.atomic(): - 
r_arch, c = machine_arches.get_or_create(name=arch) + r_arch, c = MachineArchitecture.objects.get_or_create(name=arch) unknown = [] for r_url in repo[3:]: + if r_type == Repository.GENTOO and r_url.startswith('rsync'): + r_url = 'https://api.gentoo.org/mirrors/distfiles.xml' try: - mirror = Mirror.objects.get(url=r_url) + mirror = Mirror.objects.get(url=r_url.strip('/')) except Mirror.DoesNotExist: if repository: - Mirror.objects.create(repo=repository, url=r_url) + Mirror.objects.create(repo=repository, url=r_url.rstrip('/')) else: unknown.append(r_url) else: repository = mirror.repo if not repository: - repositories = Repository.objects.all() - try: - with transaction.atomic(): - repository, c = repositories.get_or_create(name=r_name, - arch=r_arch, - repotype=r_type) - except IntegrityError as e: - error_message.send(sender=None, text=e) - repository = repositories.get(name=r_name, - arch=r_arch, - repotype=r_type) - except DatabaseError as e: - error_message.send(sender=None, text=e) + repository = get_or_create_repo(r_name, r_arch, r_type) if r_id and repository.repo_id != r_id: repository.repo_id = r_id - with transaction.atomic(): - repository.save() + + if r_name and repository.name != r_name: + repository.name = r_name for url in unknown: - Mirror.objects.create(repo=repository, url=url) + Mirror.objects.create(repo=repository, url=url.rstrip('/')) for mirror in Mirror.objects.filter(repo=repository).values('url'): - if mirror['url'].find('cdn.redhat.com') != -1 or \ - mirror['url'].find('nu.novell.com') != -1 or \ - mirror['url'].find('updates.suse.com') != -1: + mirror_url = mirror.get('url') + auth_urls = ['cdn.redhat.com', 'cdn-ubi.redhat.com', 'nu.novell.com', 'updates.suse.com'] + if any(auth_url in mirror_url for auth_url in auth_urls): repository.auth_required = True - with transaction.atomic(): - repository.save() - if mirror['url'].find('security') != -1: + if 'security' in mirror_url: repository.security = True - with transaction.atomic(): - 
repository.save() + repository.save() return repository, r_priority @@ -308,7 +269,7 @@ def parse_modules(modules_string): """ modules = [] for module in modules_string.splitlines(): - module_string = [m for m in module.replace('\'', '').split(' ') if m] + module_string = [m for m in module.replace("'", '').split(' ') if m] if module_string: modules.append(module_string) return modules @@ -321,17 +282,15 @@ def process_module(module_str): m_stream = module_str[1] m_version = module_str[2] m_context = module_str[3] - arch = module_str[4] + m_arch = module_str[4] repo_id = module_str[5] - package_arches = PackageArchitecture.objects.all() - with transaction.atomic(): - m_arch, c = package_arches.get_or_create(name=arch) + arch, c = PackageArchitecture.objects.get_or_create(name=m_arch) try: - m_repo = Repository.objects.get(repo_id=repo_id) + repo = Repository.objects.get(repo_id=repo_id) except Repository.DoesNotExist: - m_repo = None + repo = None packages = set() for pkg_str in module_str[6:]: @@ -340,34 +299,9 @@ def process_module(module_str): package = get_or_create_package(p_name, p_epoch, p_ver, p_rel, p_arch, p_type) packages.add(package) - modules = Module.objects.all() - try: - with transaction.atomic(): - module, c = modules.get_or_create(name=m_name, - stream=m_stream, - version=m_version, - context=m_context, - arch=m_arch, - repo=m_repo) - except IntegrityError as e: - error_message.send(sender=None, text=e) - module = modules.get(name=m_name, - stream=m_stream, - version=m_version, - context=m_context, - arch=m_arch, - repo=m_repo) - except DatabaseError as e: - error_message.send(sender=None, text=e) - + module = get_or_create_module(m_name, m_stream, m_version, m_context, arch, repo) for package in packages: - try: - with transaction.atomic(): - module.packages.add(package) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) + module.packages.add(package) 
return module @@ -376,7 +310,7 @@ def parse_packages(packages_string): """ packages = [] for p in packages_string.splitlines(): - packages.append(p.replace('\'', '').split(' ')) + packages.append(p.replace("'", '').split(' ')) return packages @@ -389,6 +323,7 @@ def process_package(pkg, protocol): arch = 'unknown' name = pkg[0] + p_category = p_repo = None if pkg[1]: epoch = pkg[1] if pkg[2]: @@ -404,8 +339,111 @@ def process_package(pkg, protocol): p_type = Package.RPM elif pkg[5] == 'arch': p_type = Package.ARCH + elif pkg[5] == 'gentoo': + p_type = Package.GENTOO + p_category = pkg[6] + p_repo = pkg[7] else: p_type = Package.UNKNOWN package = get_or_create_package(name, epoch, ver, rel, arch, p_type) + if p_type == Package.GENTOO: + process_gentoo_package(package, name, p_category, p_repo) return package + + +def process_gentoo_package(package, name, category, repo): + """ Processes a single gentoo package + """ + category, created = PackageCategory.objects.get_or_create(name=category) + package.category = category + package.save() + + +def get_arch(arch): + """ Get or create MachineArchitecture from arch + Returns the MachineArchitecture + """ + return MachineArchitecture.objects.get_or_create(name=arch)[0] + + +def get_os(os, arch): + """ Get or create OSRelease and OSVariant from os details + Returns the OSVariant + """ + cpe_name = codename = osrelease_codename = osvariant_codename = None + osrelease_name = osvariant_name = os + + # find cpe_name if it exists + match = re.match(r'(.*) \[(.*)\]', os) + if match: + os = match.group(1) + cpe_name = match.group(2) + + # find codename if it exists + match = re.match(r'(.*) \((.*)\)', os) + if match: + os = match.group(1) + codename = match.group(2) + if os.startswith('AlmaLinux'): + # alma changes the codename with each minor release, so it's useless to us now + osvariant_codename = codename + else: + osrelease_codename = codename + + osrelease_name = os + osvariant_name = os + + if os.startswith('Gentoo'): + 
osrelease_name = 'Gentoo Linux' + cpe_name = 'cpe:2.3:o:gentoo:linux:-:*:*:*:*:*:*:*' + elif os.startswith('Arch'): + cpe_name = 'cpe:2.3:o:archlinux:arch_linux:-:*:*:*:*:*:*:*' + elif os.startswith('Debian'): + major, minor = os.split(' ')[1].split('.') + osrelease_name = f'Debian {major}' + cpe_name = f'cpe:2.3:o:debian:debian_linux:{major}.0:*:*:*:*:*:*:*' + elif os.startswith('Ubuntu'): + lts = '' + if 'LTS' in os: + lts = ' LTS' + major, minor, patch = os.split(' ')[1].split('.') + ubuntu_version = f'{major}_{minor}' + osrelease_name = f'Ubuntu {major}.{minor}{lts}' + cpe_name = f"cpe:2.3:o:canonical:ubuntu_linux:{ubuntu_version}:*:*:*:{'lts' if lts else '*'}:*:*:*" + elif os.startswith('AlmaLinux'): + osvariant_name = os.replace('AlmaLinux', 'Alma Linux') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Rocky'): + osvariant_name = os + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Red Hat'): + osvariant_name = os.replace(' release', '') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Fedora'): + osvariant_name = os.replace(' release', '') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('CentOS'): + osvariant_name = os.replace(' release', '') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Oracle'): + osvariant_name = os.replace(' Server', '') + osrelease_name = osvariant_name.split('.')[0] + elif os.startswith('Amazon Linux AMI 2018.03'): + osrelease_name = osvariant_name = 'Amazon Linux 1' + + osrelease = get_or_create_osrelease(name=osrelease_name, codename=osrelease_codename, cpe_name=cpe_name) + osvariant = get_or_create_osvariant( + name=osvariant_name, + osrelease=osrelease, + codename=osvariant_codename, + arch=arch, + ) + return osvariant + + +def get_domain(report_domain): + if not report_domain: + report_domain = 'unknown' + domain, c = Domain.objects.get_or_create(name=report_domain) + return domain diff --git a/reports/views.py 
b/reports/views.py index e3d672de..ccef1bb2 100644 --- a/reports/views.py +++ b/reports/views.py @@ -15,22 +15,27 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential + from django.http import HttpResponse, Http404 from django.views.decorators.csrf import csrf_exempt from django.shortcuts import get_object_or_404, render, redirect from django.contrib.auth.decorators import login_required from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger from django.urls import reverse -from django.db import transaction from django.db.models import Q -from django.conf import settings from django.contrib import messages +from django.db.utils import OperationalError from util.filterspecs import Filter, FilterBar - from reports.models import Report +@retry( + retry=retry_if_exception_type(OperationalError), + stop=stop_after_attempt(5), + wait=wait_exponential(multiplier=1, min=2, max=15), +) @csrf_exempt def upload(request): @@ -38,18 +43,17 @@ def upload(request): data = request.POST.copy() meta = request.META.copy() - with transaction.atomic(): - report = Report.objects.create() + report = Report.objects.create() report.parse(data, meta) - if settings.USE_ASYNC_PROCESSING: - from reports.tasks import process_report - process_report.delay(report.id) + + from reports.tasks import process_report + process_report.delay(report.id) if 'report' in data and data['report'] == 'true': packages = [] if 'packages' in data: for p in data['packages'].splitlines(): - packages.append(p.replace('\'', '').split(' ')) + packages.append(p.replace("'", '').split(' ')) repos = data.get('repos') modules = data.get('modules') sec_updates = data.get('sec_updates') @@ -75,10 +79,10 @@ def report_list(request): reports = Report.objects.select_related() if 'host_id' in request.GET: - reports = reports.filter(hostname=int(request.GET['host_id'])) 
+ reports = reports.filter(hostname=request.GET['host_id']) if 'processed' in request.GET: - processed = request.GET['processed'] == 'True' + processed = request.GET['processed'] == 'true' reports = reports.filter(processed=processed) if 'search' in request.GET: @@ -102,15 +106,14 @@ def report_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append(Filter(request, 'processed', - {False: 'No', True: 'Yes'})) + filter_list.append(Filter(request, 'Processed', 'processed', {'true': 'Yes', 'false': 'No'})) filter_bar = FilterBar(request, filter_list) return render(request, 'reports/report_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms}) @login_required @@ -120,18 +123,21 @@ def report_detail(request, report_id): return render(request, 'reports/report_detail.html', - {'report': report}, ) + {'report': report}) @login_required def report_process(request, report_id): - + """ Process a report using a celery task + """ + from reports.tasks import process_report report = get_object_or_404(Report, id=report_id) - report.process() - - return render(request, - 'reports/report_detail.html', - {'report': report}, ) + report.processed = False + report.save() + process_report.delay(report.id) + text = f'Report {report} is being processed' + messages.info(request, text) + return redirect(report.get_absolute_url()) @login_required @@ -142,7 +148,7 @@ def report_delete(request, report_id): if request.method == 'POST': if 'delete' in request.POST: report.delete() - text = f'Report {report!s} has been deleted' + text = f'Report {report} has been deleted' messages.info(request, text) return redirect(reverse('reports:report_list')) elif 'cancel' in request.POST: @@ -150,4 +156,4 @@ def report_delete(request, report_id): return render(request, 'reports/report_delete.html', - {'report': report}, ) + {'report': report}) diff --git a/repos/forms.py b/repos/forms.py index 7ca97206..0800a5c3 100644 --- 
a/repos/forms.py +++ b/repos/forms.py @@ -15,8 +15,7 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -from django.forms import ModelForm, ModelMultipleChoiceField, TextInput, \ - Form, ModelChoiceField, ValidationError +from django.forms import ModelForm, ModelMultipleChoiceField, TextInput, Form, ModelChoiceField, ValidationError from django.contrib.admin.widgets import FilteredSelectMultiple from repos.models import Repository, Mirror @@ -91,9 +90,9 @@ class Media: def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.fields['url'].widget = TextInput(attrs={'size': 150},) - self.fields['file_checksum'].widget = TextInput(attrs={'size': 100},) + self.fields['packages_checksum'].widget = TextInput(attrs={'size': 100},) class Meta: model = Mirror fields = ('repo', 'url', 'enabled', 'refresh', 'mirrorlist', - 'last_access_ok', 'fail_count', 'file_checksum') + 'last_access_ok', 'fail_count', 'packages_checksum') diff --git a/repos/migrations/0002_alter_repository_repotype.py b/repos/migrations/0002_alter_repository_repotype.py new file mode 100644 index 00000000..ec8dd33e --- /dev/null +++ b/repos/migrations/0002_alter_repository_repotype.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.25 on 2025-02-07 13:19 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0001_initial'), + ] + + operations = [ + migrations.AlterField( + model_name='repository', + name='repotype', + field=models.CharField(choices=[('R', 'rpm'), ('D', 'deb'), ('A', 'arch'), ('G', 'gentoo')], max_length=1), + ), + ] diff --git a/repos/migrations/0003_migrate_to_tz_aware.py b/repos/migrations/0003_migrate_to_tz_aware.py new file mode 100644 index 00000000..dddd78ba --- /dev/null +++ b/repos/migrations/0003_migrate_to_tz_aware.py @@ -0,0 +1,18 @@ +from django.db import migrations +from django.utils import timezone + +def make_datetimes_tz_aware(apps, 
schema_editor): + Mirror = apps.get_model('repos', 'Mirror') + for mirror in Mirror.objects.all(): + if mirror.timestamp and timezone.is_naive(mirror.timestamp): + mirror.timestamp = timezone.make_aware(mirror.timestamp, timezone=timezone.get_default_timezone()) + mirror.save() + +class Migration(migrations.Migration): + dependencies = [ + ('repos', '0002_alter_repository_repotype'), + ] + + operations = [ + migrations.RunPython(make_datetimes_tz_aware, migrations.RunPython.noop), + ] diff --git a/repos/migrations/0004_rename_file_checksum_mirror_package_checksum.py b/repos/migrations/0004_rename_file_checksum_mirror_package_checksum.py new file mode 100644 index 00000000..3a5c0d77 --- /dev/null +++ b/repos/migrations/0004_rename_file_checksum_mirror_package_checksum.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-01 15:50 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0003_migrate_to_tz_aware'), + ] + + operations = [ + migrations.RenameField( + model_name='mirror', + old_name='file_checksum', + new_name='package_checksum', + ), + ] diff --git a/repos/migrations/0005_rename_package_checksum_mirror_packages_checksum.py b/repos/migrations/0005_rename_package_checksum_mirror_packages_checksum.py new file mode 100644 index 00000000..8b25a20f --- /dev/null +++ b/repos/migrations/0005_rename_package_checksum_mirror_packages_checksum.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.19 on 2025-03-01 15:54 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0004_rename_file_checksum_mirror_package_checksum'), + ] + + operations = [ + migrations.RenameField( + model_name='mirror', + old_name='package_checksum', + new_name='packages_checksum', + ), + ] diff --git a/repos/migrations/0006_mirror_errata_checksum_mirror_modules_checksum.py b/repos/migrations/0006_mirror_errata_checksum_mirror_modules_checksum.py new file mode 100644 index 
00000000..67f13c36 --- /dev/null +++ b/repos/migrations/0006_mirror_errata_checksum_mirror_modules_checksum.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.19 on 2025-03-01 15:55 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('repos', '0005_rename_package_checksum_mirror_packages_checksum'), + ] + + operations = [ + migrations.AddField( + model_name='mirror', + name='errata_checksum', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.AddField( + model_name='mirror', + name='modules_checksum', + field=models.CharField(blank=True, max_length=255, null=True), + ), + ] diff --git a/repos/models.py b/repos/models.py index 7e6089f9..181a103d 100644 --- a/repos/models.py +++ b/repos/models.py @@ -15,15 +15,17 @@ # You should have received a copy of the GNU General Public License # along with Patchman. If not, see -from django.conf import settings from django.db import models from django.urls import reverse from arch.models import MachineArchitecture from packages.models import Package +from util import get_setting_of_type -from repos.utils import refresh_deb_repo, refresh_rpm_repo, \ - refresh_arch_repo, update_mirror_packages +from repos.repo_types.deb import refresh_deb_repo +from repos.repo_types.rpm import refresh_rpm_repo, refresh_repo_errata +from repos.repo_types.arch import refresh_arch_repo +from repos.repo_types.gentoo import refresh_gentoo_repo from patchman.signals import info_message, warning_message, error_message @@ -32,11 +34,13 @@ class Repository(models.Model): RPM = 'R' DEB = 'D' ARCH = 'A' + GENTOO = 'G' REPO_TYPES = ( (RPM, 'rpm'), (DEB, 'deb'), (ARCH, 'arch'), + (GENTOO, 'gentoo') ) name = models.CharField(max_length=255, unique=True) @@ -47,6 +51,9 @@ class Repository(models.Model): repo_id = models.CharField(max_length=255, null=True, blank=True) auth_required = models.BooleanField(default=False) + from repos.managers import RepositoryManager + objects 
= RepositoryManager() + class Meta: verbose_name_plural = 'Repository' verbose_name_plural = 'Repositories' @@ -60,9 +67,9 @@ def get_absolute_url(self): def show(self): """ Show info about this repo, including mirrors """ - text = f'{self.id!s} : {self.name!s}\n' - text += f'security: {self.security!s} ' - text += f'arch: {self.arch!s}\n' + text = f'{self.id} : {self.name}\n' + text += f'security: {self.security} ' + text += f'arch: {self.arch}\n' text += 'Mirrors:' info_message.send(sender=None, text=text) @@ -74,10 +81,11 @@ def refresh(self, force=False): """ Refresh all of a repos mirror metadata, force can be set to force a reset of all the mirrors metadata """ - if force: for mirror in self.mirror_set.all(): - mirror.file_checksum = None + mirror.packages_checksum = None + mirror.modules_checksum = None + mirror.errata_checksum = None mirror.save() if not self.auth_required: @@ -87,20 +95,30 @@ def refresh(self, force=False): refresh_rpm_repo(self) elif self.repotype == Repository.ARCH: refresh_arch_repo(self) + elif self.repotype == Repository.GENTOO: + refresh_gentoo_repo(self) else: - text = 'Error: unknown repo type for repo ' - text += f'{self.id!s}: {self.repotype!s}' + text = f'Error: unknown repo type for repo {self.id}: {self.repotype}' error_message.send(sender=None, text=text) else: - text = 'Repo requires certificate authentication, not updating' + text = 'Repo requires authentication, not updating' warning_message.send(sender=None, text=text) + def refresh_errata(self, force=False): + """ Refresh errata metadata for all of a repos mirrors + """ + if force: + for mirror in self.mirror_set.all(): + mirror.errata_checksum = None + mirror.save() + if self.repotype == Repository.RPM: + refresh_repo_errata(self) + def disable(self): """ Disable a repo. This involves disabling each mirror, which stops it being considered for package updates, and disabling refresh for each mirror so that it doesn't try to update its package metadata. 
""" - self.enabled = False for mirror in self.mirror_set.all(): mirror.enabled = False @@ -112,7 +130,6 @@ def enable(self): to be considered for package updates, and enabling refresh for each mirror so that it updates its package metadata. """ - self.enabled = True for mirror in self.mirror_set.all(): mirror.enabled = True @@ -125,11 +142,11 @@ class Mirror(models.Model): repo = models.ForeignKey(Repository, on_delete=models.CASCADE) url = models.CharField(max_length=255, unique=True) last_access_ok = models.BooleanField(default=False) - file_checksum = models.CharField(max_length=255, blank=True, null=True) + packages_checksum = models.CharField(max_length=255, blank=True, null=True) + modules_checksum = models.CharField(max_length=255, blank=True, null=True) + errata_checksum = models.CharField(max_length=255, blank=True, null=True) timestamp = models.DateTimeField(auto_now_add=True) - packages = models.ManyToManyField(Package, - blank=True, - through='MirrorPackage') + packages = models.ManyToManyField(Package, blank=True, through='MirrorPackage') mirrorlist = models.BooleanField(default=False) enabled = models.BooleanField(default=True) refresh = models.BooleanField(default=True) @@ -148,9 +165,9 @@ def get_absolute_url(self): def show(self): """ Show info about this mirror """ - text = f' {self.id!s} : {self.url!s}\n' + text = f' {self.id} : {self.url}\n' text += ' last updated: ' - text += f'{self.timestamp!s} checksum: {self.file_checksum!s}\n' + text += f'{self.timestamp} checksum: {self.packages_checksum}\n' info_message.send(sender=None, text=text) def fail(self): @@ -159,26 +176,28 @@ def fail(self): Set MAX_MIRROR_FAILURES to -1 to disable marking mirrors as failures Default is 28 """ - text = f'No usable mirror found at {self.url!s}' + if self.repo.auth_required: + text = f'Mirror requires authentication, not updating - {self.url}' + warning_message.send(sender=None, text=text) + return + text = f'No usable mirror found at {self.url}' 
error_message.send(sender=None, text=text) default_max_mirror_failures = 28 - if hasattr(settings, 'MAX_MIRROR_FAILURES') and \ - isinstance(settings.MAX_MIRROR_FAILURES, int): - max_mirror_failures = settings.MAX_MIRROR_FAILURES - else: - max_mirror_failures = default_max_mirror_failures + max_mirror_failures = get_setting_of_type( + setting_name='MAX_MIRROR_FAILURES', + setting_type=int, + default=default_max_mirror_failures + ) self.fail_count = self.fail_count + 1 if max_mirror_failures == -1: text = f'Mirror has failed {self.fail_count} times, but MAX_MIRROR_FAILURES=-1, not disabling refresh' + error_message.send(sender=None, text=text) elif self.fail_count > max_mirror_failures: self.refresh = False text = f'Mirror has failed {self.fail_count} times (max={max_mirror_failures}), disabling refresh' - error_message.send(sender=None, text=text) - - def update_packages(self, packages): - """ Update the packages associated with a mirror - """ - update_mirror_packages(self, packages) + error_message.send(sender=None, text=text) + self.last_access_ok = False + self.save() class MirrorPackage(models.Model): diff --git a/repos/repo_types/__init__.py b/repos/repo_types/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/repos/repo_types/arch.py b/repos/repo_types/arch.py new file mode 100644 index 00000000..6e85b153 --- /dev/null +++ b/repos/repo_types/arch.py @@ -0,0 +1,115 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import tarfile +from io import BytesIO + +from packages.models import PackageString +from patchman.signals import info_message, warning_message, pbar_start, pbar_update +from repos.utils import get_max_mirrors, fetch_mirror_data, find_mirror_url, update_mirror_packages +from util import get_datetime_now, get_checksum, Checksum + + +def refresh_arch_repo(repo): + """ Refresh all mirrors of an arch linux repo + """ + max_mirrors = get_max_mirrors() + fname = f'{repo.arch}/{repo.repo_id}.db' + ts = get_datetime_now() + + enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) + for i, mirror in enumerate(enabled_mirrors): + if i >= max_mirrors: + text = f'{max_mirrors} Mirrors already refreshed (max={max_mirrors}), skipping further refreshes' + warning_message.send(sender=None, text=text) + break + + res = find_mirror_url(mirror.url, [fname]) + if not res: + continue + mirror_url = res.url + text = f'Found Arch Repo - {mirror_url}' + info_message.send(sender=None, text=text) + + package_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + text='Fetching Repo data') + if not package_data: + continue + + computed_checksum = get_checksum(package_data, Checksum.sha1) + if mirror.packages_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' + warning_message.send(sender=None, text=text) + continue + else: + mirror.packages_checksum = computed_checksum + + packages = extract_arch_packages(package_data) + update_mirror_packages(mirror, packages) + packages.clear() + mirror.timestamp = ts + mirror.save() + + +def extract_arch_packages(data): + """ Extract package metadata from an arch linux tarfile + """ + from packages.utils import find_evr + bio = BytesIO(data) + tf = tarfile.open(fileobj=bio, mode='r:*') + packages = set() + plen = len(tf.getnames()) + if plen > 0: + 
pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + for i, tarinfo in enumerate(tf): + pbar_update.send(sender=None, index=i + 1) + if tarinfo.isfile(): + name_sec = ver_sec = arch_sec = False + t = tf.extractfile(tarinfo).read() + for line in t.decode('utf-8').splitlines(): + if line.startswith('%NAME%'): + name_sec = True + continue + if name_sec: + name_sec = False + name = line + continue + if line.startswith('%VERSION%'): + ver_sec = True + continue + if ver_sec: + ver_sec = False + epoch, version, release = find_evr(line) + continue + if line.startswith('%ARCH%'): + arch_sec = True + continue + if arch_sec: + arch_sec = False + arch = line + continue + package = PackageString(name=name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='A') + packages.add(package) + else: + info_message.send(sender=None, text='No Packages found in Repo') + return packages diff --git a/repos/repo_types/deb.py b/repos/repo_types/deb.py new file mode 100644 index 00000000..25d8eba7 --- /dev/null +++ b/repos/repo_types/deb.py @@ -0,0 +1,109 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import re +from debian.deb822 import Packages +from debian.debian_support import Version + +from packages.models import PackageString +from patchman.signals import error_message, pbar_start, pbar_update, info_message, warning_message +from repos.utils import fetch_mirror_data, update_mirror_packages, find_mirror_url +from util import get_datetime_now, get_checksum, Checksum, extract + + +def extract_deb_packages(data, url): + """ Extract package metadata from debian Packages file + """ + try: + extracted = extract(data, url).decode('utf-8') + except UnicodeDecodeError as e: + error_message.send(sender=None, text=f'Skipping {url} : {e}') + return + package_re = re.compile('^Package: ', re.M) + plen = len(package_re.findall(extracted)) + packages = set() + + if plen > 0: + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + for i, stanza in enumerate(Packages.iter_paragraphs(extracted)): + # https://github.com/furlongm/patchman/issues/55 + if 'version' not in stanza: + continue + fullversion = Version(stanza['version']) + arch = stanza['architecture'] + name = stanza['package'] + epoch = fullversion._BaseVersion__epoch + if epoch is None: + epoch = '' + version = fullversion._BaseVersion__upstream_version + release = fullversion._BaseVersion__debian_revision + if release is None: + release = '' + pbar_update.send(sender=None, index=i + 1) + package = PackageString(name=name, + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='D') + packages.add(package) + else: + info_message.send(sender=None, text='No packages found in repo') + return packages + + +def refresh_deb_repo(repo): + """ Refresh a debian repo. + Checks for the Packages* files to determine what the mirror urls + are and then fetches and extracts packages from those files. 
+ """ + + formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] + + ts = get_datetime_now() + enabled_mirrors = repo.mirror_set.filter(refresh=True, enabled=True) + for mirror in enabled_mirrors: + res = find_mirror_url(mirror.url, formats) + if not res: + continue + mirror_url = res.url + text = f'Found deb Repo - {mirror_url}' + info_message.send(sender=None, text=text) + + package_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + text='Fetching Repo data') + if not package_data: + continue + + computed_checksum = get_checksum(package_data, Checksum.sha1) + if mirror.packages_checksum == computed_checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' + warning_message.send(sender=None, text=text) + continue + else: + mirror.packages_checksum = computed_checksum + + packages = extract_deb_packages(package_data, mirror_url) + if not packages: + mirror.fail() + continue + + update_mirror_packages(mirror, packages) + packages.clear() + mirror.timestamp = ts + mirror.save() diff --git a/repos/repo_types/gentoo.py b/repos/repo_types/gentoo.py new file mode 100644 index 00000000..94df139a --- /dev/null +++ b/repos/repo_types/gentoo.py @@ -0,0 +1,300 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import git +import os +import shutil +import tarfile +import tempfile +from defusedxml import ElementTree +from fnmatch import fnmatch +from io import BytesIO +from pathlib import Path + +from packages.models import PackageString +from packages.utils import find_evr +from patchman.signals import info_message, warning_message, error_message, pbar_start, pbar_update +from repos.utils import add_mirrors_from_urls, mirror_checksum_is_valid, update_mirror_packages +from util import extract, get_url, get_datetime_now, get_checksum, Checksum, fetch_content, response_is_valid + + +def refresh_gentoo_main_repo(repo): + """ Refresh all mirrors of the main gentoo repo + """ + mirrors = get_gentoo_mirror_urls() + add_mirrors_from_urls(repo, mirrors) + ts = get_datetime_now() + for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): + if mirror.url == 'https://api.gentoo.org/mirrors/distfiles.xml': + mirror.mirrorlist = True + mirror.save() + continue + + res = get_url(mirror.url + '.md5sum') + data = fetch_content(res, 'Fetching Repo checksum') + if data is None: + mirror.fail() + continue + + checksum = data.decode().split()[0] + if checksum is None: + mirror.fail() + continue + + if mirror.packages_checksum == checksum: + text = 'Mirror checksum has not changed, not refreshing Package metadata' + warning_message.send(sender=None, text=text) + continue + + res = get_url(mirror.url) + mirror.last_access_ok = response_is_valid(res) + if not mirror.last_access_ok: + mirror.fail() + continue + + data = fetch_content(res, 'Fetching Repo data') + if data is None: + mirror.fail() + continue + extracted = extract(data, mirror.url) + info_message.send(sender=None, text=f'Found Gentoo Repo - {mirror.url}') + + computed_checksum = get_checksum(data, Checksum.md5) + if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, 'package'): + mirror.fail() + continue + else: + mirror.packages_checksum = checksum + + packages = 
extract_gentoo_packages(mirror, extracted) + if packages: + update_mirror_packages(mirror, packages) + + mirror.timestamp = ts + mirror.save() + + +def refresh_gentoo_overlay_repo(repo): + """ Refresh all mirrors of a Gentoo overlay repo + """ + mirrors = get_gentoo_overlay_mirrors(repo.repo_id) + add_mirrors_from_urls(repo, mirrors) + ts = get_datetime_now() + for mirror in repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True): + # FIXME: need to check for failure + packages = extract_gentoo_overlay_packages(mirror) + if packages: + update_mirror_packages(mirror, packages) + mirror.timestamp = ts + mirror.save() + + +def get_gentoo_ebuild_keywords(content): + """ Get the keywords for an ebuild + """ + keywords = set() + default_keywords = { + 'alpha', + 'amd64', + 'arm', + 'arm64', + 'hppa', + 'loong', + 'm68k', + 'mips', + 'ppc', + 'ppc64', + 'riscv', + 's390', + 'sparc', + 'x86', + } + for line in content.decode().splitlines(): + if not line.startswith('KEYWORDS='): + continue + all_keywords = line.split('=')[1].split('#')[0].strip(' "').split() + if len(all_keywords) == 0 or '*' in all_keywords: + all_keywords = default_keywords + for keyword in all_keywords: + if keyword.startswith('~'): + continue + if keyword.startswith('-'): + keyword = keyword.replace('-', '') + if keyword in all_keywords: + all_keywords.remove(keyword) + continue + keywords.add(keyword) + break + if keywords: + return keywords + else: + return default_keywords + + +def get_gentoo_overlay_mirrors(repo_name): + """Get the gentoo overlay repos that match repo.id + """ + gentoo_overlays_url = 'https://api.gentoo.org/overlays/repositories.xml' + res = get_url(gentoo_overlays_url) + if not res: + return + mirrors = [] + try: + tree = ElementTree.parse(BytesIO(res.content)) + root = tree.getroot() + for child in root: + if child.tag == 'repo': + found = False + for element in child: + if element.tag == 'name' and element.text == repo_name: + found = True + if found and element.tag 
== 'source': + if element.text.startswith('http'): + mirrors.append(element.text) + except ElementTree.ParseError as e: + error_message.send(sender=None, text=f'Error parsing {gentoo_overlays_url}: {e}') + return mirrors + + +def get_gentoo_mirror_urls(): + """ Use the Gentoo API to find http(s) mirrors + """ + gentoo_distfiles_url = 'https://api.gentoo.org/mirrors/distfiles.xml' + res = get_url(gentoo_distfiles_url) + if not res: + return + mirrors = {} + try: + tree = ElementTree.parse(BytesIO(res.content)) + root = tree.getroot() + for child in root: + if child.tag == 'mirrorgroup': + for k, v in child.attrib.items(): + if k == 'region': + region = v + elif k == 'country': + country = v + for mirror in child: + for element in mirror: + if element.tag == 'name': + name = element.text + mirrors[name] = {} + mirrors[name]['region'] = region + mirrors[name]['country'] = country + mirrors[name]['urls'] = [] + elif element.tag == 'uri': + if element.get('protocol') == 'http': + mirrors[name]['urls'].append(element.text) + except ElementTree.ParseError as e: + error_message.send(sender=None, text=f'Error parsing {gentoo_distfiles_url}: {e}') + mirror_urls = [] + # for now, ignore region data and choose MAX_MIRRORS mirrors at random + for _, v in mirrors.items(): + for url in v['urls']: + mirror_urls.append(url.rstrip('/') + '/snapshots/gentoo-latest.tar.xz') + return mirror_urls + + +def extract_gentoo_ebuilds(data): + """ Extract ebuilds from a Gentoo tarball + """ + extracted_ebuilds = {} + with tarfile.open(fileobj=BytesIO(data), mode='r') as tar: + for member in tar.getmembers(): + if member.isfile() and member.name.endswith('ebuild') and not member.name.endswith('skel.ebuild'): + file_content = tar.extractfile(member).read() + full_path = Path(member.name) + ebuild_path = Path(*full_path.parts[1:]) + extracted_ebuilds[str(ebuild_path)] = file_content + return extracted_ebuilds + + +def extract_gentoo_overlay_ebuilds(t): + """ Extract ebuilds from a Gentoo overlay 
tarball + """ + extracted_ebuilds = {} + for root, dirs, files in os.walk(t): + for name in files: + if fnmatch(name, '*.ebuild'): + package_name = root.replace(t + '/', '') + if len(package_name.split('/')) > 2: + continue + with open(os.path.join(root, name), 'rb') as f: + content = f.read() + extracted_ebuilds[f'{package_name}/{name}'] = content + return extracted_ebuilds + + +def extract_gentoo_packages(mirror, data): + """ Extract packages from a Gentoo mirror + """ + extracted_ebuilds = extract_gentoo_ebuilds(data) + return extract_gentoo_packages_from_ebuilds(extracted_ebuilds) + + +def extract_gentoo_packages_from_ebuilds(extracted_ebuilds): + """ Extract packages from ebuilds + """ + if not extracted_ebuilds: + return + + packages = set() + elen = len(extracted_ebuilds) + pbar_start.send(sender=None, ptext=f'Processing {elen} ebuilds', plen=elen) + for i, (path, content) in enumerate(extracted_ebuilds.items()): + pbar_update.send(sender=None, index=i + 1) + components = path.split(os.sep) + category = components[0] + name = components[1] + evr = components[2].replace(f'{name}-', '').replace('.ebuild', '') + epoch, version, release = find_evr(evr) + arches = get_gentoo_ebuild_keywords(content) + for arch in arches: + package = PackageString( + name=name.lower(), + epoch=epoch, + version=version, + release=release, + arch=arch, + packagetype='G', + category=category, + ) + packages.add(package) + plen = len(packages) + info_message.send(sender=None, text=f'Extracted {plen} Packages', plen=plen) + return packages + + +def extract_gentoo_overlay_packages(mirror): + """ Extract packages from gentoo overlay repo + """ + t = tempfile.mkdtemp() + info_message.send(sender=None, text=f'Extracting Gentoo packages from {mirror.url}') + git.Repo.clone_from(mirror.url, t, depth=1) + packages = set() + extracted_ebuilds = extract_gentoo_overlay_ebuilds(t) + shutil.rmtree(t) + packages = extract_gentoo_packages_from_ebuilds(extracted_ebuilds) + return packages + + +def 
refresh_gentoo_repo(repo): + """ Refresh a Gentoo repo + """ + if repo.repo_id == 'gentoo': + refresh_gentoo_main_repo(repo) + else: + refresh_gentoo_overlay_repo(repo) diff --git a/repos/repo_types/rpm.py b/repos/repo_types/rpm.py new file mode 100644 index 00000000..aa3354c7 --- /dev/null +++ b/repos/repo_types/rpm.py @@ -0,0 +1,99 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.db.models import Q + +from patchman.signals import info_message, warning_message +from repos.repo_types.yast import refresh_yast_repo +from repos.repo_types.yum import refresh_yum_repo +from repos.utils import check_for_metalinks, check_for_mirrorlists, find_mirror_url, get_max_mirrors, fetch_mirror_data +from util import get_datetime_now + + +def refresh_repo_errata(repo): + refresh_rpm_repo_mirrors(repo, errata_only=True) + + +def refresh_rpm_repo(repo): + """ Refresh an rpm repo (yum or yast) + Checks if the repo url is a mirrorlist or metalink, + and extracts mirrors if so, then refreshes the mirrors + """ + check_for_mirrorlists(repo) + check_for_metalinks(repo) + refresh_rpm_repo_mirrors(repo) + + +def max_mirrors_refreshed(repo, checksum, ts): + """ Only refresh X mirrors, where X = max_mirrors + """ + if checksum is None: + return False + max_mirrors = get_max_mirrors() + mirrors_q = Q(mirrorlist=False, refresh=True, enabled=True, timestamp=ts, packages_checksum=checksum) + have_checksum_and_ts = 
repo.mirror_set.filter(mirrors_q).count() + if have_checksum_and_ts >= max_mirrors: + text = f'{max_mirrors} Mirrors already have this checksum and timestamp, skipping further refreshes' + warning_message.send(sender=None, text=text) + return True + return False + + +def refresh_rpm_repo_mirrors(repo, errata_only=False): + """ Checks a number of common yum repo formats to determine + which type of repo it is, then refreshes the mirrors + """ + formats = [ + 'repodata/repomd.xml.xz', + 'repodata/repomd.xml.bz2', + 'repodata/repomd.xml.gz', + 'repodata/repomd.xml', + 'suse/repodata/repomd.xml.xz', + 'suse/repodata/repomd.xml.bz2', + 'suse/repodata/repomd.xml.gz', + 'suse/repodata/repomd.xml', + 'content', + ] + ts = get_datetime_now() + enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True, enabled=True) + for i, mirror in enumerate(enabled_mirrors): + res = find_mirror_url(mirror.url, formats) + if not res: + mirror.fail() + continue + mirror_url = res.url + + repo_data = fetch_mirror_data( + mirror=mirror, + url=mirror_url, + text='Fetching Repo data') + if not repo_data: + continue + + if mirror_url.endswith('content'): + text = f'Found yast rpm Repo - {mirror_url}' + info_message.send(sender=None, text=text) + refresh_yast_repo(mirror, repo_data) + else: + text = f'Found yum rpm Repo - {mirror_url}' + info_message.send(sender=None, text=text) + refresh_yum_repo(mirror, repo_data, mirror_url, errata_only) + if mirror.last_access_ok: + mirror.timestamp = ts + mirror.save() + checksum = mirror.packages_checksum + if max_mirrors_refreshed(repo, checksum, ts): + break diff --git a/repos/repo_types/yast.py b/repos/repo_types/yast.py new file mode 100644 index 00000000..0ef54358 --- /dev/null +++ b/repos/repo_types/yast.py @@ -0,0 +1,69 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. 
+# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +import re + +from packages.models import PackageString +from patchman.signals import pbar_start, pbar_update, info_message +from repos.utils import fetch_mirror_data, update_mirror_packages +from util import extract + + +def refresh_yast_repo(mirror, data): + """ Refresh package metadata for a yast-style rpm mirror + and add the packages to the mirror + """ + package_dir = re.findall('DESCRDIR *(.*)', data.decode('utf-8'))[0] + package_url = f'{mirror.url}/{package_dir}/packages.gz' + + package_data = fetch_mirror_data( + mirror=mirror, + url=package_url, + text='Fetching yast Repo data') + if not package_data: + return + + mirror.packages_checksum = 'yast' + packages = extract_yast_packages(package_data) + if packages: + update_mirror_packages(mirror, packages) + packages.clear() + + +def extract_yast_packages(data): + """ Extract package metadata from yast metadata file + """ + extracted = extract(data, 'gz').decode('utf-8') + pkgs = re.findall('=Pkg: (.*)', extracted) + plen = len(pkgs) + packages = set() + + if plen > 0: + pbar_start.send(sender=None, ptext=f'Extracting {plen} Packages', plen=plen) + + for i, pkg in enumerate(pkgs): + pbar_update.send(sender=None, index=i + 1) + name, version, release, arch = pkg.split() + package = PackageString(name=name.lower(), + epoch='', + version=version, + release=release, + arch=arch, + packagetype='R') + packages.add(package) + else: + info_message.send(sender=None, text='No 
packages found in repo') + return packages diff --git a/repos/repo_types/yum.py b/repos/repo_types/yum.py new file mode 100644 index 00000000..7ac85816 --- /dev/null +++ b/repos/repo_types/yum.py @@ -0,0 +1,254 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from celery import shared_task + +from repos.models import Repository + + +@shared_task +def refresh_repo(repo_id, force=False): + """ Refresh metadata for a single repo + """ + repo = Repository.objects.get(id=repo_id) + repo.refresh(force) + + +@shared_task +def refresh_repos(force=False): + """ Refresh metadata for all enabled repos + """ + repos = Repository.objects.filter(enabled=True) + for repo in repos: + refresh_repo.delay(repo.id, force) diff --git a/repos/templates/repos/mirror_delete.html b/repos/templates/repos/mirror_delete.html index 808c8024..4ecfb982 100644 --- a/repos/templates/repos/mirror_delete.html +++ b/repos/templates/repos/mirror_delete.html @@ -14,14 +14,14 @@
    PackageVersions availablePackageEpochVersionReleaseArchTypeRepositoriesHostsErrata
    {{ packagename }}{{ packagename.package_set.count }}{{ package }} {{ package.epoch }} {{ package.version }} {{ package.release }} {{ package.arch }} {{ package.get_packagetype_display }} Available from {{ package.repo_count }} Repositories Installed on {{ package.host_set.count }} Hosts Affected by {{ package.affected_by_erratum.count }} Errata Provides fix in {{ package.provides_fix_in_erratum.count }} Errata
    - + - +
    Repo {{ mirror.repo }}
    URL {{ mirror.url }}
    Packages {{ mirror.packages.count }}
    Packages{{ mirror.packages.count }}
    Enabled {% yes_no_img mirror.enabled 'Enabled' 'Not Enabled' %}
    Refresh {% yes_no_img mirror.refresh 'True' 'False' %}
    Mirrorlist/Metalink {% yes_no_img mirror.mirrorlist 'True' 'False' %}
    Last Access OK {% yes_no_img mirror.last_access_ok 'True' 'False' %}
    Fail Count {{ mirror.fail_count }}
    Timestamp {{ mirror.timestamp }}
    Checksum {{ mirror.file_checksum }}
    Checksum {{ mirror.packages_checksum }}
    diff --git a/repos/templates/repos/mirror_detail.html b/repos/templates/repos/mirror_detail.html index 35c81877..dfe3da62 100644 --- a/repos/templates/repos/mirror_detail.html +++ b/repos/templates/repos/mirror_detail.html @@ -14,14 +14,14 @@ - + - +
    Repo {{ mirror.repo }}
    URL {{ mirror.url }}
    Packages {{ mirror.packages.count }}
    Packages{{ mirror.packages.count }}
    Enabled {% yes_no_img mirror.enabled 'Enabled' 'Not Enabled' %}
    Refresh {% yes_no_img mirror.refresh 'True' 'False' %}
    Mirrorlist/Metalink {% yes_no_img mirror.mirrorlist 'True' 'False' %}
    Last Access OK {% yes_no_img mirror.last_access_ok 'True' 'False' %}
    Fail Count {{ mirror.fail_count }}
    Timestamp {{ mirror.timestamp }}
    Checksum {{ mirror.file_checksum }}
    Checksum {{ mirror.packages_checksum }}
    {% if user.is_authenticated and perms.is_admin %} {% bootstrap_icon "trash" %} Delete this Mirror diff --git a/repos/templates/repos/mirror_edit_repo.html b/repos/templates/repos/mirror_edit_repo.html index a2e18e2a..1a785538 100644 --- a/repos/templates/repos/mirror_edit_repo.html +++ b/repos/templates/repos/mirror_edit_repo.html @@ -26,7 +26,7 @@
    {% yes_no_img mirror.mirrorlist 'Yes' 'No' %} {% yes_no_img mirror.last_access_ok 'Yes' 'No' %} {{ mirror.timestamp }}{{ mirror.file_checksum|truncatechars:16 }}{{ mirror.packages_checksum|truncatechars:16 }}
    IDURLURL Packages Enabled Refresh Mirrorlist/Metalink Last Access OKTimestampTimestamp Checksum Delete Edit
    {{ mirror.id }} {{ mirror.url|truncatechars:25 }}{{ mirror.packages.count }} + {% if not mirror.mirrorlist %} + {{ mirror.packages.count }} + {% endif %} + {% yes_no_img mirror.enabled 'Enabled' 'Not Enabled' %} {% yes_no_img mirror.refresh 'Yes' 'No' %} {% yes_no_img mirror.mirrorlist 'Yes' 'No' %} {% yes_no_img mirror.last_access_ok 'Yes' 'No' %} {{ mirror.timestamp }}{{ mirror.file_checksum|truncatechars:16 }}{% if not mirror.mirrorlist %}{{ mirror.packages_checksum|truncatechars:16 }}{% endif %} {% bootstrap_icon "trash" %} Delete this Mirror {% bootstrap_icon "edit" %} Edit this Mirror
    - - + + diff --git a/repos/templates/repos/repo_detail.html b/repos/templates/repos/repo_detail.html index be42f3cf..59401ab2 100644 --- a/repos/templates/repos/repo_detail.html +++ b/repos/templates/repos/repo_detail.html @@ -14,7 +14,7 @@
  • Details
  • Mirrors
  • Hosts with this Repository
  • -
  • OS Groups with this Repository
  • +
  • OS Releases with this Repository
  • Modules in this Repository
  • @@ -22,13 +22,14 @@
    Name {{ repo.name }}
    ID {% if repo.id != None %} {{ repo.id }} {% endif %}
    Name {{ repo.name }}
    Repo ID {% if repo.repo_id != None %} {{ repo.repo_id }} {% endif %}
    Type {{ repo.get_repotype_display }}
    Architecture {{ repo.arch }}
    Security {% yes_no_img repo.security 'Security' 'Not Security' %}
    - - + + - + +
    Name {{ repo.name }}
    ID {% if repo.id != None %} {{ repo.id }} {% endif %}
    Name {{ repo.name }}
    Repo ID {% if repo.repo_id %} {{ repo.repo_id }} {% endif %}
    Type {{ repo.get_repotype_display }}
    Architecture {{ repo.arch }}
    Security {% yes_no_img repo.security 'Security' 'Not Security' %}
    Enabled {% yes_no_img repo.enabled 'Enabled' 'Not Enabled' %}
    Mirrors {{ repo.mirror_set.count }}
    Mirrors {{ repo.mirror_set.count }}
    Hosts with this Repository {{ repo.host_set.count }}
    Requires Authentication {{ repo.auth_required }}
    {% if user.is_authenticated and perms.is_admin %} @@ -44,6 +45,7 @@ {% else %} {% bootstrap_icon "star" %} Mark as Security repo {% endif %} + {% bootstrap_icon "tasks" %} Refresh this Repository {% endif %}
    @@ -60,9 +62,9 @@ -
    +
    - {% gen_table repo.osgroup_set.all.distinct %} + {% gen_table repo.osrelease_set.all.distinct %}
    diff --git a/repos/templates/repos/repository_table.html b/repos/templates/repos/repository_table.html index cf716690..bcd7e721 100644 --- a/repos/templates/repos/repository_table.html +++ b/repos/templates/repos/repository_table.html @@ -2,8 +2,8 @@ - - + + @@ -14,8 +14,8 @@ {% for repo in object_list %} - - + + diff --git a/repos/templatetags/repo_buttons.py b/repos/templatetags/repo_buttons.py index 67f0af7f..3689c8b7 100644 --- a/repos/templatetags/repo_buttons.py +++ b/repos/templatetags/repo_buttons.py @@ -29,11 +29,11 @@ def yes_no_button_repo_en(repo): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') html = '' return format_html(html) @@ -45,10 +45,10 @@ def yes_no_button_repo_sec(repo): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') html = '' return format_html(html) diff --git a/repos/urls.py b/repos/urls.py index 246ce678..176f9a13 100644 --- a/repos/urls.py +++ b/repos/urls.py @@ -24,12 +24,13 @@ urlpatterns = [ path('', views.repo_list, name='repo_list'), path('/', views.repo_detail, name='repo_detail'), - path('/toggle_enabled/', views.repo_toggle_enabled, name='repo_toggle_enabled'), # noqa - path('/toggle_security/', views.repo_toggle_security, name='repo_toggle_security'), # noqa + path('/toggle_enabled/', views.repo_toggle_enabled, name='repo_toggle_enabled'), + path('/toggle_security/', views.repo_toggle_security, name='repo_toggle_security'), path('/edit/', views.repo_edit, name='repo_edit'), path('/delete/', views.repo_delete, name='repo_delete'), + path('/refresh/', views.repo_refresh, name='repo_refresh'), path('mirrors/', views.mirror_list, name='mirror_list'), - path('mirrors/mirror//', views.mirror_detail, name='mirror_detail'), # noqa - path('mirrors/mirror//edit/', views.mirror_edit, name='mirror_edit'), # noqa - path('mirrors/mirror//delete/', views.mirror_delete, name='mirror_delete'), # noqa + path('mirrors/mirror//', views.mirror_detail, name='mirror_detail'), + 
path('mirrors/mirror//edit/', views.mirror_edit, name='mirror_edit'), + path('mirrors/mirror//delete/', views.mirror_delete, name='mirror_delete'), ] diff --git a/repos/utils.py b/repos/utils.py index cb40aaf1..49b5d07f 100644 --- a/repos/utils.py +++ b/repos/utils.py @@ -1,5 +1,5 @@ # Copyright 2012 VPAC, http://www.vpac.org -# Copyright 2013-2021 Marcus Furlong +# Copyright 2013-2025 Marcus Furlong # # This file is part of Patchman. # @@ -16,175 +16,84 @@ # along with Patchman. If not, see import re -import tarfile -import yaml -from datetime import datetime from io import BytesIO -from defusedxml.lxml import _etree as etree -from debian.debian_support import Version -from debian.deb822 import Packages +from defusedxml import ElementTree +from tenacity import RetryError -from django.conf import settings -from django.db import IntegrityError, DatabaseError, transaction +from django.db import IntegrityError from django.db.models import Q -from packages.models import Package, PackageName, PackageString -from packages.utils import parse_package_string, get_or_create_package -from arch.models import PackageArchitecture -from util import get_url, download_url, response_is_valid, extract, \ - get_checksum, Checksum -from patchman.signals import progress_info_s, progress_update_s, \ - info_message, warning_message, error_message, debug_message +from packages.models import Package +from packages.utils import convert_package_to_packagestring, convert_packagestring_to_package +from util import get_url, fetch_content, response_is_valid, extract, get_checksum, Checksum, get_setting_of_type +from patchman.signals import info_message, warning_message, error_message, debug_message, pbar_start, pbar_update + + +def get_or_create_repo(r_name, r_arch, r_type, r_id=None): + """ Get or create a Repository object and returns the object. + Returns None if it cannot get or create the object. 
+ """ + from repos.models import Repository + try: + repository, c = Repository.objects.get_or_create(name=r_name, arch=r_arch, repotype=r_type) + except IntegrityError: + repository = Repository.objects.get(name=r_name, arch=r_arch, repotype=r_type) + if repository: + if r_id: + repository.repo_id = r_id + repository.save() + return repository def update_mirror_packages(mirror, packages): """ Updates the packages contained on a mirror, and removes obsolete packages. """ - new = set() - old = set() - removals = set() + from repos.models import MirrorPackage # noqa + old = set() mirror_packages = mirror.packages.all() - mlen = mirror_packages.count() - - ptext = 'Fetching existing packages:' - progress_info_s.send(sender=None, ptext=ptext, plen=mlen) + plen = mirror_packages.count() + pbar_start.send(sender=None, ptext=f'Fetching {plen} existing Packages', plen=plen) for i, package in enumerate(mirror_packages): - progress_update_s.send(sender=None, index=i + 1) - name = str(package.name) - arch = str(package.arch) - strpackage = PackageString(name=name, - epoch=package.epoch, - version=package.version, - release=package.release, - arch=arch, - packagetype=package.packagetype) + pbar_update.send(sender=None, index=i + 1) + strpackage = convert_package_to_packagestring(package) old.add(strpackage) - new = packages.difference(old) removals = old.difference(packages) - - nlen = len(new) rlen = len(removals) + pbar_start.send(sender=None, ptext=f'Removing {rlen} obsolete Packages', plen=rlen) + for i, strpackage in enumerate(removals): + pbar_update.send(sender=None, index=i + 1) + package = convert_packagestring_to_package(strpackage) + MirrorPackage.objects.filter(mirror=mirror, package=package).delete() - ptext = f'Removing {rlen!s} obsolete packages:' - progress_info_s.send(sender=None, ptext=ptext, plen=rlen) - for i, package in enumerate(removals): - progress_update_s.send(sender=None, index=i + 1) - package_id = PackageName.objects.get(name=package.name) - epoch 
= package.epoch - version = package.version - release = package.release - arch = PackageArchitecture.objects.get(name=package.arch) - packagetype = package.packagetype - p = Package.objects.get(name=package_id, - epoch=epoch, - version=version, - arch=arch, - release=release, - packagetype=packagetype) - from repos.models import MirrorPackage - mirror_packages = MirrorPackage.objects.filter(mirror=mirror, package=p) - for mirror_package in mirror_packages: - with transaction.atomic(): - mirror_package.delete() - - ptext = f'Adding {nlen!s} new packages:' - progress_info_s.send(sender=None, ptext=ptext, plen=nlen) - for i, package in enumerate(new): - progress_update_s.send(sender=None, index=i + 1) - - package_names = PackageName.objects.all() - with transaction.atomic(): - package_id, c = package_names.get_or_create(name=package.name) - - epoch = package.epoch - version = package.version - release = package.release - packagetype = package.packagetype - - package_arches = PackageArchitecture.objects.all() - with transaction.atomic(): - arch, c = package_arches.get_or_create(name=package.arch) - - all_packages = Package.objects.all() - with transaction.atomic(): - p, c = all_packages.get_or_create(name=package_id, - epoch=epoch, - version=version, - arch=arch, - release=release, - packagetype=packagetype) - # This fixes a subtle bug where a stored package name with uppercase - # letters will not match until it is lowercased. 
- if package_id.name != package.name: - package_id.name = package.name - with transaction.atomic(): - package_id.save() - from repos.models import MirrorPackage # noqa - with transaction.atomic(): - mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=p) - - -def get_primary_url(mirror_url, data): - - if isinstance(data, str): - if data.startswith('Bad repo - not in list') or \ - data.startswith('Invalid repo'): - return None, None, None - ns = 'http://linux.duke.edu/metadata/repo' - try: - context = etree.parse(BytesIO(data), etree.XMLParser()) - except etree.XMLSyntaxError: - context = etree.parse(BytesIO(extract(data, 'gz')), etree.XMLParser()) - location = context.xpath("//ns:data[@type='primary']/ns:location/@href", - namespaces={'ns': ns})[0] - checksum = context.xpath("//ns:data[@type='primary']/ns:checksum", - namespaces={'ns': ns})[0].text - csum_type = context.xpath("//ns:data[@type='primary']/ns:checksum/@type", - namespaces={'ns': ns})[0] - url = str(mirror_url.rsplit('/', 2)[0]) + '/' + location - return url, checksum, csum_type - - -def get_modules_url(mirror_url, data): - - if isinstance(data, str): - if data.startswith('Bad repo - not in list') or \ - data.startswith('Invalid repo'): - return None, None, None - ns = 'http://linux.duke.edu/metadata/repo' - try: - context = etree.parse(BytesIO(data), etree.XMLParser()) - except etree.XMLSyntaxError: - context = etree.parse(BytesIO(extract(data, 'gz')), etree.XMLParser()) - try: - location = context.xpath("//ns:data[@type='modules']/ns:location/@href", - namespaces={'ns': ns})[0] - except IndexError: - return None, None, None - checksum = context.xpath("//ns:data[@type='modules']/ns:checksum", - namespaces={'ns': ns})[0].text - csum_type = context.xpath("//ns:data[@type='modules']/ns:checksum/@type", - namespaces={'ns': ns})[0] - url = str(mirror_url.rsplit('/', 2)[0]) + '/' + location - return url, checksum, csum_type + new = packages.difference(old) + nlen = len(new) + 
pbar_start.send(sender=None, ptext=f'Adding {nlen} new Packages', plen=nlen) + for i, strpackage in enumerate(new): + pbar_update.send(sender=None, index=i + 1) + try: + package = convert_packagestring_to_package(strpackage) + mirror_package, c = MirrorPackage.objects.get_or_create(mirror=mirror, package=package) + except Package.MultipleObjectsReturned: + error_message.send(sender=None, text=f'Duplicate Package found in {mirror}: {strpackage}') def find_mirror_url(stored_mirror_url, formats): """ Find the actual URL of the mirror by trying predefined paths """ - for fmt in formats: mirror_url = stored_mirror_url for f in formats: if mirror_url.endswith(f): mirror_url = mirror_url[:-len(f)] - mirror_url = mirror_url.rstrip('/') + '/' + fmt - debug_message.send(sender=None, - text=f'Checking {mirror_url!s}') - res = get_url(mirror_url) + mirror_url = f"{mirror_url.rstrip('/')}/{fmt}" + debug_message.send(sender=None, text=f'Checking for Mirror at {mirror_url}') + try: + res = get_url(mirror_url) + except RetryError: + continue if res is not None and res.ok: return res @@ -198,61 +107,79 @@ def is_metalink(url): def get_metalink_urls(url): """ Parses a metalink and returns a list of mirrors """ - res = get_url(url) - if response_is_valid(res): - if 'content-type' in res.headers and \ - res.headers['content-type'] == 'application/metalink+xml': - data = download_url(res, 'Downloading repo info:') - ns = 'http://www.metalinker.org/' - try: - context = etree.parse(BytesIO(data), etree.XMLParser()) - except etree.XMLSyntaxError: - context = etree.parse(BytesIO(extract(data, 'gz')), - etree.XMLParser()) - xpath = "//ns:files/ns:file[@name='repomd.xml']/ns:resources/ns:url[@protocol='https']" # noqa - metalink_urls = context.xpath(xpath, namespaces={'ns': ns}) - return [x.text for x in metalink_urls] + try: + res = get_url(url) + except RetryError: + return + if not response_is_valid(res): + return + if not res.headers.get('content-type') == 'application/metalink+xml': + 
return + metalink_urls = [] + data = fetch_content(res, 'Fetching metalink data') + extracted = extract(data, url) + ns = 'http://www.metalinker.org/' + try: + tree = ElementTree.parse(BytesIO(extracted)) + root = tree.getroot() + for child in root: + if child.tag == f'{{{ns}}}files': + for grandchild in child: + if grandchild.tag == f'{{{ns}}}file': + for greatgrandchild in grandchild: + if greatgrandchild.tag == f'{{{ns}}}resources': + for greatgreatgrandchild in greatgrandchild: + if greatgreatgrandchild.tag == f'{{{ns}}}url': + if greatgreatgrandchild.attrib.get('protocol') in ['https', 'http']: + metalink_urls.append(greatgreatgrandchild.text) + except ElementTree.ParseError as e: + error_message.send(sender=None, text=f'Error parsing metalink {url}: {e}') + return metalink_urls def get_mirrorlist_urls(url): - """ Checks if a given url returns a mirrorlist by checking if it is of - type text/plain and contains a list of urls. Returns a list of - mirrors if it is a mirrorlist. + """ Checks if a given url returns a mirrorlist by checking if it contains + a list of urls. Returns a list of mirrors if it is a mirrorlist. 
""" - res = get_url(url) + try: + res = get_url(url) + except RetryError: + return if response_is_valid(res): - if 'content-type' in res.headers and \ - 'text/plain' in res.headers['content-type']: - data = download_url(res, 'Downloading repo info:') + try: + data = fetch_content(res, 'Fetching Repo data') if data is None: return - mirror_urls = re.findall('^http://.*$|^ftp://.*$', - data.decode('utf-8'), re.MULTILINE) + mirror_urls = re.findall(r'^http[s]*://.*$|^ftp://.*$', data.decode('utf-8'), re.MULTILINE) if mirror_urls: + debug_message.send(sender=None, text=f'Found mirrorlist: {url}') return mirror_urls + else: + debug_message.send(sender=None, text=f'Not a mirrorlist: {url}') + except Exception as e: + error_message.send(sender=None, text=f'Error attempting to parse a mirrorlist: {e} {url}') def add_mirrors_from_urls(repo, mirror_urls): """ Creates mirrors from a list of mirror urls """ + max_mirrors = get_max_mirrors() for mirror_url in mirror_urls: mirror_url = mirror_url.replace('$ARCH', repo.arch.name) mirror_url = mirror_url.replace('$basearch', repo.arch.name) - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - # only add X mirrors, where X = max_mirrors - q = Q(mirrorlist=False, refresh=True) - existing = repo.mirror_set.filter(q).count() - if existing >= max_mirrors: - text = f'{max_mirrors!s} mirrors already ' - text += f'exist, not adding {mirror_url!s}' - warning_message.send(sender=None, text=text) - continue + mirror_url = mirror_url.rstrip('/') + q = Q(mirrorlist=False, refresh=True, enabled=True) + existing = repo.mirror_set.filter(q).count() + if existing >= max_mirrors: + text = f'{existing} Mirrors already exist (max={max_mirrors}), not adding more' + warning_message.send(sender=None, text=text) + break from repos.models import Mirror - m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url) + # FIXME: maybe we should store the mirrorlist url with full path to 
repomd.xml? + # that is what metalink urls return now + m, c = Mirror.objects.get_or_create(repo=repo, url=mirror_url.rstrip('/').replace('repodata/repomd.xml', '')) if c: - text = f'Added mirror - {mirror_url!s}' + text = f'Added Mirror - {mirror_url}' info_message.send(sender=None, text=text) @@ -266,8 +193,7 @@ def check_for_mirrorlists(repo): mirror.mirrorlist = True mirror.last_access_ok = True mirror.save() - text = f'Found mirrorlist - {mirror.url!s}' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Found mirrorlist - {mirror.url}') add_mirrors_from_urls(repo, mirror_urls) @@ -284,284 +210,37 @@ def check_for_metalinks(repo): mirror.mirrorlist = True mirror.last_access_ok = True mirror.save() - text = f'Found metalink - {mirror.url!s}' - info_message.send(sender=None, text=text) + info_message.send(sender=None, text=f'Found metalink - {mirror.url}') add_mirrors_from_urls(repo, mirror_urls) -def extract_module_metadata(data, url, repo): - """ Extract module metadata from a modules.yaml file - """ - modules = set() - extracted = extract(data, url) - try: - modules_yaml = yaml.safe_load_all(extracted) - except yaml.YAMLError as e: - print(e) - for doc in modules_yaml: - document = doc['document'] - modulemd = doc['data'] - if document == 'modulemd': - modulemd = doc['data'] - m_name = modulemd.get('name') - m_stream = modulemd['stream'] - m_version = modulemd.get('version') - m_context = modulemd.get('context') - arch = modulemd.get('arch') - raw_packages = modulemd.get('artifacts', {}).get('rpms', '') - # raw_profiles = list(modulemd.get('profiles', {}).keys()) - - packages = set() - p_type = Package.RPM - for pkg_str in raw_packages: - p_name, p_epoch, p_ver, p_rel, p_dist, p_arch = parse_package_string(pkg_str) - package = get_or_create_package(p_name, p_epoch, p_ver, p_rel, p_arch, p_type) - packages.add(package) - - from modules.utils import get_or_create_module - module, created = get_or_create_module(m_name, m_stream, 
m_version, m_context, arch, repo) - - package_ids = [] - for package in packages: - package_ids.append(package.id) - try: - with transaction.atomic(): - module.packages.add(package) - except IntegrityError as e: - error_message.send(sender=None, text=e) - except DatabaseError as e: - error_message.send(sender=None, text=e) - modules.add(module) - for package in module.packages.all(): - if package.id not in package_ids: - module.packages.remove(package) - - -def extract_yum_packages(data, url): - """ Extract package metadata from a yum primary.xml file - """ - extracted = extract(data, url) - ns = 'http://linux.duke.edu/metadata/common' - m_context = etree.iterparse(BytesIO(extracted), - tag=f'{{{ns!s}}}metadata') - plen = int(next(m_context)[1].get('packages')) - p_context = etree.iterparse(BytesIO(extracted), - tag=f'{{{ns!s}}}package') - packages = set() - - if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) - - for i, p_data in enumerate(p_context): - elem = p_data[1] - progress_update_s.send(sender=None, index=i + 1) - name = elem.xpath('//ns:name', - namespaces={'ns': ns})[0].text.lower() - arch = elem.xpath('//ns:arch', - namespaces={'ns': ns})[0].text - fullversion = elem.xpath('//ns:version', - namespaces={'ns': ns})[0] - epoch = fullversion.get('epoch') - version = fullversion.get('ver') - release = fullversion.get('rel') - elem.clear() - while elem.getprevious() is not None: - del elem.getparent()[0] - - if name != '' and version != '' and arch != '': - if epoch == '0': - epoch = '' - package = PackageString(name=name, - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='R') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') - return packages - - -def extract_deb_packages(data, url): - """ Extract package metadata from debian Packages file - """ - try: - extracted = extract(data, url).decode('utf-8') - except 
UnicodeDecodeError as e: - error_message.send(sender=None, text=f'Skipping {url} : {e}') - return - package_re = re.compile('^Package: ', re.M) - plen = len(package_re.findall(extracted)) - packages = set() - - if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) - for i, stanza in enumerate(Packages.iter_paragraphs(extracted)): - # https://github.com/furlongm/patchman/issues/55 - if 'version' not in stanza: - continue - fullversion = Version(stanza['version']) - arch = stanza['architecture'] - name = stanza['package'] - epoch = fullversion._BaseVersion__epoch - if epoch is None: - epoch = '' - version = fullversion._BaseVersion__upstream_version - release = fullversion._BaseVersion__debian_revision - if release is None: - release = '' - progress_update_s.send(sender=None, index=i + 1) - package = PackageString(name=name, - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='D') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') - return packages - - -def extract_yast_packages(data): - """ Extract package metadata from yast metadata file - """ - extracted = extract(data, 'gz').decode('utf-8') - pkgs = re.findall('=Pkg: (.*)', extracted) - plen = len(pkgs) - packages = set() - - if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) - - for i, pkg in enumerate(pkgs): - progress_update_s.send(sender=None, index=i + 1) - name, version, release, arch = pkg.split() - package = PackageString(name=name.lower(), - epoch='', - version=version, - release=release, - arch=arch, - packagetype='R') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') - return packages - - -def extract_arch_packages(data): - """ Extract package metadata from an arch linux tarfile - """ - from packages.utils import find_evr - bio = BytesIO(data) - tf = 
tarfile.open(fileobj=bio, mode='r:*') - packages = set() - plen = len(tf.getnames()) - if plen > 0: - ptext = 'Extracting packages: ' - progress_info_s.send(sender=None, ptext=ptext, plen=plen) - for i, tarinfo in enumerate(tf): - progress_update_s.send(sender=None, index=i + 1) - if tarinfo.isfile(): - name_sec = ver_sec = arch_sec = False - t = tf.extractfile(tarinfo).read() - for line in t.decode('utf-8').splitlines(): - if line.startswith('%NAME%'): - name_sec = True - continue - if name_sec: - name_sec = False - name = line - continue - if line.startswith('%VERSION%'): - ver_sec = True - continue - if ver_sec: - ver_sec = False - epoch, version, release = find_evr(line) - continue - if line.startswith('%ARCH%'): - arch_sec = True - continue - if arch_sec: - arch_sec = False - arch = line - continue - package = PackageString(name=name.lower(), - epoch=epoch, - version=version, - release=release, - arch=arch, - packagetype='A') - packages.add(package) - else: - info_message.send(sender=None, text='No packages found in repo') - return packages - - -def refresh_yum_repo(mirror, data, mirror_url, ts): - """ Refresh package metadata for a yum-style rpm mirror - and add the packages to the mirror - """ - primary_url, primary_checksum, primary_checksum_type = get_primary_url(mirror_url, data) - modules_url, modules_checksum, modules_checksum_type = get_modules_url(mirror_url, data) - - if not primary_url: +def fetch_mirror_data(mirror, url, text, checksum=None, checksum_type=None, metadata_type=None): + if not url: mirror.fail() return - res = get_url(primary_url) - mirror.last_access_ok = response_is_valid(res) - - if not mirror.last_access_ok: + try: + res = get_url(url) + except RetryError: mirror.fail() return - package_data = download_url(res, 'Downloading package info:') - if package_data is None: + if not response_is_valid(res): mirror.fail() return + mirror.last_access_ok = True + mirror.save() - computed_checksum = get_checksum(package_data, 
Checksum[primary_checksum_type]) - if not mirror_checksum_is_valid(computed_checksum, primary_checksum, mirror, 'package'): + data = fetch_content(res, text) + if not data: return - if mirror.file_checksum == primary_checksum: - text = 'Mirror checksum has not changed, ' - text += 'not refreshing package metadata' - warning_message.send(sender=None, text=text) - return - - mirror.file_checksum = primary_checksum - - if modules_url: - res = get_url(modules_url) - module_data = download_url(res, 'Downloading module info:') - computed_checksum = get_checksum(module_data, Checksum[modules_checksum_type]) - if not mirror_checksum_is_valid(computed_checksum, modules_checksum, mirror, 'module'): + if checksum and checksum_type and metadata_type: + computed_checksum = get_checksum(data, Checksum[checksum_type]) + if not mirror_checksum_is_valid(computed_checksum, checksum, mirror, metadata_type): + mirror.fail() return - - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - # only refresh X mirrors, where X = max_mirrors - checksum_q = Q(mirrorlist=False, refresh=True, timestamp=ts, - file_checksum=primary_checksum) - have_checksum = mirror.repo.mirror_set.filter(checksum_q).count() - if have_checksum >= max_mirrors: - text = f'{max_mirrors!s} mirrors already have this ' - text += 'checksum, ignoring refresh to save time' - info_message.send(sender=None, text=text) - else: - packages = extract_yum_packages(package_data, primary_url) - if packages: - update_mirror_packages(mirror, packages) - if modules_url: - extract_module_metadata(module_data, modules_url, mirror.repo) + return data def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): @@ -569,8 +248,7 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): Returns True if both match. 
""" if not computed or computed != provided: - text = f'Checksum failed for mirror {mirror.id!s}' - text += f', not refreshing {metadata_type} metadata' + text = f'Checksum failed for mirror {mirror.id}, not refreshing {metadata_type} metadata' error_message.send(sender=None, text=text) text = f'Found checksum: {computed}\nExpected checksum: {provided}' error_message.send(sender=None, text=text) @@ -581,164 +259,6 @@ def mirror_checksum_is_valid(computed, provided, mirror, metadata_type): return True -def refresh_arch_repo(repo): - """ Refresh all mirrors of an arch linux repo - """ - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - fname = f'{repo.arch!s}/{repo.repo_id!s}.db' - ts = datetime.now().replace(microsecond=0) - for i, mirror in enumerate(repo.mirror_set.filter(refresh=True)): - res = find_mirror_url(mirror.url, [fname]) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - if i >= max_mirrors: - text = f'{max_mirrors!s} mirrors already refreshed, ' - text += f' not refreshing {mirror.url!s}' - warning_message.send(sender=None, text=text) - continue - mirror_url = res.url - text = f'Found arch repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - data = download_url(res, 'Downloading repo info:') - if data is None: - mirror.fail() - return - computed_checksum = get_checksum(data, Checksum.sha1) - if mirror.file_checksum == computed_checksum: - text = 'Mirror checksum has not changed, ' - text += 'not refreshing package metadata' - warning_message.send(sender=None, text=text) - else: - packages = extract_arch_packages(data) - mirror.last_access_ok = True - mirror.timestamp = ts - update_mirror_packages(mirror, packages) - mirror.file_checksum = computed_checksum - packages.clear() - else: - mirror.fail() - mirror.save() - - -def refresh_yast_repo(mirror, data): - """ Refresh package metadata for a yast-style rpm mirror - and add the packages to 
the mirror - """ - package_dir = re.findall('DESCRDIR *(.*)', data.decode('utf-8'))[0] - package_url = f'{mirror.url!s}/{package_dir!s}/packages.gz' - res = get_url(package_url) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - data = download_url(res, 'Downloading yast repo info:') - if data is None: - mirror.fail() - return - mirror.file_checksum = 'yast' - packages = extract_yast_packages(data) - if packages: - update_mirror_packages(mirror, packages) - else: - mirror.fail() - - -def refresh_rpm_repo(repo): - """ Refresh an rpm repo. - Checks if the repo url is a mirrorlist, and extracts mirrors if so. - If not, checks a number of common rpm repo formats to determine - which type of repo it is, and to determine the mirror urls. - """ - - formats = [ - 'repodata/repomd.xml.xz', - 'repodata/repomd.xml.bz2', - 'repodata/repomd.xml.gz', - 'repodata/repomd.xml', - 'suse/repodata/repomd.xml.xz', - 'suse/repodata/repomd.xml.bz2', - 'suse/repodata/repomd.xml.gz', - 'suse/repodata/repomd.xml', - 'content', - ] - - check_for_mirrorlists(repo) - check_for_metalinks(repo) - - if hasattr(settings, 'MAX_MIRRORS') and \ - isinstance(settings.MAX_MIRRORS, int): - max_mirrors = settings.MAX_MIRRORS - ts = datetime.now().replace(microsecond=0) - enabled_mirrors = repo.mirror_set.filter(mirrorlist=False, refresh=True) - for i, mirror in enumerate(enabled_mirrors): - res = find_mirror_url(mirror.url, formats) - mirror.last_access_ok = response_is_valid(res) - if mirror.last_access_ok: - if i >= max_mirrors: - text = f'{max_mirrors!s} mirrors already refreshed, ' - text += f' not refreshing {mirror.url!s}' - warning_message.send(sender=None, text=text) - continue - data = download_url(res, 'Downloading repo info:') - if data is None: - mirror.fail() - return - mirror_url = res.url - if res.url.endswith('content'): - text = f'Found yast rpm repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - refresh_yast_repo(mirror, data) - else: - text 
= f'Found yum rpm repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - refresh_yum_repo(mirror, data, mirror_url, ts) - mirror.timestamp = ts - else: - mirror.fail() - mirror.save() - - -def refresh_deb_repo(repo): - """ Refresh a debian repo. - Checks for the Packages* files to determine what the mirror urls - are and then downloads and extracts packages from those files. - """ - - formats = ['Packages.xz', 'Packages.bz2', 'Packages.gz', 'Packages'] - - ts = datetime.now().replace(microsecond=0) - for mirror in repo.mirror_set.filter(refresh=True): - res = find_mirror_url(mirror.url, formats) - mirror.last_access_ok = response_is_valid(res) - - if mirror.last_access_ok: - mirror_url = res.url - text = f'Found deb repo - {mirror_url!s}' - info_message.send(sender=None, text=text) - data = download_url(res, 'Downloading repo info:') - if data is None: - mirror.fail() - return - computed_checksum = get_checksum(data, Checksum.sha1) - if mirror.file_checksum == computed_checksum: - text = 'Mirror checksum has not changed, ' - text += 'not refreshing package metadata' - warning_message.send(sender=None, text=text) - else: - packages = extract_deb_packages(data, mirror_url) - if packages is None: - mirror.fail() - else: - mirror.last_access_ok = True - mirror.timestamp = ts - update_mirror_packages(mirror, packages) - mirror.file_checksum = computed_checksum - packages.clear() - else: - mirror.fail() - mirror.save() - - def find_best_repo(package, hostrepos): """ Given a package and a set of HostRepos, determine the best repo. Returns the best repo. 
@@ -756,3 +276,46 @@ def find_best_repo(package, hostrepos): if hostrepo.priority > best_repo.priority: best_repo = hostrepo return best_repo + + +def get_max_mirrors(): + """ Find the max number of mirrors for refresh + """ + max_mirrors = get_setting_of_type( + setting_name='MAX_MIRRORS', + setting_type=int, + default=3, + ) + return max_mirrors + + +def clean_repos(): + """ Remove repositories that contain no mirrors + """ + from repos.models import Repository + repos = Repository.objects.filter(mirror__isnull=True) + rlen = repos.count() + if rlen == 0: + info_message.send(sender=None, text='No Repositories with zero Mirrors found.') + else: + info_message.send(sender=None, text=f'Removing {rlen} empty Repositories.') + repos.delete() + + +def remove_mirror_trailing_slashes(): + """ Remove trailing slashes from mirrors, delete duplicates + """ + from repos.models import Mirror + mirrors = Mirror.objects.filter(url__endswith='/') + mlen = mirrors.count() + if mlen == 0: + info_message.send(sender=None, text='No Mirrors with trailing slashes found.') + else: + info_message.send(sender=None, text=f'Removing trailing slashes from {mlen} Mirrors.') + for mirror in mirrors: + mirror.url = mirror.url.rstrip('/') + try: + mirror.save() + except IntegrityError: + warning_message.send(sender=None, text=f'Deleting duplicate Mirror {mirror.id}: {mirror.url}') + mirror.delete() diff --git a/repos/views.py b/repos/views.py index a600aa7f..92b9750c 100644 --- a/repos/views.py +++ b/repos/views.py @@ -29,12 +29,10 @@ from util.filterspecs import Filter, FilterBar from hosts.models import Host, HostRepo from repos.models import Repository, Mirror, MirrorPackage -from operatingsystems.models import OSGroup +from operatingsystems.models import OSRelease from arch.models import MachineArchitecture -from repos.forms import EditRepoForm, LinkRepoForm, CreateRepoForm, \ - EditMirrorForm -from repos.serializers import RepositorySerializer, \ - MirrorSerializer, MirrorPackageSerializer 
+from repos.forms import EditRepoForm, LinkRepoForm, CreateRepoForm, EditMirrorForm +from repos.serializers import RepositorySerializer, MirrorSerializer, MirrorPackageSerializer @login_required @@ -46,23 +44,22 @@ def repo_list(request): if 'repotype' in request.GET: repos = repos.filter(repotype=request.GET['repotype']) - if 'arch' in request.GET: - repos = repos.filter(arch=request.GET['arch']) + if 'arch_id' in request.GET: + repos = repos.filter(arch=request.GET['arch_id']) - if 'osgroup' in request.GET: - repos = repos.filter(osgroup=request.GET['osgroup']) + if 'osrelease_id' in request.GET: + repos = repos.filter(osrelease=request.GET['osrelease_id']) if 'security' in request.GET: - security = request.GET['security'] == 'True' + security = request.GET['security'] == 'true' repos = repos.filter(security=security) if 'enabled' in request.GET: - enabled = request.GET['enabled'] == 'True' + enabled = request.GET['enabled'] == 'true' repos = repos.filter(enabled=enabled) if 'package_id' in request.GET: - repos = repos.filter( - mirror__packages=int(request.GET['package_id'])) + repos = repos.filter(mirror__packages=request.GET['package_id']) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -87,24 +84,19 @@ def repo_list(request): page = paginator.page(paginator.num_pages) filter_list = [] - filter_list.append( - Filter( - request, - 'repotype', - Repository.objects.values_list('repotype', flat=True).distinct())) - filter_list.append(Filter(request, - 'arch', - MachineArchitecture.objects.all())) - filter_list.append(Filter(request, 'enabled', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'security', {False: 'No', True: 'Yes'})) - filter_list.append(Filter(request, 'osgroup', OSGroup.objects.all())) + filter_list.append(Filter(request, 'OS Release', 'osrelease_id', OSRelease.objects.filter(repos__in=repos))) + filter_list.append(Filter(request, 'Enabled', 'enabled', {'true': 'Yes', 'false': 'No'})) + 
filter_list.append(Filter(request, 'Security', 'security', {'true': 'Yes', 'false': 'No'})) + filter_list.append(Filter(request, 'Repo Type', 'repotype', Repository.REPO_TYPES)) + filter_list.append(Filter(request, 'Architecture', 'arch_id', + MachineArchitecture.objects.filter(repository__in=repos))) filter_bar = FilterBar(request, filter_list) return render(request, 'repos/repo_list.html', {'page': page, 'filter_bar': filter_bar, - 'terms': terms}, ) + 'terms': terms}) @login_required @@ -116,9 +108,7 @@ def pre_reqs(arch, repotype): text = 'Not all mirror architectures are the same,' text += ' cannot link to or create repos' messages.info(request, text) - return render(request, - 'repos/mirror_with_repo_list.html', - {'page': page, 'checksum': checksum}, ) + return render(request, 'repos/mirror_with_repo_list.html', {'page': page, 'checksum': checksum}) if mirror.repo.repotype != repotype: text = 'Not all mirror repotypes are the same,' @@ -126,7 +116,7 @@ def pre_reqs(arch, repotype): messages.info(request, text) return render(request, 'repos/mirror_with_repo_list.html', - {'page': page, 'checksum': checksum}, ) + {'page': page, 'checksum': checksum}) return True def move_mirrors(repo): @@ -143,7 +133,7 @@ def move_mirrors(repo): if oldrepo.mirror_set.count() == 0: oldrepo.delete() - mirrors = Mirror.objects.select_related().order_by('file_checksum') + mirrors = Mirror.objects.select_related().order_by('packages_checksum') checksum = None if 'checksum' in request.GET: @@ -151,7 +141,10 @@ def move_mirrors(repo): if 'checksum' in request.POST: checksum = request.POST['checksum'] if checksum is not None: - mirrors = mirrors.filter(file_checksum=checksum) + mirrors = mirrors.filter(packages_checksum=checksum) + + if 'repo_id' in request.GET: + mirrors = mirrors.filter(repo=request.GET['repo_id']) if 'search' in request.GET: terms = request.GET['search'].lower() @@ -192,7 +185,7 @@ def move_mirrors(repo): repo.security = security repo.save() move_mirrors(repo) - 
text = f'Mirrors linked to new Repository {repo!s}' + text = f'Mirrors linked to new Repository {repo}' messages.info(request, text) return redirect(repo.get_absolute_url()) @@ -200,7 +193,7 @@ def move_mirrors(repo): if link_form.is_valid(): repo = link_form.cleaned_data['name'] move_mirrors(repo) - text = f'Mirrors linked to Repository {repo!s}' + text = f'Mirrors linked to Repository {repo}' messages.info(request, text) return redirect(repo.get_absolute_url()) else: @@ -218,10 +211,10 @@ def move_mirrors(repo): {'page': page, 'link_form': link_form, 'create_form': create_form, - 'checksum': checksum}, ) + 'checksum': checksum}) return render(request, 'repos/mirror_list.html', - {'page': page}, ) + {'page': page}) @login_required @@ -229,7 +222,7 @@ def mirror_detail(request, mirror_id): mirror = get_object_or_404(Mirror, id=mirror_id) return render(request, 'repos/mirror_detail.html', - {'mirror': mirror}, ) + {'mirror': mirror}) @login_required @@ -239,7 +232,7 @@ def mirror_delete(request, mirror_id): if request.method == 'POST': if 'delete' in request.POST: mirror.delete() - text = f'Mirror {mirror!s} has been deleted' + text = f'Mirror {mirror} has been deleted' messages.info(request, text) return redirect(reverse('repos:mirror_list')) elif 'cancel' in request.POST: @@ -247,7 +240,7 @@ def mirror_delete(request, mirror_id): return render(request, 'repos/mirror_delete.html', - {'mirror': mirror}, ) + {'mirror': mirror}) @login_required @@ -261,7 +254,7 @@ def mirror_edit(request, mirror_id): if edit_form.is_valid(): mirror = edit_form.save() mirror.save() - text = f'Saved changes to Mirror {mirror!s}' + text = f'Saved changes to Mirror {mirror}' messages.info(request, text) return redirect(mirror.get_absolute_url()) else: @@ -273,7 +266,7 @@ def mirror_edit(request, mirror_id): return render(request, 'repos/mirror_edit.html', - {'mirror': mirror, 'edit_form': edit_form}, ) + {'mirror': mirror, 'edit_form': edit_form}) @login_required @@ -284,12 +277,12 @@ def 
repo_detail(request, repo_id): hosts = Host.objects.with_counts('get_num_security_updates', 'get_num_bugfix_updates') hosts_prefetch = Prefetch('host_set', queryset=hosts) repo = Repository.objects.select_related() \ - .prefetch_related(hosts_prefetch) \ - .get(id=repo_id) + .prefetch_related(hosts_prefetch) \ + .get(id=repo_id) return render(request, 'repos/repo_detail.html', - {'repo': repo}, ) + {'repo': repo}) @login_required @@ -311,7 +304,7 @@ def repo_edit(request, repo_id): repo.enable() else: repo.disable() - text = f'Saved changes to Repository {repo!s}' + text = f'Saved changes to Repository {repo}' messages.info(request, text) return redirect(repo.get_absolute_url()) else: @@ -324,7 +317,7 @@ def repo_edit(request, repo_id): return render(request, 'repos/repo_edit.html', - {'repo': repo, 'edit_form': edit_form}, ) + {'repo': repo, 'edit_form': edit_form}) @login_required @@ -337,7 +330,7 @@ def repo_delete(request, repo_id): for mirror in repo.mirror_set.all(): mirror.delete() repo.delete() - text = f'Repository {repo!s} has been deleted' + text = f'Repository {repo} has been deleted' messages.info(request, text) return redirect(reverse('repos:repo_list')) elif 'cancel' in request.POST: @@ -345,7 +338,7 @@ def repo_delete(request, repo_id): return render(request, 'repos/repo_delete.html', - {'repo': repo}, ) + {'repo': repo}) @login_required @@ -362,7 +355,7 @@ def repo_toggle_enabled(request, repo_id): if request.is_ajax(): return HttpResponse(status=204) else: - text = f'Repository {repo!s} has been {status!s}' + text = f'Repository {repo} has been {status}' messages.info(request, text) return redirect(repo.get_absolute_url()) @@ -381,12 +374,24 @@ def repo_toggle_security(request, repo_id): if request.is_ajax(): return HttpResponse(status=204) else: - text = f'Repository {repo!s} has been marked' - text += f' as a {sectype!s} update repo' + text = f'Repository {repo} has been marked' + text += f' as a {sectype} update repo' messages.info(request, 
text) return redirect(repo.get_absolute_url()) +@login_required +def repo_refresh(request, repo_id): + """ Refresh a repo using a celery task + """ + from repos.tasks import refresh_repo + repo = get_object_or_404(Repository, id=repo_id) + refresh_repo.delay(repo.id) + text = f'Repository {repo} is being refreshed' + messages.info(request, text) + return redirect(repo.get_absolute_url()) + + class RepositoryViewSet(viewsets.ModelViewSet): """ API endpoint that allows repositories to be viewed or edited. diff --git a/requirements.txt b/requirements.txt index 16d37ee2..9d2baa9e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,18 +1,21 @@ -Django==3.2.25 -django-tagging==0.5.0 -django-extensions==3.2.1 +Django==4.2.24 +django-taggit==4.0.0 +django-extensions==3.2.3 django-bootstrap3==23.1 -progressbar==2.5 -python-debian==0.1.49 -lxml==5.2.2 +python-debian==1.0.1 defusedxml==0.7.1 -PyYAML==6.0.1 -chardet==4.0.0 -requests==2.32.3 -colorama==0.4.4 -djangorestframework==3.13.1 -django-filter==21.1 -humanize==3.13.1 -version-utils==0.3.0 -python-magic==0.4.25 -pymemcache==4.0.0 +PyYAML==6.0.2 +requests==2.32.4 +colorama==0.4.6 +djangorestframework==3.15.2 +django-filter==25.1 +humanize==4.12.1 +version-utils==0.3.2 +python-magic==0.4.27 +gitpython==3.1.44 +tenacity==8.2.3 +celery==5.4.0 +redis==5.2.1 +django-celery-beat==2.7.0 +tqdm==4.67.1 +cvss==3.4 diff --git a/sbin/patchman b/sbin/patchman index df6cd0ca..9cc6048e 100755 --- a/sbin/patchman +++ b/sbin/patchman @@ -21,29 +21,28 @@ import os import sys import argparse -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') -from django.conf import settings from django.core.exceptions import MultipleObjectsReturned from django.db.models import Count from django import setup as django_setup +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') +from django.conf import settings # noqa django_setup() -from datetime import date, datetime -from tagging.models import TaggedItem - +from
arch.utils import clean_architectures +from errata.utils import mark_errata_security_updates, enrich_errata, \ + scan_package_updates_for_affected_packages +from errata.tasks import update_errata from hosts.models import Host -from packages.models import Package, PackageName, PackageUpdate -from packages.utils import update_errata, mark_errata_security_updates +from modules.utils import clean_modules +from packages.utils import clean_packages, clean_packageupdates, clean_packagenames from repos.models import Repository -from modules.models import Module -from arch.models import PackageArchitecture, MachineArchitecture +from repos.utils import clean_repos from reports.models import Report -from util import print_nocr, create_pbar, update_pbar, \ - set_verbosity, get_verbosity -from patchman.signals import \ - info_message, warning_message, error_message, debug_message, \ - progress_info_s, progress_update_s +from reports.tasks import clean_reports_with_no_hosts +from security.utils import update_cves, update_cwes +from util import set_verbosity, get_datetime_now +from patchman.signals import info_message def get_host(host=None, action='Performing action'): @@ -51,7 +50,7 @@ def get_host(host=None, action='Performing action'): """ host_obj = None hostdot = host + '.' 
- text = f'{action!s} for Host {host!s}' + text = f'{action} for Host {host}' try: host_obj = Host.objects.get(hostname__startswith=hostdot) @@ -59,10 +58,10 @@ def get_host(host=None, action='Performing action'): try: host_obj = Host.objects.get(hostname__startswith=host) except Host.DoesNotExist: - text = f'Host {host!s} does not exist' + text = f'Host {host} does not exist' except MultipleObjectsReturned: matches = Host.objects.filter(hostname__startswith=host).count() - text = f'{matches!s} Hosts match hostname "{host!s}"' + text = f'{matches} Hosts match hostname "{host}"' info_message.send(sender=None, text=text) return host_obj @@ -83,7 +82,7 @@ def get_hosts(hosts=None, action='Performing action'): if host_obj is not None: host_objs.append(host_obj) else: - text = f'{action!s} for all Hosts\n' + text = f'{action} for all Hosts\n' info_message.send(sender=None, text=text) host_objs = Host.objects.all() @@ -97,11 +96,11 @@ def get_repos(repo=None, action='Performing action', only_enabled=False): if repo: try: repos.append(Repository.objects.get(id=repo)) - text = f'{action!s} for Repo {repo!s}' + text = f'{action} for Repo {repo}' except Repository.DoesNotExist: - text = f'Repo {repo!s} does not exist' + text = f'Repo {repo} does not exist' else: - text = f'{action!s} for all Repos\n' + text = f'{action} for all Repos\n' if only_enabled: repos = Repository.objects.filter(enabled=True) else: @@ -117,7 +116,7 @@ def refresh_repos(repo=None, force=False): """ repos = get_repos(repo, 'Refreshing metadata', True) for repo in repos: - text = f'Repository {repo.id!s} : {repo!s}' + text = f'Repository {repo.id} : {repo}' info_message.send(sender=None, text=text) repo.refresh(force) info_message.send(sender=None, text='') @@ -141,151 +140,17 @@ def list_hosts(hosts=None): host.show() -def clean_packages(): - """ Remove packages that are no longer in use - """ - packages = Package.objects.filter(mirror__isnull=True, - host__isnull=True, - erratum__isnull=True, - 
module__isnull=True) - plen = packages.count() - if plen == 0: - info_message.send(sender=None, text='No orphaned Packages found.') - else: - create_pbar(f'Removing {plen!s} orphaned Packages:', plen) - for i, o in enumerate(packages): - p = Package.objects.get(name=o.name, - epoch=o.epoch, - version=o.version, - release=o.release, - arch=o.arch, - packagetype=o.packagetype) - p.delete() - update_pbar(i + 1) - - -def clean_arches(): - """ Remove architectures that are no longer in use - """ - parches = PackageArchitecture.objects.filter(package__isnull=True) - plen = parches.count() - - if plen == 0: - text = 'No orphaned Package Architectures found.' - info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {plen!s} orphaned P Arches:', plen) - for i, p in enumerate(parches): - a = PackageArchitecture.objects.get(name=p.name) - a.delete() - update_pbar(i + 1) - - marches = MachineArchitecture.objects.filter(host__isnull=True, - repository__isnull=True, - erratum__isnull=True) - mlen = marches.count() - - if mlen == 0: - text = 'No orphaned Machine Architectures found.' - info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {mlen!s} orphaned M Arches:', mlen) - for i, m in enumerate(marches): - a = MachineArchitecture.objects.get(name=m.name) - a.delete() - update_pbar(i + 1) - - -def clean_package_names(): - """ Remove package names that are no longer in use - """ - names = PackageName.objects.filter(package__isnull=True) - nlen = names.count() - - if nlen == 0: - info_message.send(sender=None, text='No orphaned Package names found.') - else: - create_pbar(f'Removing {nlen!s} unused Package names:', nlen) - for i, packagename in enumerate(names): - packagename.delete() - update_pbar(i + 1) - - -def clean_repos(): - """ Remove repositories that contain no mirrors - """ - repos = Repository.objects.filter(mirror__isnull=True) - rlen = repos.count() - - if rlen == 0: - text = 'No Repositories with zero Mirrors found.' 
- info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {rlen!s} empty Repos:', rlen) - for i, repo in enumerate(repos): - repo.delete() - update_pbar(i + 1) - - -def clean_reports(s_host=None): +def clean_reports(hoststr=None): """ Delete old reports for all hosts, specify host for a single host. Reports with non existent hosts are only removed when no host is specified. """ - hosts = get_hosts(s_host, 'Cleaning Reports') - timestamp = date.today() - + hosts = get_hosts(hoststr, 'Cleaning Reports') for host in hosts: - info_message.send(sender=None, text=str(host)) - host.clean_reports(timestamp) - - if s_host is None: - reports = Report.objects.filter(accessed__lt=timestamp) - rlen = reports.count() - if rlen != 0: - create_pbar(f'Removing {rlen!s} extraneous Reports:', - rlen) - for i, report in enumerate(reports): - report.delete() - update_pbar(i + 1) - + host.clean_reports() -def clean_modules(): - """ Delete modules that have no host or no repo - """ - modules = Module.objects.filter(host__isnull=True, repo__isnull=True) - mlen = modules.count() - - if mlen == 0: - text = 'No orphaned Modules found.' 
- info_message.send(sender=None, text=text) - else: - create_pbar(f'Removing {mlen!s} empty Modules:', mlen) - for i, module in enumerate(modules): - module.delete() - update_pbar(i + 1) - - -def clean_tags(): - """ Delete unused tags - """ - tagged_items = list(TaggedItem.objects.all()) - to_delete = [] - - for t in tagged_items: - hostid = t.object_id - try: - # tags are only used for hosts for now - Host.objects.get(pk=hostid) - except Host.DoesNotExist: - to_delete.append(t) - - tlen = len(to_delete) - if tlen != 0: - create_pbar(f'Removing {tlen!s} unused tagged items', tlen) - for i, t in enumerate(to_delete): - t.delete() - update_pbar(i + 1) + if not hoststr: + clean_reports_with_no_hosts() def host_updates_alt(host=None): @@ -293,13 +158,13 @@ def host_updates_alt(host=None): """ updated_hosts = [] hosts = get_hosts(host, 'Finding updates') - ts = datetime.now().replace(microsecond=0) + ts = get_datetime_now() for host in hosts: info_message.send(sender=None, text=str(host)) if host not in updated_hosts: - host.updated_at = ts host.find_updates() info_message.send(sender=None, text='') + host.updated_at = ts host.save() # only include hosts with the same number of packages @@ -333,7 +198,7 @@ def host_updates_alt(host=None): phost.updated_at = ts phost.save() updated_hosts.append(phost) - text = f'Added the same updates to {phost!s}' + text = f'Added the same updates to {phost}' info_message.send(sender=None, text=text) else: text = 'Updates already added in this run' @@ -370,45 +235,45 @@ def diff_hosts(hosts): repo_diff_AB = reposA.difference(reposB) repo_diff_BA = reposB.difference(reposA) - info_message.send(sender=None, text=f'+ {hostA.hostname!s}') - info_message.send(sender=None, text=f'- {hostB.hostname!s}') + info_message.send(sender=None, text=f'+ {hostA.hostname}') + info_message.send(sender=None, text=f'- {hostB.hostname}') if hostA.os != hostB.os: info_message.send(sender=None, text='\nOperating Systems') - info_message.send(sender=None, 
text=f'+ {hostA.os!s}') - info_message.send(sender=None, text=f'- {hostB.os!s}') + info_message.send(sender=None, text=f'+ {hostA.os}') + info_message.send(sender=None, text=f'- {hostB.os}') else: info_message.send(sender=None, text='\nNo OS differences') if hostA.arch != hostB.arch: info_message.send(sender=None, text='\nArchitecture') - info_message.send(sender=None, text=f'+ {hostA.arch!s}') - info_message.send(sender=None, text=f'- {hostB.arch!s}') + info_message.send(sender=None, text=f'+ {hostA.arch}') + info_message.send(sender=None, text=f'- {hostB.arch}') else: info_message.send(sender=None, text='\nNo Architecture differences') if hostA.kernel != hostB.kernel: info_message.send(sender=None, text='\nKernels') - info_message.send(sender=None, text=f'+ {hostA.kernel!s}') - info_message.send(sender=None, text=f'- {hostB.kernel!s}') + info_message.send(sender=None, text=f'+ {hostA.kernel}') + info_message.send(sender=None, text=f'- {hostB.kernel}') else: info_message.send(sender=None, text='\nNo Kernel differences') if len(package_diff_AB) != 0 or len(package_diff_BA) != 0: info_message.send(sender=None, text='\nPackages') for package in package_diff_AB: - info_message.send(sender=None, text=f'+ {package!s}') + info_message.send(sender=None, text=f'+ {package}') for package in package_diff_BA: - info_message.send(sender=None, text=f'- {package!s}') + info_message.send(sender=None, text=f'- {package}') else: info_message.send(sender=None, text='\nNo Package differences') if len(repo_diff_AB) != 0 or len(repo_diff_BA) != 0: info_message.send(sender=None, text='\nRepositories') for repo in repo_diff_AB: - info_message.send(sender=None, text=f'+ {repo!s}') + info_message.send(sender=None, text=f'+ {repo}') for repo in repo_diff_BA: - info_message.send(sender=None, text=f'- {repo!s}') + info_message.send(sender=None, text=f'- {repo}') else: info_message.send(sender=None, text='\nNo Repo differences') @@ -419,7 +284,7 @@ def delete_hosts(hosts=None): if hosts: 
matching_hosts = get_hosts(hosts) for host in matching_hosts: - text = f'Deleting host: {host.hostname!s}:' + text = f'Deleting host: {host.hostname}:' info_message.send(sender=None, text=text) host.delete() @@ -428,11 +293,11 @@ def toggle_host_hro(hosts=None, host_repos_only=True): """ Toggle host_repos_only for a host or matching pattern of hosts """ if host_repos_only: - toggle = "Setting" + toggle = 'Setting' else: - toggle = "Unsetting" + toggle = 'Unsetting' if hosts: - matching_hosts = get_hosts(hosts, f"{toggle!s} host_repos_only") + matching_hosts = get_hosts(hosts, f'{toggle} host_repos_only') for host in matching_hosts: info_message.send(sender=None, text=str(host)) host.host_repos_only = host_repos_only @@ -443,11 +308,11 @@ def toggle_host_check_dns(hosts=None, check_dns=True): """ Toggle check_dns for a host or matching pattern of hosts """ if check_dns: - toggle = "Setting" + toggle = 'Setting' else: - toggle = "Unsetting" + toggle = 'Unsetting' if hosts: - matching_hosts = get_hosts(hosts, f"{toggle!s} check_dns") + matching_hosts = get_hosts(hosts, f'{toggle} check_dns') for host in matching_hosts: info_message.send(sender=None, text=str(host)) host.check_dns = check_dns @@ -460,9 +325,6 @@ def dns_checks(host=None): """ hosts = get_hosts(host, 'Checking rDNS') for host in hosts: - if get_verbosity(): - text = f'{str(host)[0:25].ljust(25)!s}: ' - print_nocr(text) host.check_rdns() @@ -477,9 +339,9 @@ def process_reports(host=None, force=False): try: reports = Report.objects.filter( processed=force, host=host).order_by('created') - text = f'Processing Reports for Host {host!s}' + text = f'Processing Reports for Host {host}' except Report.DoesNotExist: - text = f'No Reports exist for Host {host!s}' + text = f'No Reports exist for Host {host}' else: text = 'Processing Reports for all Hosts' reports = Report.objects.filter(processed=force).order_by('created') @@ -490,42 +352,16 @@ def process_reports(host=None, force=False): 
report.process(find_updates=False) -def clean_updates(): - """ Removes PackageUpdate objects that are no longer - linked to any hosts - """ - package_updates = list(PackageUpdate.objects.all()) - - for update in package_updates: - if update.host_set.count() == 0: - text = f'Removing unused update {update!s}' - info_message.send(sender=None, text=text) - update.delete() - for duplicate in package_updates: - if update.oldpackage == duplicate.oldpackage and \ - update.newpackage == duplicate.newpackage and \ - update.security == duplicate.security and \ - update.id != duplicate.id: - text = f'Removing duplicate update: {update!s}' - info_message.send(sender=None, text=text) - for host in duplicate.host_set.all(): - host.updates.remove(duplicate) - host.updates.add(update) - host.save() - duplicate.delete() - - -def dbcheck(): +def dbcheck(remove_duplicates=False): """ Runs all clean_* functions to check database consistency """ - clean_updates() - clean_packages() - clean_package_names() - clean_arches() + clean_packageupdates() + clean_packages(remove_duplicates) + clean_packagenames() + clean_architectures() clean_repos() clean_modules() - clean_updates() - clean_tags() + clean_packageupdates() def collect_args(): @@ -586,6 +422,9 @@ def collect_args(): parser.add_argument( '-d', '--dbcheck', action='store_true', help='Perform some sanity checks and clean unused db entries') + parser.add_argument( + '-rd', '--remove-duplicates', action='store_true', + help='Remove duplicates during dbcheck - this may take some time') parser.add_argument( '-n', '--dns-checks', action='store_true', help='Perform reverse DNS checks if enabled for that Host') @@ -596,14 +435,20 @@ def collect_args(): '-D', '--diff', metavar=('hostA', 'hostB'), nargs=2, help='Show differences between two Hosts in diff-like output') parser.add_argument( - '-ue', '--update-errata', action='store_true', - help='Update CentOS errata from https://cefs.steve-meier.de/') + '-e', '--update-errata', 
action='store_true', + help='Update Errata') + parser.add_argument( + '-E', '--erratum-type', + help='Only update the specified Erratum type (e.g. `yum`, `ubuntu`, `arch`)') parser.add_argument( - '-me', '--mark-errata-security-updates', action='store_true', - help='Mark updates as security updates based on downloaded errata') + '-v', '--update-cves', action='store_true', + help='Update CVEs from https://cve.org') parser.add_argument( - '-e', '--errata', action='store_true', - help='Download CentOS errata from https://cefs.steve-meier.de/') + '--cve', help="Only update the specified CVE (e.g. CVE-2024-1234)") + parser.add_argument( + '--fetch-nist-data', '-nd', action='store_true', + help='Fetch NIST CVE data in addition to MITRE data (rate-limited to 1 API call every 6 seconds)' + ) return parser @@ -622,7 +467,7 @@ def process_args(args): args.clean_updates = True args.dbcheck = True args.dns_checks = True - args.errata = False + args.errata = True if args.list_repos: list_repos(args.repo) return False @@ -659,7 +504,7 @@ def process_args(args): process_reports(args.host, args.force) showhelp = False if args.dbcheck: - dbcheck() + dbcheck(args.remove_duplicates) showhelp = False if args.refresh_repos: refresh_repos(args.repo, args.force) @@ -673,21 +518,21 @@ def process_args(args): host_updates_alt(args.host) showhelp = False recheck = True - if args.dbcheck and recheck: - dbcheck() if args.dns_checks: dns_checks(args.host) showhelp = False - if args.errata: - update_errata(args.force) - mark_errata_security_updates() - showhelp = False if args.update_errata: - update_errata(args.force) - showhelp = False - if args.mark_errata_security_updates: + update_errata(args.erratum_type, args.force, args.repo) + scan_package_updates_for_affected_packages() mark_errata_security_updates() + enrich_errata() + showhelp = False + if args.update_cves: + update_cves(args.cve, args.fetch_nist_data) + update_cwes(args.cve) showhelp = False + if args.dbcheck and recheck: + 
dbcheck(args.remove_duplicates) return showhelp diff --git a/sbin/patchman-manage b/sbin/patchman-manage index 2d2ef09f..19bd04b4 100755 --- a/sbin/patchman-manage +++ b/sbin/patchman-manage @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Copyright 2019-2021 Marcus Furlong +# Copyright 2019-2025 Marcus Furlong # # This file is part of Patchman. # @@ -19,21 +19,19 @@ import os import sys -if __name__ == '__main__': - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "patchman.settings") + +def main(): + os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'patchman.settings') try: from django.core.management import execute_from_command_line - except ImportError: - # The above import may fail for some other reason. Ensure that the - # issue is really that Django is missing to avoid masking other - # exceptions on Python 2. - try: - import django - except ImportError: - raise ImportError( - "Couldn't import Django. Are you sure it's installed and " - "available on your PYTHONPATH environment variable? Did you " - "forget to activate a virtual environment?" - ) - raise + except ImportError as exc: + raise ImportError( + 'Could not import Django. Are you sure it is installed and ' + 'available on your PYTHONPATH environment variable? Did you ' + 'forget to activate a virtual environment?' 
+ ) from exc execute_from_command_line(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/scripts/rpm-post-install.sh b/scripts/rpm-post-install.sh index 18f28615..24ade8af 100644 --- a/scripts/rpm-post-install.sh +++ b/scripts/rpm-post-install.sh @@ -11,6 +11,8 @@ fi systemctl enable httpd systemctl restart httpd +systemctl enable redis +systemctl start redis patchman-set-secret-key chown apache /etc/patchman/local_settings.py @@ -20,8 +22,12 @@ patchman-manage collectstatic --noinput patchman-manage makemigrations patchman-manage migrate --run-syncdb --fake-initial +sqlite3 /var/lib/patchman/db/patchman.db 'PRAGMA journal_mode=WAL;' chown -R apache:apache /var/lib/patchman +adduser --system --group patchman-celery +usermod -a -G apache patchman-celery +chmod g+w /var/lib/patchman /var/lib/patchman/db /var/lib/patchman/db/patchman.db chcon --type httpd_sys_rw_content_t /var/lib/patchman/db/patchman.db semanage port -a -t http_port_t -p tcp 5672 setsebool -P httpd_can_network_memcache 1 diff --git a/security/__init__.py b/security/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/security/admin.py b/security/admin.py new file mode 100644 index 00000000..196a9468 --- /dev/null +++ b/security/admin.py @@ -0,0 +1,24 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from django.contrib import admin +from security.models import CWE, CVSS, CVE, Reference + + +admin.site.register(CWE) +admin.site.register(CVSS) +admin.site.register(CVE) +admin.site.register(Reference) diff --git a/security/apps.py b/security/apps.py new file mode 100644 index 00000000..6e578334 --- /dev/null +++ b/security/apps.py @@ -0,0 +1,21 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.apps import AppConfig + + +class SecurityConfig(AppConfig): + name = 'security' diff --git a/security/managers.py b/security/managers.py new file mode 100644 index 00000000..4dfcffaf --- /dev/null +++ b/security/managers.py @@ -0,0 +1,22 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from django.db import models + + +class CVEManager(models.Manager): + def get_queryset(self): + return super().get_queryset().select_related() diff --git a/security/migrations/0001_initial.py b/security/migrations/0001_initial.py new file mode 100644 index 00000000..5f922c9a --- /dev/null +++ b/security/migrations/0001_initial.py @@ -0,0 +1,48 @@ +# Generated by Django 4.2.18 on 2025-02-08 20:40 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ] + + operations = [ + migrations.CreateModel( + name='CVSS', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('score', models.DecimalField(decimal_places=1, max_digits=3, null=True)), + ('severity', models.CharField(blank=True, max_length=255, null=True)), + ('version', models.DecimalField(decimal_places=1, max_digits=2)), + ('vector_string', models.CharField(blank=True, max_length=255, null=True)), + ], + ), + migrations.CreateModel( + name='CWE', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('cwe_id', models.CharField(max_length=255, unique=True)), + ('name', models.CharField(blank=True, max_length=255, null=True)), + ('description', models.CharField(blank=True, max_length=255, null=True)), + ], + ), + migrations.CreateModel( + name='CVE', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('cve_id', models.CharField(max_length=255, unique=True)), + ('title', models.CharField(blank=True, max_length=255, null=True)), + ('description', models.CharField(max_length=255)), + ('reserved_date', models.DateTimeField(blank=True, null=True)), + ('published_date', models.DateTimeField(blank=True, null=True)), + ('rejected_date', models.DateTimeField(blank=True, null=True)), + ('updated_date', models.DateTimeField(blank=True, null=True)), + 
('cvss_scores', models.ManyToManyField(blank=True, to='security.cvss')), + ('cwes', models.ManyToManyField(blank=True, to='security.cwe')), + ], + ), + ] diff --git a/security/migrations/0002_alter_cve_options.py b/security/migrations/0002_alter_cve_options.py new file mode 100644 index 00000000..91e6b620 --- /dev/null +++ b/security/migrations/0002_alter_cve_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-02-11 03:51 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0001_initial'), + ] + + operations = [ + migrations.AlterModelOptions( + name='cve', + options={'ordering': ('cve_id',)}, + ), + ] diff --git a/security/migrations/0003_alter_cve_description_alter_cwe_description.py b/security/migrations/0003_alter_cve_description_alter_cwe_description.py new file mode 100644 index 00000000..0c44a981 --- /dev/null +++ b/security/migrations/0003_alter_cve_description_alter_cwe_description.py @@ -0,0 +1,23 @@ +# Generated by Django 4.2.19 on 2025-02-26 16:01 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0002_alter_cve_options'), + ] + + operations = [ + migrations.AlterField( + model_name='cve', + name='description', + field=models.TextField(blank=True, default=''), + ), + migrations.AlterField( + model_name='cwe', + name='description', + field=models.TextField(blank=True, default=''), + ), + ] diff --git a/security/migrations/0004_alter_cve_options.py b/security/migrations/0004_alter_cve_options.py new file mode 100644 index 00000000..8650801f --- /dev/null +++ b/security/migrations/0004_alter_cve_options.py @@ -0,0 +1,17 @@ +# Generated by Django 4.2.19 on 2025-03-04 22:07 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0003_alter_cve_description_alter_cwe_description'), + ] + + operations = [ + migrations.AlterModelOptions( + name='cve', + 
options={'ordering': ['cve_id']}, + ), + ] diff --git a/security/migrations/0005_reference_cve_references.py b/security/migrations/0005_reference_cve_references.py new file mode 100644 index 00000000..f94cf7d5 --- /dev/null +++ b/security/migrations/0005_reference_cve_references.py @@ -0,0 +1,29 @@ +# Generated by Django 4.2.19 on 2025-03-05 19:57 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0004_alter_cve_options'), + ] + + operations = [ + migrations.CreateModel( + name='Reference', + fields=[ + ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('ref_type', models.CharField(max_length=255)), + ('url', models.URLField(max_length=765)), + ], + options={ + 'unique_together': {('ref_type', 'url')}, + }, + ), + migrations.AddField( + model_name='cve', + name='references', + field=models.ManyToManyField(blank=True, to='security.reference'), + ), + ] diff --git a/security/migrations/0006_alter_cve_options_alter_cvss_unique_together.py b/security/migrations/0006_alter_cve_options_alter_cvss_unique_together.py new file mode 100644 index 00000000..515c5217 --- /dev/null +++ b/security/migrations/0006_alter_cve_options_alter_cvss_unique_together.py @@ -0,0 +1,21 @@ +# Generated by Django 4.2.19 on 2025-03-10 17:36 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0005_reference_cve_references'), + ] + + operations = [ + migrations.AlterModelOptions( + name='cve', + options={'ordering': ['-cve_id']}, + ), + migrations.AlterUniqueTogether( + name='cvss', + unique_together={('score', 'severity', 'version', 'vector_string')}, + ), + ] diff --git a/security/migrations/0007_remove_cve_title.py b/security/migrations/0007_remove_cve_title.py new file mode 100644 index 00000000..64c79f13 --- /dev/null +++ b/security/migrations/0007_remove_cve_title.py @@ -0,0 +1,17 @@ +# Generated by 
Django 4.2.20 on 2025-04-20 20:15 + +from django.db import migrations + + +class Migration(migrations.Migration): + + dependencies = [ + ('security', '0006_alter_cve_options_alter_cvss_unique_together'), + ] + + operations = [ + migrations.RemoveField( + model_name='cve', + name='title', + ), + ] diff --git a/security/migrations/__init__.py b/security/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/security/models.py b/security/models.py new file mode 100644 index 00000000..9c097eed --- /dev/null +++ b/security/models.py @@ -0,0 +1,265 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +import json +import re +from cvss import CVSS2, CVSS3, CVSS4 +from time import sleep + +from django.db import models +from django.urls import reverse + +from security.managers import CVEManager +from util import get_url, fetch_content, tz_aware_datetime, error_message + + +class Reference(models.Model): + + ref_type = models.CharField(max_length=255) + url = models.URLField(max_length=765) + + class Meta: + unique_together = ['ref_type', 'url'] + + def __str__(self): + return self.url + + +class CWE(models.Model): + + cwe_id = models.CharField(max_length=255, unique=True) + name = models.CharField(max_length=255, blank=True, null=True) + description = models.TextField(blank=True, default='') + + def __str__(self): + return f'{self.cwe_id} - {self.name}' + + def get_absolute_url(self): + return reverse('security:cwe_detail', args=[self.cwe_id]) + + @property + def int_id(self): + return int(self.cwe_id.split('-')[1]) + + def fetch_cwe_data(self): + int_id = self.int_id + mitre_cwe_url = f'https://cwe-api.mitre.org/api/v1/cwe/{int_id}' + res = get_url(mitre_cwe_url) + data = fetch_content(res, f'Fetching {self.cwe_id} data') + cwe_json = json.loads(data) + if cwe_json == 'at least one CWE not found': + return + cwe = cwe_json[0] + if cwe.get('Type').endswith('weakness'): + weakness_url = f'https://cwe-api.mitre.org/api/v1/cwe/weakness/{int_id}' + res = get_url(weakness_url) + data = fetch_content(res, f'Fetching {self.cwe_id} weakness data') + weakness_json = json.loads(data) + for weakness in weakness_json.get('Weaknesses'): + if int(weakness.get('ID')) == int_id: + self.name = weakness.get('Name') + self.description = weakness.get('Description') + self.save() + + +class CVSS(models.Model): + + score = models.DecimalField(max_digits=3, decimal_places=1, null=True) + severity = models.CharField(max_length=255, blank=True, null=True) + version = models.DecimalField(max_digits=2, decimal_places=1) + vector_string = models.CharField(max_length=255, 
blank=True, null=True) + + class Meta: + unique_together = ['score', 'severity', 'version', 'vector_string'] + + def __str__(self): + return f'{self.score} ({self.severity}) [{self.vector_string}]' + + +class CVE(models.Model): + + cve_id = models.CharField(max_length=255, unique=True) + description = models.TextField(blank=True, default='') + reserved_date = models.DateTimeField(blank=True, null=True) + published_date = models.DateTimeField(blank=True, null=True) + rejected_date = models.DateTimeField(blank=True, null=True) + updated_date = models.DateTimeField(blank=True, null=True) + cwes = models.ManyToManyField(CWE, blank=True) + cvss_scores = models.ManyToManyField(CVSS, blank=True) + references = models.ManyToManyField(Reference, blank=True) + + objects = CVEManager() + + class Meta: + ordering = ['-cve_id'] + + def __str__(self): + return self.cve_id + + def get_absolute_url(self): + return reverse('security:cve_detail', args=[self.cve_id]) + + def add_cvss_score(self, vector_string, score=None, severity=None, version=None): + if not version: + version = vector_string.split('/')[0].replace('CVSS:', '') + if version.startswith('2'): + cvss_score = CVSS2(vector_string) + elif version.startswith('3'): + cvss_score = CVSS3(vector_string) + elif version.startswith('4'): + cvss_score = CVSS4(vector_string) + if not score: + score = cvss_score.base_score + if not severity: + severity = cvss_score.severities()[0] + try: + cvss, created = CVSS.objects.get_or_create( + version=version, + vector_string=vector_string, + score=score, + severity=severity, + ) + except CVSS.MultipleObjectsReturned: + matching_cvsses = CVSS.objects.filter( + version=version, + vector_string=vector_string, + ) + cvss = matching_cvsses.first() + matching_cvsses.exclude(id=cvss.id).delete() + self.cvss_scores.add(cvss) + + def fetch_cve_data(self, fetch_nist_data=False, sleep_secs=6): + self.fetch_mitre_cve_data() + self.fetch_osv_dev_cve_data() + if fetch_nist_data: + 
self.fetch_nist_cve_data() + sleep(sleep_secs) # rate limited, see https://nvd.nist.gov/developers/start-here + + def fetch_mitre_cve_data(self): + mitre_cve_url = f'https://cveawg.mitre.org/api/cve/{self.cve_id}' + res = get_url(mitre_cve_url) + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {mitre_cve_url}') + return + data = fetch_content(res, f'Fetching {self.cve_id} MITRE data') + cve_json = json.loads(data) + self.parse_mitre_cve_data(cve_json) + + def fetch_osv_dev_cve_data(self): + osv_dev_cve_url = f'https://api.osv.dev/v1/vulns/{self.cve_id}' + res = get_url(osv_dev_cve_url) + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {osv_dev_cve_url}') + return + data = fetch_content(res, f'Fetching {self.cve_id} OSV data') + cve_json = json.loads(data) + self.parse_osv_dev_cve_data(cve_json) + + def parse_osv_dev_cve_data(self, cve_json): + from security.utils import get_or_create_reference + references = cve_json.get('references') + if references: + for reference in references: + ref_type = reference.get('type').capitalize() + url = reference.get('url') + get_or_create_reference(ref_type, url) + scores = cve_json.get('severity') + if scores: + for score in scores: + self.add_cvss_score(vector_string=score.get('score')) + + def fetch_nist_cve_data(self): + nist_cve_url = f'https://services.nvd.nist.gov/rest/json/cves/2.0?cveId={self.cve_id}' + res = get_url(nist_cve_url) + data = fetch_content(res, f'Fetching {self.cve_id} NIST data') + if res.status_code == 404: + error_message.send(sender=None, text=f'404 - Skipping {self.cve_id} - {nist_cve_url}') + cve_json = json.loads(data) + self.parse_nist_cve_data(cve_json) + + def parse_nist_cve_data(self, cve_json): + from security.utils import get_or_create_reference + vulnerabilites = cve_json.get('vulnerabilities') + for vulnerability in vulnerabilites: + cve = vulnerability.get('cve') + cve_id = cve.get('id') + if 
cve_id != self.cve_id: + error_message.send(sender=None, text=f'CVE ID mismatch - {self.cve_id} != {cve_id}') + return + metrics = cve.get('metrics') + for metric, score_data in metrics.items(): + if metric.startswith('cvss'): + for scores in score_data: + for key, value in scores.items(): + if key.startswith('cvssData'): + self.add_cvss_score( + vector_string=value.get('vectorString'), + score=value.get('baseScore'), + severity=value.get('baseSeverity'), + version=value.get('version') + ) + references = cve.get('references') + for reference in references: + ref_type = 'Link' + url = reference.get('url') + ref = get_or_create_reference(ref_type=ref_type, url=url) + self.references.add(ref) + + def parse_mitre_cve_data(self, cve_json): + cve_metadata = cve_json.get('cveMetadata') + reserved_date = cve_metadata.get('dateReserved') + if reserved_date: + self.reserved_date = tz_aware_datetime(cve_metadata.get('dateReserved')) + rejected_date = cve_metadata.get('dateRejected') + if rejected_date: + self.rejected_date = tz_aware_datetime(rejected_date) + published_date = cve_metadata.get('datePublished') + if published_date: + self.published_date = tz_aware_datetime(cve_metadata.get('datePublished')) + updated_date = cve_metadata.get('dateUpdated') + if updated_date: + self.updated_date = tz_aware_datetime(cve_metadata.get('dateUpdated')) + cna_container = cve_json.get('containers').get('cna') + descriptions = cna_container.get('descriptions') + if descriptions: + self.description = descriptions[0].get('value') + problem_types = cna_container.get('problemTypes', []) + for problem_type in problem_types: + descriptions = problem_type.get('descriptions') + for description in descriptions: + if description.get('type') == 'CWE': + cwe_id = description.get('cweId') + if cwe_id: + cwe, created = CWE.objects.get_or_create(cwe_id=cwe_id) + self.cwes.add(cwe) + cwe_description = description.get('description') + cwe_ids = re.findall(r'CWE-\d+', cwe_description) + for cwe_id in 
cwe_ids: + cwe, created = CWE.objects.get_or_create(cwe_id=cwe_id) + self.cwes.add(cwe) + metrics = cna_container.get('metrics') + if metrics: + for metric in metrics: + if metric.get('format') == 'CVSS': + for key, value in metric.items(): + if key.startswith('cvss'): + self.add_cvss_score( + vector_string=value.get('vectorString'), + score=value.get('baseScore'), + severity=value.get('baseSeverity'), + version=value.get('version') + ) + self.save() diff --git a/security/serializers.py b/security/serializers.py new file mode 100644 index 00000000..979fc2c9 --- /dev/null +++ b/security/serializers.py @@ -0,0 +1,38 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from rest_framework import serializers + +from security.models import CVE, CWE, Reference + + +class CWESerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = CWE + fields = ('cwe_id', 'title', 'description') + + +class CVESerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = CVE + fields = ('cve_id', 'description', 'cvss_score', 'cwe', + 'registered_date', 'published_date', 'updated_date') + + +class ReferenceSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = Reference + fields = ('id', 'ref_type', 'url') diff --git a/security/tasks.py b/security/tasks.py new file mode 100644 index 00000000..a04bb1c8 --- /dev/null +++ b/security/tasks.py @@ -0,0 +1,51 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. 
If not, see + +from celery import shared_task + +from security.models import CVE, CWE + + +@shared_task +def update_cve(cve_id): + """ Task to update a CVE + """ + cve = CVE.objects.get(id=cve_id) + cve.fetch_cve_data() + + +@shared_task +def update_cves(): + """ Task to update all CVEs + """ + for cve in CVE.objects.all(): + update_cve.delay(cve.id) + + +@shared_task +def update_cwe(cwe_id): + """ Task to update a CWE + """ + cwe = CWE.objects.get(id=cwe_id) + cwe.fetch_cwe_data() + + +@shared_task +def update_cwes(): + """ Task to update all CWEa + """ + for cwe in CWE.objects.all(): + update_cwe.delay(cwe.id) diff --git a/security/templates/security/cve_detail.html b/security/templates/security/cve_detail.html new file mode 100644 index 00000000..6c86197a --- /dev/null +++ b/security/templates/security/cve_detail.html @@ -0,0 +1,103 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}CVE - {{ cve }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CVEs
  • {{ cve }}
  • {% endblock %} + +{% block content_title %} CVE - {{ cve }} {% endblock %} + +{% block content %} + + + +
    +
    +
    +
    Repo NameIDRepo NameRepo ID Mirrors Enabled Security
    {{ repo }}{% if repo.id != None %} {{ repo.id }} {% endif %}{{ repo.mirror_set.count }}{% if repo.repo_id %} {{ repo.repo_id }} {% endif %} {{ repo.mirror_set.count }}
    {% yes_no_button_repo_en repo %}
    {% yes_no_button_repo_sec repo %}
    {% yes_no_img repo.auth_required %}
    + + + + + + + + + + + + + + + + + + + + + + + + + +
    CVE ID{{ cve.cve_id }}
    Description{{ cve.description }}
    Reserved{{ cve.reserved_date|date|default_if_none:'' }}
    Rejected{{ cve.rejected_date|date|default_if_none:'' }}
    Published{{ cve.published_date|date|default_if_none:'' }}
    Updated{{ cve.updated_date|date|default_if_none:'' }}
    CVSS Scores + {% for score in cve.cvss_scores.all %} + {% if score.version|stringformat:"d" == "2" %} + {{ score.score }} (CVSS {{ score.version }}) {% bootstrap_icon "link" %}
    + {% else %} + {{ score.score }} - {{ score.severity }} (CVSS {{ score.version }}) {% bootstrap_icon "link" %}
    + {% endif %} + {% endfor %} +
    CWEs + {% for cwe in cve.cwes.all %} + {{ cwe.cwe_id }} - {{ cwe.name }} {% bootstrap_icon "link" %}
    + {% endfor %} +
    Affected Packages{{ affected_packages|length }}
    Fixed Packages{{ fixed_packages|length }}
    Errata{{ cve.erratum_set.count }}
    OSes Affected + + {% for osrelease in osreleases %} + + + {% endfor %} +
    {{ osrelease }} +
    +
    Links + + + + + {% for reference in references %} + + + + + {% endfor %} +
    NISThttps://nvd.nist.gov/vuln/detail/{{ cve.cve_id }} {% bootstrap_icon "link" %}
    MITREhttps://www.cve.org/CVERecord?id={{ cve.cve_id }} {% bootstrap_icon "link" %}
    osv.devhttps://osv.dev/vulnerability/{{ cve.cve_id }} {% bootstrap_icon "link" %}
    {{ reference.ref_type }}{{reference.url}} {% bootstrap_icon "link" %}
    +
    +
    + +
    +
    + {% for package in affected_packages %} + + {{ package }} + + {% endfor %} +
    +
    +
    +
    + {% for package in fixed_packages %} + + {{ package }} + + {% endfor %} +
    +
    + + +{% endblock %} diff --git a/security/templates/security/cve_list.html b/security/templates/security/cve_list.html new file mode 100644 index 00000000..a9027525 --- /dev/null +++ b/security/templates/security/cve_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}CVEs{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CVEs
  • {% endblock %} + +{% block content_title %} CVEs {% endblock %} diff --git a/security/templates/security/cve_table.html b/security/templates/security/cve_table.html new file mode 100644 index 00000000..63347b1e --- /dev/null +++ b/security/templates/security/cve_table.html @@ -0,0 +1,37 @@ +{% load common bootstrap3 %} + + + + + + + + + + + + + + + + + {% for cve in object_list %} + + + + + + + + + + + + + {% endfor %} + +
    CVE IDLinksDescriptionCVSS ScoresCWEsReservedRejectedPublishedUpdatedErrata
    {{ cve.cve_id }} + NIST {% bootstrap_icon "link" %}   + MITRE {% bootstrap_icon "link" %}   + osv.dev {% bootstrap_icon "link" %} + {{ cve.description|truncatechars:60 }}{% for score in cve.cvss_scores.all %} {{ score.score }} {% endfor %}{% for cwe in cve.cwes.all %} {{ cwe.cwe_id }} {% endfor %}{{ cve.reserved_date|date|default_if_none:'' }}{{ cve.rejected_date|date|default_if_none:'' }}{{ cve.published_date|date|default_if_none:'' }}{{ cve.updated_date|date|default_if_none:'' }}{{ cve.erratum_set.count }}
    diff --git a/security/templates/security/cwe_detail.html b/security/templates/security/cwe_detail.html new file mode 100644 index 00000000..d22587cf --- /dev/null +++ b/security/templates/security/cwe_detail.html @@ -0,0 +1,22 @@ +{% extends "base.html" %} + +{% load common bootstrap3 %} + +{% block page_title %}CWE - {{ cwe }} {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CWEs
  • {{ cwe }}
  • {% endblock %} + +{% block content_title %} CWE - {{ cwe }} {% endblock %} + +{% block content %} + +
    + + + + + +
    CWE ID{{ cwe.cwe_id }}
    Name{{ cwe.name }} {% bootstrap_icon "link" %}
    Description{{ cwe.description }}
    Affected CVEs{{ cwe.cve_set.count }}
    +
    + +{% endblock %} diff --git a/security/templates/security/cwe_list.html b/security/templates/security/cwe_list.html new file mode 100644 index 00000000..de74683e --- /dev/null +++ b/security/templates/security/cwe_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}CWEs{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • CWEs
  • {% endblock %} + +{% block content_title %} CWEs {% endblock %} diff --git a/security/templates/security/cwe_table.html b/security/templates/security/cwe_table.html new file mode 100644 index 00000000..85ccd118 --- /dev/null +++ b/security/templates/security/cwe_table.html @@ -0,0 +1,21 @@ +{% load common %} + + + + + + + + + + + {% for cwe in object_list %} + + + + + + + {% endfor %} + +
    CWE IDNameDescriptionCVEs
    {{ cwe.cwe_id }}{{ cwe.name }}{{ cwe.description|truncatechars:120 }}{{ cwe.cve_set.count }}
    diff --git a/security/templates/security/reference_list.html b/security/templates/security/reference_list.html new file mode 100644 index 00000000..2ae1dab3 --- /dev/null +++ b/security/templates/security/reference_list.html @@ -0,0 +1,7 @@ +{% extends "objectlist.html" %} + +{% block page_title %}References{% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • References
  • {% endblock %} + +{% block content_title %} References {% endblock %} diff --git a/security/templates/security/reference_table.html b/security/templates/security/reference_table.html new file mode 100644 index 00000000..a28ff719 --- /dev/null +++ b/security/templates/security/reference_table.html @@ -0,0 +1,19 @@ +{% load common %} + + + + + + + + + + {% for eref in object_list %} + + + + + + {% endfor %} + +
    TypeURLLinked Errata
    {{ eref.ref_type }}{{ eref.url }}{{ eref.erratum_set.count }}
    diff --git a/security/templates/security/security_landing.html b/security/templates/security/security_landing.html new file mode 100644 index 00000000..4a79c139 --- /dev/null +++ b/security/templates/security/security_landing.html @@ -0,0 +1,20 @@ +{% extends "base.html" %} + +{% block page_title %} Security {% endblock %} + +{% block breadcrumbs %} {{ block.super }}
  • Security
  • {% endblock %} + +{% block content_title %} Security {% endblock %} + +{% block content %} + + + +{% endblock %} diff --git a/security/urls.py b/security/urls.py new file mode 100644 index 00000000..c87b9a67 --- /dev/null +++ b/security/urls.py @@ -0,0 +1,30 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.urls import path + +from security import views + +app_name = 'security' + +urlpatterns = [ + path('', views.security_landing, name='security_landing'), + path('cves', views.cve_list, name='cve_list'), + path('cves/', views.cve_detail, name='cve_detail'), + path('cwes', views.cwe_list, name='cwe_list'), + path('cwes/', views.cwe_detail, name='cwe_detail'), + path('references/', views.reference_list, name='reference_list'), +] diff --git a/security/utils.py b/security/utils.py new file mode 100644 index 00000000..343442a7 --- /dev/null +++ b/security/utils.py @@ -0,0 +1,155 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from urllib.parse import urlparse + +from security.models import CVE, CWE, Reference + + +def get_cve_reference(cve_id): + """ Given a CVE ID, return a dictionary with the URL to the CVE record. + """ + url = f'https://www.cve.org/CVERecord?id={cve_id}' + return {'ref_type': 'CVE', 'url': url} + + +def get_or_create_cve(cve_id): + """ Given a CVE ID, get or create a CVE object. + """ + cve, created = CVE.objects.get_or_create(cve_id=cve_id) + return cve + + +def update_cves(cve_id=None, fetch_nist_data=False): + """ Fetch the latest CVE data from the CVE API. + e.g. https://cveawg.mitre.org/api/cve/CVE-2024-1234 + """ + if cve_id: + cve = CVE.objects.get(cve_id=cve_id) + cve.fetch_cve_data(fetch_nist_data, sleep_secs=0) + else: + for cve in CVE.objects.all(): + cve.fetch_cve_data(fetch_nist_data) + + +def update_cwes(cve_id=None): + """ Fetch the latest CWEs from the CWE API. + e.g. 
https://cwe-api.mitre.org/api/v1/cwe/74,79 + https://cwe-api.mitre.org/api/v1/cwe/weakness/79 + """ + if cve_id: + cve = CVE.objects.get(cve_id=cve_id) + cwes = cve.cwes.all() + else: + cwes = CWE.objects.all() + for cwe in cwes: + cwe.fetch_cwe_data() + + +def fixup_bugzilla_url(url): + bugzilla_hosts = [ + 'bugzilla.redhat.com', 'bugzilla.opensuse.org', 'bugzilla.suse.com', + 'bugs.debian.org', 'bugs.kde.org', 'bugzilla.mozilla.org', 'bugzilla.gnome.org', + ] + if url.hostname in bugzilla_hosts and url.path == '/show_bug.cgi': + bug = url.query.split('=')[1] + path = f'/{bug}' + url = url._replace(path=path, query='') + return url + + +def fixup_rhn_url(url): + if url.hostname == 'rhn.redhat.com': + netloc = url.netloc.replace('rhn', 'access') + path = url.path.replace('.html', '') + url = url._replace(netloc=netloc, path=path) + return url + + +def fixup_ubuntu_usn_url(url): + if ('ubuntu.com' in url.hostname and 'usn/' in url.path) or url.hostname == 'usn.ubuntu.com': + netloc = url.netloc.replace('usn.', '').replace('www.', '') + path = url.path.replace('usn/', 'security/notices/').replace('usn', 'USN').rstrip('/') + usn_id = path.split('/')[-1] + if 'USN' not in usn_id: + path = '/'.join(path.split('/')[:-1]) + '/USN-' + usn_id + url = url._replace(netloc=netloc, path=path) + return url + + +def fixup_reference(ref): + """ Fix up a Security Reference object to normalize the URL and type + """ + url = urlparse(ref.get('url')) + ref_type = ref.get('ref_type') + if 'lists' in url.hostname or 'lists' in url.path: + ref_type = 'Mailing List' + if ref_type == 'bugzilla' or 'bug' in url.hostname or 'bugs' in url.path: + ref_type = 'Bug Tracker' + url = fixup_ubuntu_usn_url(url) + if url.hostname == 'ubuntu.com' and url.path.startswith('/security/notices/USN'): + ref_type = 'USN' + if 'launchpad.net' in url.hostname: + ref_type = 'Bug Tracker' + netloc = url.netloc.replace('bugs.', '') + bug = url.path.split('/')[-1] + path = f'/bugs/{bug}' + url = 
url._replace(netloc=netloc, path=path) + url = fixup_bugzilla_url(url) + url = fixup_rhn_url(url) + if url.hostname == 'access.redhat.com': + if 'l1d-cache-eviction-and-vector-register-sampling' in url.path or \ + 'security/vulnerabilities/speculativeexecution' in url.path or \ + 'security/vulnerabilities/stackguard' in url.path: + ref_type = 'Link' + elif 'security/cve' in url.path: + return + else: + old_ref = url.path.split('/')[-1] + refs = old_ref.split('-') + if ':' not in url.path: + try: + new_ref = f'{refs[0]}-{refs[1]}:{refs[2]}' + path = url.path.replace(old_ref, new_ref) + url = url._replace(path=path) + except IndexError: + pass + ref_type = refs[0].upper() + final_url = url.geturl() + if final_url in ['https://launchpad.net/bugs/', 'https://launchpad.net/bugs/XXXXXX']: + return + ref['ref_type'] = ref_type + ref['url'] = final_url + return ref + + +def get_or_create_reference(ref_type, url, update_ref_type=False): + """ Get or create a Reference object. + """ + reference = fixup_reference({'ref_type': ref_type, 'url': url}) + if reference: + refs = Reference.objects.filter(url=reference.get('url')) + if refs: + ref = refs.first() + if ref.url != reference.get('url') and update_ref_type: + ref.ref_type = ref_type + ref.save() + else: + ref, created = Reference.objects.get_or_create( + ref_type=reference.get('ref_type'), + url=reference.get('url'), + ) + return ref diff --git a/security/views.py b/security/views.py new file mode 100644 index 00000000..58a686b5 --- /dev/null +++ b/security/views.py @@ -0,0 +1,195 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. 
+# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from django.shortcuts import get_object_or_404, render +from django.contrib.auth.decorators import login_required +from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger +from django.db.models import Q + +from rest_framework import viewsets + +from packages.models import Package +from operatingsystems.models import OSRelease +from security.models import CVE, CWE, Reference +from security.serializers import CVESerializer, CWESerializer, ReferenceSerializer +from util.filterspecs import Filter, FilterBar + + +@login_required +def cwe_list(request): + cwes = CWE.objects.select_related() + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(cwe_id__icontains=term) | \ + Q(name__icontains=term) | \ + Q(description__icontains=term) + query = query & q + cwes = cwes.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(cwes, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + return render(request, + 'security/cwe_list.html', + {'page': page, + 'terms': terms}) + + +@login_required +def cwe_detail(request, cwe_id): + cwe = get_object_or_404(CWE, cwe_id=cwe_id) + return render(request, + 'security/cwe_detail.html', + {'cwe': cwe}) + + +@login_required +def cve_list(request): + cves = CVE.objects.select_related() + + if 'erratum_id' in request.GET: + cves = cves.filter(erratum=request.GET['erratum_id']) + + if 'reference_id' in request.GET: + cves = 
cves.filter(references=request.GET['reference_id']) + + if 'package_id' in request.GET: + cves = cves.filter(packages=request.GET['package_id']) + + if 'cwe_id' in request.GET: + cves = cves.filter(cwes__cwe_id=request.GET['cwe_id']) + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(cve_id__icontains=term) + query = query & q + cves = cves.filter(query) + else: + terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(cves, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + return render(request, + 'security/cve_list.html', + {'page': page, + 'terms': terms}) + + +@login_required +def cve_detail(request, cve_id): + cve = get_object_or_404(CVE, cve_id=cve_id) + affected_packages = Package.objects.filter(affected_by_erratum__in=cve.erratum_set.all()).distinct() + fixed_packages = Package.objects.filter(provides_fix_in_erratum__in=cve.erratum_set.all()).distinct() + osreleases = OSRelease.objects.filter(erratum__in=cve.erratum_set.all()).distinct() + references = Reference.objects.filter(Q(erratum__in=cve.erratum_set.all()) | Q(cve=cve)).distinct() + return render(request, + 'security/cve_detail.html', + {'cve': cve, + 'affected_packages': affected_packages, + 'fixed_packages': fixed_packages, + 'osreleases': osreleases, + 'references': references, + }) + + +@login_required +def reference_list(request): + refs = Reference.objects.select_related().order_by('ref_type') + + if 'ref_type' in request.GET: + refs = refs.filter(ref_type=request.GET['ref_type']).distinct() + + if 'erratum_id' in request.GET: + refs = refs.filter(erratum__id=request.GET['erratum_id']) + + if 'search' in request.GET: + terms = request.GET['search'].lower() + query = Q() + for term in terms.split(' '): + q = Q(url__icontains=term) + query = query & q + refs = refs.filter(query) + else: + 
terms = '' + + page_no = request.GET.get('page') + paginator = Paginator(refs, 50) + + try: + page = paginator.page(page_no) + except PageNotAnInteger: + page = paginator.page(1) + except EmptyPage: + page = paginator.page(paginator.num_pages) + + filter_list = [] + filter_list.append(Filter(request, 'Reference Type', 'ref_type', + Reference.objects.values_list('ref_type', flat=True).distinct())) + filter_bar = FilterBar(request, filter_list) + + return render(request, + 'security/reference_list.html', + {'page': page, + 'filter_bar': filter_bar, + 'terms': terms}) + + +@login_required +def security_landing(request): + return render(request, 'security/security_landing.html') + + +class CWEViewSet(viewsets.ModelViewSet): + """ API endpoint that allows CWEs to be viewed or edited. + """ + queryset = CWE.objects.all() + serializer_class = CWESerializer + + +class CVEViewSet(viewsets.ModelViewSet): + """ API endpoint that allows CVEs to be viewed or edited. + """ + queryset = CVE.objects.all() + serializer_class = CVESerializer + + +class ReferenceViewSet(viewsets.ModelViewSet): + """ API endpoint that allows security references to be viewed or edited. 
+ """ + queryset = Reference.objects.all() + serializer_class = ReferenceSerializer diff --git a/setup.cfg b/setup.cfg index d001be97..7af9ccb0 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,16 +3,16 @@ doc_files = README.md AUTHORS COPYING INSTALL.md install-script = scripts/rpm-install.sh post-install = scripts/rpm-post-install.sh requires = /usr/bin/python3 - python3-django >= 3.2.20 - python3-django-tagging + python3-django >= 4.2.20 + python3-django-taggit python3-django-extensions python3-django-bootstrap3 python3-django-rest-framework python3-django-filter python3-debian python3-rpm - python3-progressbar2 - python3-lxml + python3-tqdm + python3-tenacity python3-defusedxml python3-requests python3-colorama @@ -23,6 +23,12 @@ requires = /usr/bin/python3 python3-pymemcache python3-mod_wsgi python3-importlib-metadata + python3-cvss + python3-redis + redis + celery + python3-django-celery-beat + python3-GitPython policycoreutils-python-utils httpd python3-dnf-plugin-post-transaction-actions diff --git a/util/__init__.py b/util/__init__.py index 5e7e9633..a56ed3b6 100644 --- a/util/__init__.py +++ b/util/__init__.py @@ -15,23 +15,25 @@ # You should have received a copy of the GNU General Public License # along with Patchman. 
If not, see -import sys import requests import bz2 import magic import zlib import lzma -from colorama import Fore, Style +from datetime import datetime, timezone from enum import Enum from hashlib import md5, sha1, sha256, sha512 -from progressbar import Bar, ETA, Percentage, ProgressBar -from patchman.signals import error_message, info_message +from requests.exceptions import HTTPError, Timeout, ConnectionError +from tenacity import retry, retry_if_exception_type, stop_after_attempt, wait_exponential +from time import time +from tqdm import tqdm +from patchman.signals import error_message, info_message, debug_message + +from django.utils.timezone import make_aware +from django.utils.dateparse import parse_datetime +from django.conf import settings -if ProgressBar.__dict__.get('maxval'): - pbar2 = False -else: - pbar2 = True pbar = None verbose = None @@ -41,7 +43,6 @@ def get_verbosity(): """ Get the global verbosity level """ - global verbose return verbose @@ -55,51 +56,39 @@ def set_verbosity(value): def create_pbar(ptext, plength, ljust=35, **kwargs): """ Create a global progress bar if global verbose is True """ - global pbar, verbose + global pbar if verbose and plength > 0: jtext = str(ptext).ljust(ljust) - if pbar2: - pbar = ProgressBar(widgets=[Style.RESET_ALL + Fore.YELLOW + jtext, - Percentage(), Bar(), ETA()], - max_value=plength).start() - else: - pbar = ProgressBar(widgets=[Style.RESET_ALL + Fore.YELLOW + jtext, - Percentage(), Bar(), ETA()], - maxval=plength).start() + pbar = tqdm(total=plength, desc=jtext, position=0, leave=True, ascii=' >=') return pbar def update_pbar(index, **kwargs): """ Update the global progress bar if global verbose is True """ - global pbar, verbose + global pbar if verbose and pbar: - pbar.update(index) - if pbar2: - pmax = pbar.max_value - else: - pmax = pbar.maxval - if index >= pmax: - pbar.finish() - print_nocr(Fore.RESET) + pbar.update(n=index-pbar.n) + if index >= pbar.total: + pbar.close() pbar = None -def 
download_url(res, text='', ljust=35): - """ Display a progress bar to download the request content if verbose is +def fetch_content(response, text='', ljust=35): + """ Display a progress bar to fetch the request content if verbose is True. Otherwise, just return the request content """ - global verbose + if not response: + return if verbose: - content_length = res.headers.get('content-length') + content_length = response.headers.get('content-length') if content_length: clen = int(content_length) create_pbar(text, clen, ljust) chunk_size = 16384 i = 0 data = b'' - for chunk in res.iter_content(chunk_size=chunk_size, - decode_unicode=False): + for chunk in response.iter_content(chunk_size=chunk_size, decode_unicode=False): i += len(chunk) if i > clen: update_pbar(clen) @@ -109,42 +98,65 @@ def download_url(res, text='', ljust=35): return data else: info_message.send(sender=None, text=text) - return res.content - - -def print_nocr(text): - """ Print text without a carriage return - """ - print(text, end='') - sys.stdout.softspace = False + return response.content +@retry( + retry=retry_if_exception_type(HTTPError | Timeout | ConnectionResetError), + stop=stop_after_attempt(4), + wait=wait_exponential(multiplier=1, min=1, max=10), + reraise=False, +) def get_url(url, headers={}, params={}): """ Perform a http GET on a URL. Return None on error. 
""" - res = None + response = None try: - res = requests.get(url, headers=headers, params=params, stream=True) - except requests.exceptions.Timeout: - error_message.send(sender=None, text=f'Timeout - {url!s}') + debug_message.send(sender=None, text=f'Trying {url} headers:{headers} params:{params}') + response = requests.get(url, headers=headers, params=params, stream=True, timeout=30) + debug_message.send(sender=None, text=f'{response.status_code}: {response.headers}') + if response.status_code in [403, 404]: + return response + response.raise_for_status() except requests.exceptions.TooManyRedirects: - error_message.send(sender=None, - text=f'Too many redirects - {url!s}') - except requests.exceptions.RequestException as e: - error_message.send(sender=None, - text=f'Error ({e!s}) - {url!s}') - return res + error_message.send(sender=None, text=f'Too many redirects - {url}') + except ConnectionError: + error_message.send(sender=None, text=f'Connection error - {url}') + return response -def response_is_valid(res): +def response_is_valid(response): """ Check if a http response is valid """ - if res is not None: - return res.ok + if response: + return response.ok else: return False +def has_setting_of_type(setting_name, expected_type): + """ Checks if the Django settings module has the specified attribute + and if it is of the expected type + Returns True if the setting exists and is of the expected type, False otherwise. + """ + if not hasattr(settings, setting_name): + return False + setting_value = getattr(settings, setting_name) + return isinstance(setting_value, expected_type) + + +def get_setting_of_type(setting_name, setting_type, default): + """ Checks if the Django settings module has the specified attribute + and if it is of the expected type + Returns the value if the setting exists and is of the expected type, default otherwise. 
+ """ + if has_setting_of_type(setting_name, setting_type): + setting_value = getattr(settings, setting_name) + return setting_value + else: + return default + + def gunzip(contents): """ gunzip contents in memory and return the data """ @@ -165,7 +177,7 @@ def bunzip2(contents): if e == 'invalid data stream': error_message.send(sender=None, text='bunzip2: ' + e) except ValueError as e: - if e == 'couldn\'t find end of stream': + if e == "couldn't find end of stream": error_message.send(sender=None, text='bunzip2: ' + e) @@ -191,7 +203,7 @@ def extract(data, fmt): m = magic.open(magic.MAGIC_MIME) m.load() mime = m.buffer(data).split(';')[0] - if (mime == 'application/x-xz' or fmt.endswith('xz')): + if mime == 'application/x-xz' or fmt.endswith('xz'): return unxz(data) elif mime == 'application/x-bzip2' or fmt.endswith('bz2'): return bunzip2(data) @@ -212,7 +224,7 @@ def get_checksum(data, checksum_type): elif checksum_type == Checksum.md5: checksum = get_md5(data) else: - text = f'Unknown checksum type: {checksum_type!s}' + text = f'Unknown checksum type: {checksum_type}' error_message.send(sender=None, text=text) return checksum @@ -239,3 +251,39 @@ def get_md5(data): """ Return the md5 checksum for data """ return md5(data).hexdigest() + + +def is_epoch_time(timestamp): + """ Checks if an integer is likely a valid epoch timestamp. + Returns True if the integer is likely a valid epoch timestamp, False otherwise. 
+ """ + try: + ts = int(timestamp) + except ValueError: + return False + current_time = int(time()) + lower_bound = 0 + upper_bound = current_time + 3600 * 24 * 365 # up to a year in the future + return lower_bound <= ts <= upper_bound + + +def tz_aware_datetime(date): + """ Ensure a datetime is timezone-aware + Returns the tz-aware datetime object + """ + if isinstance(date, int) or is_epoch_time(date): + parsed_date = datetime.fromtimestamp(int(date)) + elif isinstance(date, str): + parsed_date = parse_datetime(date) + else: + parsed_date = date + parsed_date = parsed_date.replace(tzinfo=timezone.utc) + if not parsed_date.tzinfo: + parsed_date = make_aware(parsed_date) + return parsed_date + + +def get_datetime_now(): + """ Return the current timezone-aware datetime removing microseconds + """ + return datetime.now().astimezone().replace(microsecond=0) diff --git a/util/filterspecs.py b/util/filterspecs.py index c8b30840..1c845ff3 100644 --- a/util/filterspecs.py +++ b/util/filterspecs.py @@ -22,17 +22,14 @@ def get_query_string(qs): - newqs = [f'{k!s}={v!s}' for k, v in list(qs.items())] - return '?' + '&'.join(newqs).replace(' ', '%20') + new_qs = [f'{k}={v}' for k, v in list(qs.items())] + return '?' + '&'.join(new_qs).replace(' ', '%20') class Filter: - def __init__(self, request, name, filters, header=''): - if header == '': - self.header = name - else: - self.header = header + def __init__(self, request, header, name, filters): + self.header = header if isinstance(filters, tuple): filters = dict(filters) @@ -57,15 +54,14 @@ def output(self, qs): del qs[self.name] output = '
    \n' - output += '
    ' - output += f"{self.header.replace('_', ' ')!s}
    \n" + output += f'
    {self.header}
    \n' output += '
    \n' output += '
    \n' - output += f'{v!s}\n' + output += f'{v}\n' output += '
    ' return output diff --git a/util/tasks.py b/util/tasks.py new file mode 100644 index 00000000..f650e3e2 --- /dev/null +++ b/util/tasks.py @@ -0,0 +1,37 @@ +# Copyright 2025 Marcus Furlong +# +# This file is part of Patchman. +# +# Patchman is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, version 3 only. +# +# Patchman is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Patchman. If not, see + +from celery import shared_task + +from arch.utils import clean_architectures +from modules.utils import clean_modules +from packages.utils import clean_packages, clean_packageupdates, clean_packagenames +from repos.utils import clean_repos, remove_mirror_trailing_slashes + + +@shared_task +def clean_database(remove_duplicate_packages=False): + """ Task to check the database and remove orphaned objects + Runs all clean_* functions to check database consistency + """ + clean_packageupdates() + clean_packages(remove_duplicates=remove_duplicate_packages) + clean_packagenames() + clean_architectures() + clean_repos() + remove_mirror_trailing_slashes() + clean_modules() + clean_packageupdates() diff --git a/util/templates/base.html b/util/templates/base.html index 25dc0578..d732263c 100644 --- a/util/templates/base.html +++ b/util/templates/base.html @@ -10,6 +10,7 @@ {% block page_title %}{% endblock %} + {% block extrahead %}{% endblock %} diff --git a/util/templates/dashboard.html b/util/templates/dashboard.html index 86438315..de9bfc72 100644 --- a/util/templates/dashboard.html +++ b/util/templates/dashboard.html @@ -8,34 +8,34 @@ {% block content %} -{% with count=lonely_oses.count %} +{% with 
count=noosrelease_osvariants.count %} {% if count > 0 %}
    - -
    - {% gen_table lonely_oses %} + +
    + {% gen_table noosrelease_osvariants %}
    {% endif %} {% endwith %} -{% with count=nohost_oses.count %} +{% with count=nohost_osvariants.count %} {% if count > 0 %}
    - -
    - {% gen_table nohost_oses %} + +
    + {% gen_table nohost_osvariants %}
    {% endif %} {% endwith %} -{% with count=norepo_osgroups.count %} - {% if count > 0 and norepo_osgroups != None %} +{% with count=norepo_osreleases.count %} + {% if count > 0 and norepo_osreleases != None %}
    - -
    - {% gen_table norepo_osgroups %} + +
    + {% gen_table norepo_osreleases %}
    {% endif %} @@ -191,7 +191,7 @@
    {% for checksum in possible_mirrors %} - {{ checksum }} + {{ checksum }} {% endfor %}
    diff --git a/util/templates/navbar.html b/util/templates/navbar.html index 10391b3c..2a2edc0b 100644 --- a/util/templates/navbar.html +++ b/util/templates/navbar.html @@ -13,8 +13,10 @@
  • Dashboard
  • Hosts
  • Repositories
  • -
  • Packages
  • -
  • Operating Systems
  • +
  • Packages
  • +
  • Errata
  • +
  • CVEs
  • +
  • Operating Systems
  • Reports
  • {% if user.is_superuser %}
  • Django Admin
  • diff --git a/util/templates/objectlist.html b/util/templates/objectlist.html index edb31271..f2b4fcf9 100644 --- a/util/templates/objectlist.html +++ b/util/templates/objectlist.html @@ -6,8 +6,9 @@
    - {% searchform terms %} - {% gen_table page.object_list %} + {% get_querydict request as querydict %} + {% searchform terms querydict %} + {% gen_table page.object_list table_template %}
    {% object_count page %}
    diff --git a/util/templates/searchbar.html b/util/templates/searchbar.html index c3ce5327..9930a6b3 100644 --- a/util/templates/searchbar.html +++ b/util/templates/searchbar.html @@ -3,6 +3,9 @@
    + {% for key, value in querydict.items %} + + {% endfor %}
    diff --git a/util/templatetags/common.py b/util/templatetags/common.py index bac898cc..6737c438 100644 --- a/util/templatetags/common.py +++ b/util/templatetags/common.py @@ -16,25 +16,26 @@ # You should have received a copy of the GNU General Public License # along with Patchman If not, see . +import re + from humanize import naturaltime from datetime import datetime, timedelta from urllib.parse import urlencode -from django.conf import settings from django.template import Library from django.template.loader import get_template from django.utils.html import format_html from django.templatetags.static import static from django.core.paginator import Paginator +from util import get_setting_of_type + register = Library() @register.simple_tag def active(request, pattern): - import re - if re.search(f"^{request.META['SCRIPT_NAME']!s}/{pattern!s}", - request.path): + if re.search(fr"^{request.META['SCRIPT_NAME']}/{pattern}", request.path): return 'active' return '' @@ -44,9 +45,9 @@ def yes_no_img(boolean, alt_yes='Active', alt_no='Not Active'): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') if boolean: - html = f'{alt_yes!s}' + html = f'{alt_yes}' else: - html = f'{alt_no!s}' + html = f'{alt_no}' return format_html(html) @@ -55,9 +56,9 @@ def no_yes_img(boolean, alt_yes='Not Required', alt_no='Required'): yes_icon = static('img/icon-yes.gif') no_icon = static('img/icon-no.gif') if not boolean: - html = f'{alt_yes!s}' + html = f'{alt_yes}' else: - html = f'{alt_no!s}' + html = f'{alt_no}' return format_html(html) @@ -68,7 +69,7 @@ def gen_table(object_list, template_name=None): if not template_name: app_label = object_list.model._meta.app_label model_name = object_list.model._meta.verbose_name.replace(' ', '') - template_name = f'{app_label!s}/{model_name.lower()!s}_table.html' + template_name = f'{app_label}/{model_name.lower()}_table.html' template = get_template(template_name) html = template.render({'object_list': object_list}) return 
html @@ -81,7 +82,17 @@ def object_count(page): name = page.paginator.object_list.model._meta.verbose_name else: name = page.paginator.object_list.model._meta.verbose_name_plural - return f'{page.paginator.count!s} {name!s}' + return f'{page.paginator.count} {name}' + + +@register.simple_tag +def get_querydict(request): + get = request.GET.copy() + if 'page' in get: + del get['page'] + if 'search' in get: + del get['search'] + return get @register.simple_tag @@ -93,17 +104,25 @@ def get_querystring(request): @register.simple_tag -def searchform(terms): +def searchform(terms, querydict): template = get_template('searchbar.html') - html = template.render({'post_url': '.', 'terms': terms}) + html = template.render({'post_url': '.', 'terms': terms, 'querydict': querydict}) return html @register.simple_tag def reports_timedelta(): - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): - days = settings.DAYS_WITHOUT_REPORT - else: - days = 14 + days = get_setting_of_type( + setting_name='DAYS_WITHOUT_REPORT', + setting_type=int, + default=14, + ) return naturaltime(datetime.now() - timedelta(days=days)) + + +@register.simple_tag +def host_count(osrelease): + host_count = 0 + for osvariant in osrelease.osvariant_set.all(): + host_count += osvariant.host_set.count() + return host_count diff --git a/util/views.py b/util/views.py index 4e3ee408..fb27c33b 100644 --- a/util/views.py +++ b/util/views.py @@ -17,7 +17,6 @@ from datetime import datetime, timedelta -from django.conf import settings from django.shortcuts import render from django.contrib.auth.decorators import login_required @@ -26,10 +25,11 @@ from django.db.models.functions import Coalesce from hosts.models import Host -from operatingsystems.models import OS, OSGroup +from operatingsystems.models import OSVariant, OSRelease from repos.models import Repository, Mirror from packages.models import Package, PackageUpdate from reports.models import Report +from util import 
get_setting_of_type @login_required @@ -42,34 +42,34 @@ def dashboard(request): hosts = Host.objects.with_counts('get_num_security_updates', 'get_num_bugfix_updates') \ - .select_related() - oses = OS.objects.all().prefetch_related('host_set') - osgroups = OSGroup.objects.all() + .select_related() + osvariants = OSVariant.objects.all().prefetch_related('host_set') + osreleases = OSRelease.objects.all() repos = Repository.objects.all().prefetch_related('mirror_set') packages = Package.objects.all() # host issues - if hasattr(settings, 'DAYS_WITHOUT_REPORT') and \ - isinstance(settings.DAYS_WITHOUT_REPORT, int): - days = settings.DAYS_WITHOUT_REPORT - else: - days = 14 + days = get_setting_of_type( + setting_name='DAYS_WITHOUT_REPORT', + setting_type=int, + default=14, + ) last_report_delta = datetime.now() - timedelta(days=days) stale_hosts = hosts.filter(lastreport__lt=last_report_delta) - norepo_hosts = hosts.filter(repos__isnull=True, os__osgroup__repos__isnull=True) # noqa + norepo_hosts = hosts.filter(repos__isnull=True, osvariant__osrelease__repos__isnull=True) # noqa reboot_hosts = hosts.filter(reboot_required=True) secupdate_hosts = hosts.filter(updates__security=True, updates__isnull=False).distinct() # noqa bugupdate_hosts = hosts.exclude(updates__security=True, updates__isnull=False).distinct().filter(updates__security=False, updates__isnull=False).distinct() # noqa diff_rdns_hosts = hosts.exclude(reversedns=F('hostname')).filter(check_dns=True) # noqa - # os issues - lonely_oses = oses.filter(osgroup__isnull=True) - nohost_oses = oses.filter(host__isnull=True) + # os variant issues + noosrelease_osvariants = osvariants.filter(osrelease__isnull=True) + nohost_osvariants = osvariants.filter(host__isnull=True) - # osgroup issues - norepo_osgroups = None + # os release issues + norepo_osreleases = None if hosts.filter(host_repos_only=False).exists(): - norepo_osgroups = osgroups.filter(repos__isnull=True) + norepo_osreleases = 
osreleases.filter(repos__isnull=True) # mirror issues failed_mirrors = repos.filter(auth_required=False).filter(mirror__last_access_ok=False).filter(mirror__last_access_ok=True).distinct() # noqa @@ -78,7 +78,7 @@ def dashboard(request): # repo issues failed_repos = repos.filter(auth_required=False).filter(mirror__last_access_ok=False).exclude(id__in=[x.id for x in failed_mirrors]).distinct() # noqa - unused_repos = repos.filter(host__isnull=True, osgroup__isnull=True) + unused_repos = repos.filter(host__isnull=True, osrelease__isnull=True) nomirror_repos = repos.filter(mirror__isnull=True) nohost_repos = repos.filter(host__isnull=True) @@ -92,10 +92,10 @@ def dashboard(request): norepo_packages = packages.filter(Exists(nohost_packages), ~Exists(nomirror_packages), ~Exists(nooldpackage_packages)) \ - .distinct() + .distinct() orphaned_packages = packages.filter(~Exists(nohost_packages), ~Exists(nomirror_packages)) \ - .distinct() + .distinct() # report issues unprocessed_reports = Report.objects.filter(processed=False) @@ -104,13 +104,13 @@ def dashboard(request): possible_mirrors = {} mirrors = Mirror.objects.all() \ - .annotate(packages_count=Coalesce(Count('packages', distinct=True), 0)) \ - .select_related() + .annotate(packages_count=Coalesce(Count('packages', distinct=True), 0)) \ + .select_related() for mirror in mirrors: - if mirror.file_checksum != 'yast' and mirror.packages_count > 0: - if mirror.file_checksum not in checksums: - checksums[mirror.file_checksum] = [] - checksums[mirror.file_checksum].append(mirror) + if mirror.packages_checksum != 'yast' and mirror.packages_count > 0: + if mirror.packages_checksum not in checksums: + checksums[mirror.packages_checksum] = [] + checksums[mirror.packages_checksum].append(mirror) for checksum in checksums: first_mirror = checksums[checksum][0] @@ -125,18 +125,23 @@ def dashboard(request): request, 'dashboard.html', {'site': site, - 'lonely_oses': lonely_oses, 'norepo_hosts': norepo_hosts, - 'nohost_oses': 
nohost_oses, 'diff_rdns_hosts': diff_rdns_hosts, - 'stale_hosts': stale_hosts, 'possible_mirrors': possible_mirrors, + 'noosrelease_osvariants': noosrelease_osvariants, + 'norepo_hosts': norepo_hosts, + 'nohost_osvariants': nohost_osvariants, + 'diff_rdns_hosts': diff_rdns_hosts, + 'stale_hosts': stale_hosts, + 'possible_mirrors': possible_mirrors, 'norepo_packages': norepo_packages, 'nohost_repos': nohost_repos, 'secupdate_hosts': secupdate_hosts, 'bugupdate_hosts': bugupdate_hosts, - 'norepo_osgroups': norepo_osgroups, 'unused_repos': unused_repos, + 'norepo_osreleases': norepo_osreleases, + 'unused_repos': unused_repos, 'disabled_mirrors': disabled_mirrors, 'norefresh_mirrors': norefresh_mirrors, 'failed_mirrors': failed_mirrors, 'orphaned_packages': orphaned_packages, - 'failed_repos': failed_repos, 'nomirror_repos': nomirror_repos, + 'failed_repos': failed_repos, + 'nomirror_repos': nomirror_repos, 'reboot_hosts': reboot_hosts, - 'unprocessed_reports': unprocessed_reports}, ) + 'unprocessed_reports': unprocessed_reports})