From ff2983c207e6ab2a9381e20312355c42307187f6 Mon Sep 17 00:00:00 2001 From: Luke Shumaker Date: Sun, 17 Apr 2016 01:56:43 -0400 Subject: Give things more consistent names --- abslibre | 124 ------------------------ any-to-ours | 71 -------------- check-package-libraries.py | 193 ------------------------------------ createrepos | 8 -- cron-jobs/db-cleanup | 69 +++++++++++++ db-check-nonfree-in-db | 28 ++++++ db-check-package-libraries | 193 ++++++++++++++++++++++++++++++++++++ db-check-unsigned-packages | 38 ++++++++ db-check-unsigned-packages.py | 96 ++++++++++++++++++ db-cleanup | 69 ------------- db-import-any-to-ours | 71 ++++++++++++++ db-import-pkg-archlinux | 210 ++++++++++++++++++++++++++++++++++++++++ db-import-pkg-archlinux.conf | 11 +++ db-import-pkg-archlinuxarm | 199 +++++++++++++++++++++++++++++++++++++ db-import-pkg-archlinuxarm.conf | 7 ++ db-import-src-archlinux | 124 ++++++++++++++++++++++++ db-init | 8 ++ db-libremessages | 0 db-list-unsigned-packages | 38 -------- db-list-unsigned-packages.py | 96 ------------------ db-sync | 210 ---------------------------------------- db-sync-arm | 199 ------------------------------------- db-sync-arm.conf | 7 -- db-sync.conf | 11 --- list_nonfree_in_db.py | 28 ------ mkrepo | 15 --- 26 files changed, 1054 insertions(+), 1069 deletions(-) delete mode 100755 abslibre delete mode 100755 any-to-ours delete mode 100755 check-package-libraries.py delete mode 100755 createrepos create mode 100755 cron-jobs/db-cleanup create mode 100755 db-check-nonfree-in-db create mode 100755 db-check-package-libraries create mode 100755 db-check-unsigned-packages create mode 100755 db-check-unsigned-packages.py delete mode 100755 db-cleanup create mode 100755 db-import-any-to-ours create mode 100755 db-import-pkg-archlinux create mode 100644 db-import-pkg-archlinux.conf create mode 100755 db-import-pkg-archlinuxarm create mode 100644 db-import-pkg-archlinuxarm.conf create mode 100755 db-import-src-archlinux create mode 100755 db-init mode change 100755 => 100644 db-libremessages delete mode 100755 db-list-unsigned-packages delete mode 100755 db-list-unsigned-packages.py delete mode 100755 db-sync delete mode 100755 db-sync-arm delete mode 100644 db-sync-arm.conf delete mode 100644 db-sync.conf delete mode 100755 list_nonfree_in_db.py delete mode 100755 mkrepo diff --git a/abslibre b/abslibre deleted file mode 100755 index 72171f1..0000000 --- a/abslibre +++ /dev/null @@ -1,124 +0,0 @@ -#!/bin/bash - -set -e - -FTP_BASE=/srv/repo/main -ABSLIBRE=/srv/abslibre -ABSGIT=/srv/git/abslibre/abslibre.git -# Remote -# ABSGIT=http://projects.parabolagnulinux.org/abslibre.git -BLACKLIST=/home/repo/blacklist/blacklist.txt -SYNCARGS='-mrtv --no-motd --delete-after --no-p --no-o --no-g --quiet' -BLFILE=/tmp/blacklist.txt - -# Variables from abs.conf -ABSROOT="/srv/abs/" -# DON'T CHANGE. WE NEED IT FOR ABSLIBRE -SYNCSERVER="rsync.archlinux.org" -ARCH="i686" -MIRRORLIST="/etc/pacman.d/mirrorlist" -REPOS=(core extra community testing community-testing !staging !community-staging) - -# Steps -# * Sync abs -# * Download blacklist.txt -# * Sync abslibre from abs excluding from blacklist -# * Create repo.abs.tar.gz tarballs - -function sync_abs() { - for ARCH in any i686 x86_64; do - rsync ${SYNCARGS} ${SYNCSERVER}::abs/${ARCH}/ ${ABSROOT}/${ARCH} || return $? 
- done - - # fix some permissions - find "${ABSROOT}" -type d -print0 | xargs -0 chmod 755 - find "${ABSROOT}" -type f -print0 | xargs -0 chmod 644 -} - -function get_blacklist() { - printf ":: Updating blacklist...\t" - cat "${BLACKLIST}" | cut -d':' -f1 | sort -u | \ - sed "s/^/**\//" > ${BLFILE} || { - printf "[FAILED]\n" - return 1 - } - - # Prevent using an empty blacklist - [ $(wc -l ${BLFILE} | cut -d " " -f1) -eq 0 ] && return 1 - - printf "[OK]\n" -} - -function sync_abs_libre() { - - # Clone ABSLibre git repo - rm -rf /tmp/abslibre - git clone "$ABSGIT" /tmp/abslibre - - # Sync from ABS and then sync from ABSLibre - printf ":: Syncing ABSLibre...\t" - (rsync ${SYNCARGS} --delete-excluded \ - --exclude-from=${BLFILE} \ - ${ABSROOT} \ - ${ABSLIBRE} \ - && - for ARCH in i686 x86_64; do rsync -v -mrtq --no-motd --no-p --no-o --no-g --quiet --exclude=.git/ /tmp/abslibre/ ${ABSLIBRE}/${ARCH}/; done) || { - printf "[FAILED]\n" - return 1 - } - - # fix some permissions - find "${ABSLIBRE}" -type d -print0 | xargs -0 chmod 755 - find "${ABSLIBRE}" -type f -print0 | xargs -0 chmod 644 - - printf "[OK]\n" -} - -# This part is very hacky and particular to the current setup :P -sync_pre_mips64el() { - pushd /home/fauno/Repos/abslibre-pre-mips64el >/dev/null - - sudo -u fauno sh -c " - rsync ${SYNCARGS} \ - --exclude=.git* \ - --exclude=community-staging \ - --exclude=community-testing \ - --exclude=gnome-unstable \ - --exclude=kde-unstable \ - --exclude=multilib \ - --exclude=multilib-testing \ - --exclude=multilib-staging \ - --exclude=staging \ - --exclude=testing \ - ${ABSLIBRE}/x86_64/ \ - /home/fauno/Repos/abslibre-pre-mips64el/ && - git add . && - git commit -m \"$(date)\" -a - git push origin master - git gc - " -} - -# Create .abs.tar.gz tarballs -create_tarballs() { - for repo in ${ABSLIBRE}/{i686,x86_64}/*; do - baserepo=${repo##*/} - arch=$(basename $(dirname $repo)) - - # Remove the old one - mkdir -p $FTP_BASE/$baserepo/os/$arch/ - rm -fv $FTP_BASE/$baserepo/os/$arch/$baserepo.abs.tar.gz - # Create a new one joining arch and any - # Remove the first part of the path (it could be $repo but any isn't hit) - bsdtar -czf $FTP_BASE/$baserepo/os/$arch/$baserepo.abs.tar.gz \ - -s ":${ABSLIBRE}/[a-z0-9_]\+/[a-z]\+::" \ - $repo/* ${ABSLIBRE}/any/${baserepo}/* - - done -} - -sync_abs -get_blacklist -sync_abs_libre -#sync_pre_mips64el -create_tarballs diff --git a/any-to-ours b/any-to-ours deleted file mode 100755 index a901d54..0000000 --- a/any-to-ours +++ /dev/null @@ -1,71 +0,0 @@ -#!/bin/bash -# Releases 'any' packages from Arch arches to ours - -trap_exit() { - echo - error "$@" - exit 1 -} - -source "$(dirname "$(readlink -e "$0")")/config" -source "$(dirname "$(readlink -e "$0")")/db-libremessages" - -# From makepkg -set -E - -trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT -trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT -trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR - -# The architecture to compare with -BASEARCH='x86_64' - -# Traverse all Arch repos -for _repo in "${ARCHREPOS[@]}"; do - msg "Processing %s..." "${_repo}" - - # Find 'any' packages - # This is hardcoded but it could release other arches... - PKGS=($(find "${FTP_BASE}/${_repo}/os/${BASEARCH}/" \ - -iname '*-any.pkg.tar.?z' \ - -printf "%f ")) - - if [ ${#PKGS[@]} -eq 0 ]; then - msg2 "No '%s' packages here" any - continue - fi - - for _arch in "${OURARCHES[@]}"; do - msg2 "Syncing %s..." 
"${_arch}" - - # Sync 'any' only and extract the synced packages - SYNCED=($( - rsync -av \ - --include='*-any.pkg.tar.?z' \ - --include='*-any.pkg.tar.?z.sig' \ - --exclude='*' \ - "${FTP_BASE}/${_repo}/os/${BASEARCH}/" \ - "${FTP_BASE}/${_repo}/os/${_arch}/" 2>&1 | \ - grep 'any\.pkg\.tar\..z$' | \ - cut -d ' ' -f 1 )) - - if [ ${#SYNCED[@]} -eq 0 ]; then - msg2 "Already synced (or error happened)" - continue - fi - - msg2 "Synced %d packages: %s" "${#SYNCED[@]}" "${SYNCED[*]}" - - msg2 "Adding to db..." - - pushd "${FTP_BASE}/${_repo}/os/${_arch}/" >/dev/null - - # Add the packages to the db - repo-add "${_repo}${DBEXT}" "${SYNCED[@]}" - - popd >/dev/null - - # Avoid mixups - unset SYNCED PKGS - done -done diff --git a/check-package-libraries.py b/check-package-libraries.py deleted file mode 100755 index bc2349b..0000000 --- a/check-package-libraries.py +++ /dev/null @@ -1,193 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (C) 2012 Michał Masłowski -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - - -""" -Check which libraries are provided or required by a package, store -this in a database, update and list broken packages. - -Dependencies: - -- Python 3.2 or later with SQLite 3 support - -- ``bsdtar`` - -- ``readelf`` -""" - - -import os.path -import re -import sqlite3 -import subprocess -import tempfile - - -#: Regexp matching an interesting dynamic entry. -_DYNAMIC = re.compile(r"^\s*[0-9a-fx]+" - "\s*\((NEEDED|SONAME)\)[^:]*:\s*\[(.+)\]$") - - -def make_db(path): - """Make a new, empty, library database at *path*.""" - con = sqlite3.connect(path) - con.executescript(""" -create table provided( - library varchar not null, - package varchar not null -); -create table used( - library varchar not null, - package varchar not null -); -""") - con.close() - - -def begin(database): - """Connect to *database* and start a transaction.""" - con = sqlite3.connect(database) - con.execute("begin exclusive") - return con - - -def add_provided(con, package, libraries): - """Write that *package* provides *libraries*.""" - for library in libraries: - con.execute("insert into provided (package, library) values (?,?)", - (package, library)) - - -def add_used(con, package, libraries): - """Write that *package* uses *libraries*.""" - for library in libraries: - con.execute("insert into used (package, library) values (?,?)", - (package, library)) - - -def remove_package(con, package): - """Remove all entries for a package.""" - con.execute("delete from provided where package=?", (package,)) - con.execute("delete from used where package=?", (package,)) - - -def add_package(con, package): - """Add entries from a named *package*.""" - # Extract to a temporary directory. This could be done more - # efficiently, since there is no need to store more than one file - # at once. 
- with tempfile.TemporaryDirectory() as temp: - tar = subprocess.Popen(("bsdtar", "xf", package, "-C", temp)) - tar.communicate() - with open(os.path.join(temp, ".PKGINFO")) as pkginfo: - for line in pkginfo: - if line.startswith("pkgname ="): - pkgname = line[len("pkgname ="):].strip() - break - # Don't list previously removed libraries. - remove_package(con, pkgname) - provided = set() - used = set() - # Search for ELFs. - for dirname, dirnames, filenames in os.walk(temp): - assert dirnames is not None # unused, avoid pylint warning - for file_name in filenames: - path = os.path.join(dirname, file_name) - with open(path, "rb") as file_object: - if file_object.read(4) != b"\177ELF": - continue - readelf = subprocess.Popen(("readelf", "-d", path), - stdout=subprocess.PIPE) - for line in readelf.communicate()[0].split(b"\n"): - match = _DYNAMIC.match(line.decode("ascii")) - if match: - if match.group(1) == "SONAME": - provided.add(match.group(2)) - elif match.group(1) == "NEEDED": - used.add(match.group(2)) - else: - raise AssertionError("unknown entry type " - + match.group(1)) - add_provided(con, pkgname, provided) - add_used(con, pkgname, used) - - -def init(arguments): - """Initialize.""" - make_db(arguments.database) - - -def add(arguments): - """Add packages.""" - con = begin(arguments.database) - for package in arguments.packages: - add_package(con, package) - con.commit() - con.close() - - -def remove(arguments): - """Remove packages.""" - con = begin(arguments.database) - for package in arguments.packages: - remove_package(con, package) - con.commit() - con.close() - - -def check(arguments): - """List broken packages.""" - con = begin(arguments.database) - available = set(row[0] for row - in con.execute("select library from provided")) - for package, library in con.execute("select package, library from used"): - if library not in available: - print(package, "needs", library) - con.close() - - -def main(): - """Get arguments and run the command.""" - from argparse import ArgumentParser - parser = ArgumentParser(prog="check-package-libraries.py", - description="Check packages for " - "provided/needed libraries") - parser.add_argument("-d", "--database", type=str, - help="Database file to use", - default="package-libraries.sqlite") - subparsers = parser.add_subparsers() - subparser = subparsers.add_parser(name="init", - help="initialize the database") - subparser.set_defaults(command=init) - subparser = subparsers.add_parser(name="add", - help="add packages to database") - subparser.add_argument("packages", nargs="+", type=str, - help="package files to add") - subparser.set_defaults(command=add) - subparser = subparsers.add_parser(name="remove", - help="remove packages from database") - subparser.add_argument("packages", nargs="+", type=str, - help="package names to remove") - subparser.set_defaults(command=remove) - subparser = subparsers.add_parser(name="check", - help="list broken packages") - subparser.set_defaults(command=check) - arguments = parser.parse_args() - arguments.command(arguments) - - -if __name__ == "__main__": - main() diff --git a/createrepos b/createrepos deleted file mode 100755 index 8da2455..0000000 --- a/createrepos +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -# Creates the repo structure defined in config - -source "$(dirname "$(readlink -e "$0")")/config" - -mkdir -p -- "${FTP_BASE}"/{"${PKGPOOL}","${SRCPOOL}"} "${CLEANUP_DESTDIR}" "${SOURCE_CLEANUP_DESTDIR}" "${STAGING}" - -"$(dirname "$(readlink -e "$0")")/create-repo" "${PKGREPOS[@]}" diff --git 
a/cron-jobs/db-cleanup b/cron-jobs/db-cleanup new file mode 100755 index 0000000..ffa2601 --- /dev/null +++ b/cron-jobs/db-cleanup @@ -0,0 +1,69 @@ +#!/bin/bash +# Syncs pools against themselves using database contents as filter to cleanup +# them up +# License: GPLv3 + +# Principles +# * Get repos dbs contents +# * Make them a include list +# * Rsync pools against themselves removing excluded files +# * Instant cleanup! + +trap_exit() { + echo + error "$@" + exit 1 +} + +source "$(dirname "$(readlink -e "$0")")/config" +source "$(dirname "$(readlink -e "$0")")/db-libremessages" + +# From makepkg +set -E + +trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT +trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT +trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR + +EXTRAFLAGS=() +"${CLEANUP_DRYRUN}" && EXTRAFLAGS+=(--dry-run) + +filter=$(mktemp -t "${0##*/}.XXXXXXXXXX") +trap "rm -f -- $(printf %q "$filter")" EXIT + +for _repo in "${PKGREPOS[@]}"; do + for _arch in "${ARCHES[@]}"; do + msg "Getting %s-%s database" "${_repo}" "${_arch}" + + dbfile="${FTP_BASE}/${_repo}/os/${_arch}/${_repo}${DBEXT}" + + if [ ! -r "${dbfile}" ]; then + warning "Not found" + continue + fi + + # Echo the contents into a filter file + bsdtar tf "${dbfile}" | \ + cut -d'/' -f1 | \ + sort -u | \ + sed "s|$|*|" >> "$filter" + + done +done + +msg "Removing old files:" + +for POOL in "${PKGPOOLS[@]}" "${SRCPOOLS[@]}"; do + msg2 '%s' "${POOL}" + + rsync "${EXTRAFLAGS[@]}" -va --delete-excluded \ + --include-from="$filter" \ + --exclude="*" \ + "${FTP_BASE}/${POOL}/" \ + "${FTP_BASE}/${POOL}/" +done + +msg "Removing dead symlinks:" +actions=(-print) +"${CLEANUP_DRYRUN}" || actions+=(-delete) +find -L "${FTP_BASE}/" -type l "${actions[@]}" diff --git a/db-check-nonfree-in-db b/db-check-nonfree-in-db new file mode 100755 index 0000000..a486fa5 --- /dev/null +++ b/db-check-nonfree-in-db @@ -0,0 +1,28 @@ +#!/usr/bin/env python2 +#-*- encoding: utf-8 -*- +from filter import * +import argparse + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + prog="nonfree_in_db", + description="Cleans nonfree files on repo",) + + parser.add_argument("-k", "--blacklist-file", type=str, + help="File containing blacklisted names", + required=True,) + + parser.add_argument("-b", "--database", type=str, + help="dabatase to clean", + required=True,) + + args=parser.parse_args() + + if not (args.blacklist_file and args.database): + parser.print_help() + exit(1) + + blacklist=listado(args.blacklist_file) + pkgs=get_pkginfo_from_db(args.database) + + print(" ".join([pkg["name"] for pkg in pkgs if pkg["name"] in blacklist])) diff --git a/db-check-package-libraries b/db-check-package-libraries new file mode 100755 index 0000000..612fc4f --- /dev/null +++ b/db-check-package-libraries @@ -0,0 +1,193 @@ +#!/usr/bin/env python3 +# Copyright (C) 2012 Michał Masłowski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. 
If not, see . + + +""" +Check which libraries are provided or required by a package, store +this in a database, update and list broken packages. + +Dependencies: + +- Python 3.2 or later with SQLite 3 support + +- ``bsdtar`` + +- ``readelf`` +""" + + +import os.path +import re +import sqlite3 +import subprocess +import tempfile + + +#: Regexp matching an interesting dynamic entry. +_DYNAMIC = re.compile(r"^\s*[0-9a-fx]+" + "\s*\((NEEDED|SONAME)\)[^:]*:\s*\[(.+)\]$") + + +def make_db(path): + """Make a new, empty, library database at *path*.""" + con = sqlite3.connect(path) + con.executescript(""" +create table provided( + library varchar not null, + package varchar not null +); +create table used( + library varchar not null, + package varchar not null +); +""") + con.close() + + +def begin(database): + """Connect to *database* and start a transaction.""" + con = sqlite3.connect(database) + con.execute("begin exclusive") + return con + + +def add_provided(con, package, libraries): + """Write that *package* provides *libraries*.""" + for library in libraries: + con.execute("insert into provided (package, library) values (?,?)", + (package, library)) + + +def add_used(con, package, libraries): + """Write that *package* uses *libraries*.""" + for library in libraries: + con.execute("insert into used (package, library) values (?,?)", + (package, library)) + + +def remove_package(con, package): + """Remove all entries for a package.""" + con.execute("delete from provided where package=?", (package,)) + con.execute("delete from used where package=?", (package,)) + + +def add_package(con, package): + """Add entries from a named *package*.""" + # Extract to a temporary directory. This could be done more + # efficiently, since there is no need to store more than one file + # at once. + with tempfile.TemporaryDirectory() as temp: + tar = subprocess.Popen(("bsdtar", "xf", package, "-C", temp)) + tar.communicate() + with open(os.path.join(temp, ".PKGINFO")) as pkginfo: + for line in pkginfo: + if line.startswith("pkgname ="): + pkgname = line[len("pkgname ="):].strip() + break + # Don't list previously removed libraries. + remove_package(con, pkgname) + provided = set() + used = set() + # Search for ELFs. 
+ for dirname, dirnames, filenames in os.walk(temp): + assert dirnames is not None # unused, avoid pylint warning + for file_name in filenames: + path = os.path.join(dirname, file_name) + with open(path, "rb") as file_object: + if file_object.read(4) != b"\177ELF": + continue + readelf = subprocess.Popen(("readelf", "-d", path), + stdout=subprocess.PIPE) + for line in readelf.communicate()[0].split(b"\n"): + match = _DYNAMIC.match(line.decode("ascii")) + if match: + if match.group(1) == "SONAME": + provided.add(match.group(2)) + elif match.group(1) == "NEEDED": + used.add(match.group(2)) + else: + raise AssertionError("unknown entry type " + + match.group(1)) + add_provided(con, pkgname, provided) + add_used(con, pkgname, used) + + +def init(arguments): + """Initialize.""" + make_db(arguments.database) + + +def add(arguments): + """Add packages.""" + con = begin(arguments.database) + for package in arguments.packages: + add_package(con, package) + con.commit() + con.close() + + +def remove(arguments): + """Remove packages.""" + con = begin(arguments.database) + for package in arguments.packages: + remove_package(con, package) + con.commit() + con.close() + + +def check(arguments): + """List broken packages.""" + con = begin(arguments.database) + available = set(row[0] for row + in con.execute("select library from provided")) + for package, library in con.execute("select package, library from used"): + if library not in available: + print(package, "needs", library) + con.close() + + +def main(): + """Get arguments and run the command.""" + from argparse import ArgumentParser + parser = ArgumentParser(prog="db-check-package-libraries", + description="Check packages for " + "provided/needed libraries") + parser.add_argument("-d", "--database", type=str, + help="Database file to use", + default="package-libraries.sqlite") + subparsers = parser.add_subparsers() + subparser = subparsers.add_parser(name="init", + help="initialize the database") + subparser.set_defaults(command=init) + subparser = subparsers.add_parser(name="add", + help="add packages to database") + subparser.add_argument("packages", nargs="+", type=str, + help="package files to add") + subparser.set_defaults(command=add) + subparser = subparsers.add_parser(name="remove", + help="remove packages from database") + subparser.add_argument("packages", nargs="+", type=str, + help="package names to remove") + subparser.set_defaults(command=remove) + subparser = subparsers.add_parser(name="check", + help="list broken packages") + subparser.set_defaults(command=check) + arguments = parser.parse_args() + arguments.command(arguments) + + +if __name__ == "__main__": + main() diff --git a/db-check-unsigned-packages b/db-check-unsigned-packages new file mode 100755 index 0000000..0fc053b --- /dev/null +++ b/db-check-unsigned-packages @@ -0,0 +1,38 @@ +#!/bin/bash +# Copyright (C) 2012 Michał Masłowski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +set -e + +# Output a list of repo/package-name-and-version pairs representing +# unsigned packages available for architecture $1 and specified for +# architecture $2 (usually $1 or any, default is to list all). + +. "$(dirname "$(readlink -e "$0")")/config" +. "$(dirname "$(readlink -e "$0")")/db-functions" + +if [ $# -lt 1 ]; then + msg "usage: %s " "${0##*/}" + exit 1 +fi + +arch=$1 +shift + +for repo in "${PKGREPOS[@]}" +do + db="${FTP_BASE}/${repo}/os/${arch}/${repo}.db" + [ -f "$db" ] && "$(dirname "$(readlink -e "$0")")/db-check-unsigned-packages.py" "$repo" "$@" < "$db" +done diff --git a/db-check-unsigned-packages.py b/db-check-unsigned-packages.py new file mode 100755 index 0000000..80cff51 --- /dev/null +++ b/db-check-unsigned-packages.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# Copyright (C) 2012 Michał Masłowski +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + + +""" +Output a list of repo/package-name-and-version pairs representing +unsigned packages in the database at standard input of repo named in +the first argument and specified for architectures listed in the +following arguments (usually the one of the database or any, default +is to list all). + +If the --keyset argument is passed, print the key fingerprint of every +signed package. +""" + + +import base64 +import subprocess +import sys +import tarfile + + +def main(): + """Do the job.""" + check_keys = False + if "--keyset" in sys.argv: + sys.argv.remove("--keyset") + check_keys = True + repo = sys.argv[1] + pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:]) + packages = [] + keys = [] + with tarfile.open(fileobj=sys.stdin.buffer) as archive: + for entry in archive: + if entry.name.endswith("/desc"): + content = archive.extractfile(entry) + skip = False + is_arch = False + key = None + for line in content: + if is_arch: + is_arch = False + if pkgarches and line.strip() not in pkgarches: + skip = True # different architecture + break + if line == b"%PGPSIG%\n": + skip = True # signed + key = b"" + if check_keys: + continue + else: + break + if line == b"%ARCH%\n": + is_arch = True + continue + if key is not None: + if line.strip(): + key += line.strip() + else: + break + if check_keys and key: + key_binary = base64.b64decode(key) + keys.append(key_binary) + packages.append(repo + "/" + entry.name[:-5]) + if skip: + continue + print(repo + "/" + entry.name[:-5]) + if check_keys and keys: + # We have collected all signed package names in packages and + # all keys in keys. Let's now ask gpg to list all signatures + # and find which keys made them. 
+ packets = subprocess.check_output(("gpg", "--list-packets"), + input=b"".join(keys)) + i = 0 + for line in packets.decode("latin1").split("\n"): + if line.startswith(":signature packet:"): + keyid = line[line.index("keyid ") + len("keyid "):] + print(packages[i], keyid) + i += 1 + + +if __name__ == "__main__": + main() diff --git a/db-cleanup b/db-cleanup deleted file mode 100755 index ffa2601..0000000 --- a/db-cleanup +++ /dev/null @@ -1,69 +0,0 @@ -#!/bin/bash -# Syncs pools against themselves using database contents as filter to cleanup -# them up -# License: GPLv3 - -# Principles -# * Get repos dbs contents -# * Make them a include list -# * Rsync pools against themselves removing excluded files -# * Instant cleanup! - -trap_exit() { - echo - error "$@" - exit 1 -} - -source "$(dirname "$(readlink -e "$0")")/config" -source "$(dirname "$(readlink -e "$0")")/db-libremessages" - -# From makepkg -set -E - -trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT -trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT -trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR - -EXTRAFLAGS=() -"${CLEANUP_DRYRUN}" && EXTRAFLAGS+=(--dry-run) - -filter=$(mktemp -t "${0##*/}.XXXXXXXXXX") -trap "rm -f -- $(printf %q "$filter")" EXIT - -for _repo in "${PKGREPOS[@]}"; do - for _arch in "${ARCHES[@]}"; do - msg "Getting %s-%s database" "${_repo}" "${_arch}" - - dbfile="${FTP_BASE}/${_repo}/os/${_arch}/${_repo}${DBEXT}" - - if [ ! -r "${dbfile}" ]; then - warning "Not found" - continue - fi - - # Echo the contents into a filter file - bsdtar tf "${dbfile}" | \ - cut -d'/' -f1 | \ - sort -u | \ - sed "s|$|*|" >> "$filter" - - done -done - -msg "Removing old files:" - -for POOL in "${PKGPOOLS[@]}" "${SRCPOOLS[@]}"; do - msg2 '%s' "${POOL}" - - rsync "${EXTRAFLAGS[@]}" -va --delete-excluded \ - --include-from="$filter" \ - --exclude="*" \ - "${FTP_BASE}/${POOL}/" \ - "${FTP_BASE}/${POOL}/" -done - -msg "Removing dead symlinks:" -actions=(-print) -"${CLEANUP_DRYRUN}" || actions+=(-delete) -find -L "${FTP_BASE}/" -type l "${actions[@]}" diff --git a/db-import-any-to-ours b/db-import-any-to-ours new file mode 100755 index 0000000..a901d54 --- /dev/null +++ b/db-import-any-to-ours @@ -0,0 +1,71 @@ +#!/bin/bash +# Releases 'any' packages from Arch arches to ours + +trap_exit() { + echo + error "$@" + exit 1 +} + +source "$(dirname "$(readlink -e "$0")")/config" +source "$(dirname "$(readlink -e "$0")")/db-libremessages" + +# From makepkg +set -E + +trap 'trap_exit "$(gettext "TERM signal caught. Exiting...")"' TERM HUP QUIT +trap 'trap_exit "$(gettext "Aborted by user! Exiting...")"' INT +trap 'trap_exit "$(gettext "An unknown error has occurred. Exiting...")"' ERR + +# The architecture to compare with +BASEARCH='x86_64' + +# Traverse all Arch repos +for _repo in "${ARCHREPOS[@]}"; do + msg "Processing %s..." "${_repo}" + + # Find 'any' packages + # This is hardcoded but it could release other arches... + PKGS=($(find "${FTP_BASE}/${_repo}/os/${BASEARCH}/" \ + -iname '*-any.pkg.tar.?z' \ + -printf "%f ")) + + if [ ${#PKGS[@]} -eq 0 ]; then + msg2 "No '%s' packages here" any + continue + fi + + for _arch in "${OURARCHES[@]}"; do + msg2 "Syncing %s..." 
"${_arch}" + + # Sync 'any' only and extract the synced packages + SYNCED=($( + rsync -av \ + --include='*-any.pkg.tar.?z' \ + --include='*-any.pkg.tar.?z.sig' \ + --exclude='*' \ + "${FTP_BASE}/${_repo}/os/${BASEARCH}/" \ + "${FTP_BASE}/${_repo}/os/${_arch}/" 2>&1 | \ + grep 'any\.pkg\.tar\..z$' | \ + cut -d ' ' -f 1 )) + + if [ ${#SYNCED[@]} -eq 0 ]; then + msg2 "Already synced (or error happened)" + continue + fi + + msg2 "Synced %d packages: %s" "${#SYNCED[@]}" "${SYNCED[*]}" + + msg2 "Adding to db..." + + pushd "${FTP_BASE}/${_repo}/os/${_arch}/" >/dev/null + + # Add the packages to the db + repo-add "${_repo}${DBEXT}" "${SYNCED[@]}" + + popd >/dev/null + + # Avoid mixups + unset SYNCED PKGS + done +done diff --git a/db-import-pkg-archlinux b/db-import-pkg-archlinux new file mode 100755 index 0000000..81221fd --- /dev/null +++ b/db-import-pkg-archlinux @@ -0,0 +1,210 @@ +#!/bin/bash +# Syncs Arch repos based on info contained in repo.db files +# License: GPLv3 + +# Principles +# * Get repo.db from an Arch-like repo +# * Generate a list of available packages +# * Create sync whitelist (based on package blacklist) +# * Get packages +# * Check package signatures +# * Check database signatures +# * Sync repo => repo + +# TODO +# * make a tarball of files used for forensics + +set -e + +# Run as `V=true db-import-pkg-archlinux` to get verbose output +VERBOSE=${V} +extra=() +${VERBOSE} && extra+=(-v) + +WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX") +trap "rm -rf -- $(printf '%q' "${WORKDIR}")" EXIT + +# Returns contents of a repo +get_repos() { + # Exclude everything but db files + rsync "${extra[@]}" --no-motd -mrtlH --no-p --include="*/" \ + --include="*.db" \ + --include="*${DBEXT}" \ + --include="*.files" \ + --include="*${FILESEXT}" \ + --exclude="*" \ + --delete-after \ + "rsync://${mirror}/${mirrorpath}/" "$WORKDIR" +} + +get_repo_content() { + # Return all contents + bsdtar tf "${1}" | \ + cut -d "/" -f 1 | \ + sort -u +} + +# Prints blacklisted packages +get_blacklist() { + cut -d ':' -f 1 "${BLACKLIST_FILE}" +} + +# repo +# arch +get_repo_file() { + echo "${WORKDIR}/${1}/os/${2}/${1}" +} + +# Process the databases and get the libre packages +init() { + + # Get the blacklisted packages + blacklist=($(get_blacklist)) + # Store all the whitelist files + whitelists=() + + msg "%d packages in blacklist" ${#blacklist[@]} + + test ${#blacklist[@]} -eq 0 && fatal_error "Empty blacklist" + + # Sync the repos databases + get_repos + + # Traverse all repo-arch pairs + for _repo in "${ARCHREPOS[@]}"; do + for _arch in "${ARCHARCHES[@]}"; do + msg "Processing %s-%s" "${_repo}" "${_arch}" + + db_file=$(get_repo_file "${_repo}" "${_arch}")${DBEXT} + files_file=$(get_repo_file "${_repo}" "${_arch}")${FILESEXT} + + if [ ! -f "${db_file}" ]; then + warning "%s doesn't exist, skipping this repo-arch" "${db_file}" + continue + fi + if [ ! -f "${files_file}" ]; then + warning "%s doesn't exist, skipping this repo-arch" "${files_file}" + continue + fi + + # Remove blacklisted packages and count them + # TODO capture all removed packages for printing on debug mode + msg2 "Removing blacklisted packages from %s database..." .db + LC_ALL=C repo-remove "${db_file}" "${blacklist[@]}" \ + |& sed -n 's/-> Removing/ &/p' + msg2 "Removing blacklisted packages from %s database..." .files + LC_ALL=C repo-remove "${files_file}" "${blacklist[@]}" \ + |& sed -n 's/-> Removing/ &/p' + # Get db contents + db=($(get_repo_content "${db_file}")) + + msg2 "Process clean db for syncing..." 
+ + # Create a whitelist, add * wildcard to end + # TODO due to lack of -arch suffix, the pool sync retrieves every arch even if + # we aren't syncing them + # IMPORTANT: the . in the sed command is needed because an empty + # whitelist would consist of a single * allowing any package to + # pass through + printf '%s\n' "${db[@]}" | sed "s|.$|&*|g" > "/tmp/${_repo}-${_arch}.whitelist" + + msg2 "%d packages in whitelist" "$(wc -l /tmp/${_repo}-${_arch}.whitelist | cut -d' ' -f1)" + + # Sync excluding everything but whitelist + # We delete here for cleanup + rsync "${extra[@]}" --no-motd -rtlH \ + --delete-after \ + --delete-excluded \ + --delay-updates \ + --include-from="/tmp/${_repo}-${_arch}.whitelist" \ + --exclude="*" \ + "rsync://${mirror}/${mirrorpath}/${_repo}/os/${_arch}/" \ + "${FTP_BASE}/${_repo}/os/${_arch}/" + + # Add a new whitelist + whitelists+=(/tmp/${_repo}-${_arch}.whitelist) + + msg "Putting databases back in place" + rsync "${extra[@]}" --no-motd -rtlH \ + --delay-updates \ + --safe-links \ + "${WORKDIR}/${_repo}/os/${_arch}/" \ + "${FTP_BASE}/${_repo}/os/${_arch}/" + + # Cleanup + unset db + done + done + + + msg "Syncing package pool" + # Concatenate all whitelists, check for single *s just in case + cat "${whitelists[@]}" | grep -v "^\*$" | sort -u > /tmp/any.whitelist + + msg2 "Retrieving %d packages from pool" "$(wc -l /tmp/any.whitelist | cut -d' ' -f1)" + + # Sync + # *Don't delete-after*, this is the job of cleanup scripts. It will remove our + # packages too + local pkgpool + for pkgpool in "${ARCHPKGPOOLS[@]}"; do + rsync "${extra[@]}" --no-motd -rtlH \ + --delay-updates \ + --safe-links \ + --include-from=/tmp/any.whitelist \ + --exclude="*" \ + "rsync://${mirror}/${mirrorpath}/${pkgpool}/" \ + "${FTP_BASE}/${pkgpool}/" + done + + # Sync sources + msg "Syncing source pool" + #sed "s|\.pkg\.tar\.|.src.tar.|" /tmp/any.whitelist > /tmp/any-src.whitelist + #msg2 "Retrieving %d sources from pool" $(wc -l < /tmp/any-src.whitelist) + + # Sync + # *Don't delete-after*, this is the job of cleanup scripts. It will remove our + # packages too + local srcpool + for srcpool in "${ARCHSRCPOOLS[@]}"; do + rsync "${extra[@]}" --no-motd -rtlH \ + --delay-updates \ + --safe-links \ + --include-from=/tmp/any.whitelist \ + --exclude="*" \ + "rsync://${mirror}/${mirrorpath}/${srcpool}/" \ + "${FTP_BASE}/${srcpool}/" + done + + date -u +%s > "${FTP_BASE}/lastsync" + + # Cleanup + unset blacklist whitelists _arch _repo repo_file +} + +trap_exit() { + local signal=$1; shift + echo + error "$@" + trap -- "$signal" + kill "-$signal" "$$" +} + +source "$(dirname "$(readlink -e "$0")")/config" +source "$(dirname "$(readlink -e "$0")")/db-import-pkg-archlinux.conf" +source "$(dirname "$(readlink -e "$0")")/db-libremessages" + +# Check variables presence +for var in DBEXT FILESEXT mirror mirrorpath WORKDIR BLACKLIST_FILE FTP_BASE ARCHSRCPOOLS ARCHPKGPOOLS; do + test -z "${!var}" && fatal_error "Empty %s" "${var}" +done + +# From makepkg +set -E +for signal in TERM HUP QUIT; do + trap "trap_exit $signal '%s signal caught. Exiting...' $signal" "$signal" +done +trap 'trap_exit INT "Aborted by user! Exiting..."' INT +trap 'trap_exit USR1 "An unknown error has occurred. 
Exiting..."' ERR + +init diff --git a/db-import-pkg-archlinux.conf b/db-import-pkg-archlinux.conf new file mode 100644 index 0000000..24fc44d --- /dev/null +++ b/db-import-pkg-archlinux.conf @@ -0,0 +1,11 @@ +mirror="mirrors.kernel.org" + +## mirrors without sources folder +#mirror="mirrors.niyawe.de" +#mirror="mirror.nl.leaseweb.net" +#mirror="mirror.one.com" +#mirror="mirror.us.leaseweb.net" +#mirror="mirror.bytemark.co.uk" +#mirror="mirror.de.leaseweb.net" + +mirrorpath="archlinux" diff --git a/db-import-pkg-archlinuxarm b/db-import-pkg-archlinuxarm new file mode 100755 index 0000000..c707e5b --- /dev/null +++ b/db-import-pkg-archlinuxarm @@ -0,0 +1,199 @@ +#!/bin/bash +# Syncs Arch repos based on info contained in repo.db files +# License: GPLv3 + +# Principles +# * Get repo.db from an Arch-like repo +# * Generate a list of available packages +# * Create sync whitelist (based on package blacklist) +# * Get packages +# * Check package signatures +# * Check database signatures +# * Sync repo => repo + +# TODO +# * make a tarball of files used for forensics + +set -e + +# Run as `V=true db-import-pkg-archlinux` to get verbose output +VERBOSE=${V} +extra=() +${VERBOSE} && extra+=(-v) + +WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX") +trap "rm -rf -- $(printf '%q' "${WORKDIR}")" EXIT + +# Returns contents of a repo +get_repos() { + # Exclude everything but db files + rsync "${extra[@]}" --no-motd -mrtlH --no-p --include="*/" \ + --include="*.db" \ + --include="*${DBEXT}" \ + --include="*.files" \ + --include="*${FILESEXT}" \ + --exclude="*" \ + --delete-after \ + "rsync://${mirror}/${mirrorpath}/" "$WORKDIR" +} + +get_repo_content() { + # Return all contents + bsdtar tf "${1}" | \ + cut -d "/" -f 1 | \ + sort -u +} + +# Prints blacklisted packages +get_blacklist() { + cut -d ':' -f 1 "${BLACKLIST_FILE}" +} + +# repo +# arch +get_repo_file() { + echo "${WORKDIR}/${2}/${1}/${1}" +} + +# Process the databases and get the libre packages +init() { + + # Get the blacklisted packages + blacklist=($(get_blacklist)) + # Store all the whitelist files + whitelists=() + + msg "%d packages in blacklist" ${#blacklist[@]} + + test ${#blacklist[@]} -eq 0 && fatal_error "Empty blacklist" + + # Sync the repos databases + get_repos + + # Traverse all repo-arch pairs + for _arch in "${OURARCHES[@]}"; do + for _repo in "${ARMREPOS[@]}"; do + msg "Processing %s-%s" "${_repo}" "${_arch}" + + db_file=$(get_repo_file "${_repo}" "${_arch}")${DBEXT} + files_file=$(get_repo_file "${_repo}" "${_arch}")${FILESEXT} + + if [ ! -f "${db_file}" ]; then + warning "%s doesn't exist, skipping this arch-repo" "${db_file}" + continue + fi + if [ ! -f "${files_file}" ]; then + warning "%s doesn't exist, skipping this arch-repo" "${files_file}" + continue + fi + + # Remove blacklisted packages and count them + # TODO capture all removed packages for printing on debug mode + msg2 "Removing blacklisted packages from %s database..." .db + LC_ALL=C repo-remove "${db_file}" "${blacklist[@]}" \ + |& sed -n 's/-> Removing/ &/p' + msg2 "Removing blacklisted packages from %s database..." .files + LC_ALL=C repo-remove "${files_file}" "${blacklist[@]}" \ + |& sed -n 's/-> Removing/ &/p' + # Get db contents + db=($(get_repo_content "${db_file}")) + + msg2 "Process clean db for syncing..." + + # Create a whitelist, add * wildcard to end + # TODO due to lack of -arch suffix, the pool sync retrieves every arch even if + # we aren't syncing them + # IMPORTANT: the . 
in the sed command is needed because an empty + # whitelist would consist of a single * allowing any package to + # pass through + printf '%s\n' "${db[@]}" | sed "s|.$|&*|g" > "/tmp/${_repo}-${_arch}.whitelist" + + msg2 "%d packages in whitelist" "$(wc -l /tmp/${_repo}-${_arch}.whitelist | cut -d' ' -f1)" + + msg2 "Retrieving %d packages to pool" "$(wc -l /tmp/${_repo}-${_arch}.whitelist | cut -d' ' -f1)" + + # Sync excluding everything but whitelist + rsync "${extra[@]}" --no-motd -rtlH \ + --delay-updates \ + --safe-links \ + --include-from="/tmp/${_repo}-${_arch}.whitelist" \ + --exclude="*" \ + "rsync://${mirror}/${mirrorpath}/${_arch}/${_repo}/" \ + "${FTP_BASE}/${PKGPOOLARM}/" + + msg "Putting databases back in place" + rsync "${extra[@]}" --no-motd -rtlH \ + --delay-updates \ + --safe-links \ + "${WORKDIR}/${_arch}/${_repo}/" \ + "${FTP_BASE}/${_repo}/os/${_arch}/" + + # Cleanup + unset db + done + done + + + msg "Generating symbolic links to pool" + + for _arch in "${OURARCHES[@]}"; do + for _repo in "${ARMREPOS[@]}"; do + # Modify whitelist to search packages and create symlinks + sed -i "s/*/-${_arch}.pkg.tar.xz/g" "/tmp/${_repo}-${_arch}.whitelist" + + msg "Putting symlinks in ${_repo}/os/${_arch}" + + while read _pkgfile; do + # Symlink to package + if [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile}" ]; then + ln -sfv "../../../${PKGPOOLARM}/${_pkgfile}" \ + "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile}" + elif [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile/${_arch}/any}" ]; then + ln -sfv "../../../${PKGPOOLARM}/${_pkgfile/${_arch}/any}" \ + "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile/${_arch}/any}" + fi + + # Symlink to signature + if [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile}.sig" ]; then + ln -sfv "../../../${PKGPOOLARM}/${_pkgfile}.sig" \ + "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile}.sig" + elif [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile/${_arch}/any}.sig" ]; then + ln -sfv "../../../${PKGPOOLARM}/${_pkgfile/${_arch}/any}.sig" \ + "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile/${_arch}/any}.sig" + fi + done < "/tmp/${_repo}-${_arch}.whitelist" + done + done + + date -u +%s > "${FTP_BASE}/lastsync" + + # Cleanup + unset blacklist whitelists _arch _repo repo_file _pkgfile +} + +trap_exit() { + local signal=$1; shift + echo + error "$@" + trap -- "$signal" + kill "-$signal" "$$" +} + +source "$(dirname "$(readlink -e "$0")")/config" +source "$(dirname "$(readlink -e "$0")")/db-import-pkg-archlinuxarm.conf" +source "$(dirname "$(readlink -e "$0")")/db-libremessages" + +# Check variables presence +for var in DBEXT FILESEXT mirror mirrorpath WORKDIR BLACKLIST_FILE FTP_BASE ARCHSRCPOOLS ARCHPKGPOOLS; do + test -z "${!var}" && fatal_error "Empty %s" "${var}" +done + +# From makepkg +set -E +for signal in TERM HUP QUIT; do + trap "trap_exit $signal '%s signal caught. Exiting...' $signal" "$signal" +done +trap 'trap_exit INT "Aborted by user! Exiting..."' INT +trap 'trap_exit USR1 "An unknown error has occurred. 
Exiting..."' ERR + +init diff --git a/db-import-pkg-archlinuxarm.conf b/db-import-pkg-archlinuxarm.conf new file mode 100644 index 0000000..eaa170f --- /dev/null +++ b/db-import-pkg-archlinuxarm.conf @@ -0,0 +1,7 @@ +#mirror="mirror.yandex.ru" +mirror="ftp.halifax.rwth-aachen.de" + +## mirrors without sources folder +## use "archlinuxarm" instead "archlinux-arm" to mirror.yandex.ru + +mirrorpath="archlinux-arm" diff --git a/db-import-src-archlinux b/db-import-src-archlinux new file mode 100755 index 0000000..72171f1 --- /dev/null +++ b/db-import-src-archlinux @@ -0,0 +1,124 @@ +#!/bin/bash + +set -e + +FTP_BASE=/srv/repo/main +ABSLIBRE=/srv/abslibre +ABSGIT=/srv/git/abslibre/abslibre.git +# Remote +# ABSGIT=http://projects.parabolagnulinux.org/abslibre.git +BLACKLIST=/home/repo/blacklist/blacklist.txt +SYNCARGS='-mrtv --no-motd --delete-after --no-p --no-o --no-g --quiet' +BLFILE=/tmp/blacklist.txt + +# Variables from abs.conf +ABSROOT="/srv/abs/" +# DON'T CHANGE. WE NEED IT FOR ABSLIBRE +SYNCSERVER="rsync.archlinux.org" +ARCH="i686" +MIRRORLIST="/etc/pacman.d/mirrorlist" +REPOS=(core extra community testing community-testing !staging !community-staging) + +# Steps +# * Sync abs +# * Download blacklist.txt +# * Sync abslibre from abs excluding from blacklist +# * Create repo.abs.tar.gz tarballs + +function sync_abs() { + for ARCH in any i686 x86_64; do + rsync ${SYNCARGS} ${SYNCSERVER}::abs/${ARCH}/ ${ABSROOT}/${ARCH} || return $? + done + + # fix some permissions + find "${ABSROOT}" -type d -print0 | xargs -0 chmod 755 + find "${ABSROOT}" -type f -print0 | xargs -0 chmod 644 +} + +function get_blacklist() { + printf ":: Updating blacklist...\t" + cat "${BLACKLIST}" | cut -d':' -f1 | sort -u | \ + sed "s/^/**\//" > ${BLFILE} || { + printf "[FAILED]\n" + return 1 + } + + # Prevent using an empty blacklist + [ $(wc -l ${BLFILE} | cut -d " " -f1) -eq 0 ] && return 1 + + printf "[OK]\n" +} + +function sync_abs_libre() { + + # Clone ABSLibre git repo + rm -rf /tmp/abslibre + git clone "$ABSGIT" /tmp/abslibre + + # Sync from ABS and then sync from ABSLibre + printf ":: Syncing ABSLibre...\t" + (rsync ${SYNCARGS} --delete-excluded \ + --exclude-from=${BLFILE} \ + ${ABSROOT} \ + ${ABSLIBRE} \ + && + for ARCH in i686 x86_64; do rsync -v -mrtq --no-motd --no-p --no-o --no-g --quiet --exclude=.git/ /tmp/abslibre/ ${ABSLIBRE}/${ARCH}/; done) || { + printf "[FAILED]\n" + return 1 + } + + # fix some permissions + find "${ABSLIBRE}" -type d -print0 | xargs -0 chmod 755 + find "${ABSLIBRE}" -type f -print0 | xargs -0 chmod 644 + + printf "[OK]\n" +} + +# This part is very hacky and particular to the current setup :P +sync_pre_mips64el() { + pushd /home/fauno/Repos/abslibre-pre-mips64el >/dev/null + + sudo -u fauno sh -c " + rsync ${SYNCARGS} \ + --exclude=.git* \ + --exclude=community-staging \ + --exclude=community-testing \ + --exclude=gnome-unstable \ + --exclude=kde-unstable \ + --exclude=multilib \ + --exclude=multilib-testing \ + --exclude=multilib-staging \ + --exclude=staging \ + --exclude=testing \ + ${ABSLIBRE}/x86_64/ \ + /home/fauno/Repos/abslibre-pre-mips64el/ && + git add . 
&& + git commit -m \"$(date)\" -a + git push origin master + git gc + " +} + +# Create .abs.tar.gz tarballs +create_tarballs() { + for repo in ${ABSLIBRE}/{i686,x86_64}/*; do + baserepo=${repo##*/} + arch=$(basename $(dirname $repo)) + + # Remove the old one + mkdir -p $FTP_BASE/$baserepo/os/$arch/ + rm -fv $FTP_BASE/$baserepo/os/$arch/$baserepo.abs.tar.gz + # Create a new one joining arch and any + # Remove the first part of the path (it could be $repo but any isn't hit) + bsdtar -czf $FTP_BASE/$baserepo/os/$arch/$baserepo.abs.tar.gz \ + -s ":${ABSLIBRE}/[a-z0-9_]\+/[a-z]\+::" \ + $repo/* ${ABSLIBRE}/any/${baserepo}/* + + done +} + +sync_abs +get_blacklist +sync_abs_libre +#sync_pre_mips64el +create_tarballs diff --git a/db-init b/db-init new file mode 100755 index 0000000..8da2455 --- /dev/null +++ b/db-init @@ -0,0 +1,8 @@ +#!/bin/bash +# Creates the repo structure defined in config + +source "$(dirname "$(readlink -e "$0")")/config" + +mkdir -p -- "${FTP_BASE}"/{"${PKGPOOL}","${SRCPOOL}"} "${CLEANUP_DESTDIR}" "${SOURCE_CLEANUP_DESTDIR}" "${STAGING}" + +"$(dirname "$(readlink -e "$0")")/create-repo" "${PKGREPOS[@]}" diff --git a/db-libremessages b/db-libremessages old mode 100755 new mode 100644 diff --git a/db-list-unsigned-packages b/db-list-unsigned-packages deleted file mode 100755 index 095e1e6..0000000 --- a/db-list-unsigned-packages +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# Copyright (C) 2012 Michał Masłowski -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -set -e - -# Output a list of repo/package-name-and-version pairs representing -# unsigned packages available for architecture $1 and specified for -# architecture $2 (usually $1 or any, default is to list all). - -. "$(dirname "$(readlink -e "$0")")/config" -. "$(dirname "$(readlink -e "$0")")/db-functions" - -if [ $# -lt 1 ]; then - msg "usage: %s " "${0##*/}" - exit 1 -fi - -arch=$1 -shift - -for repo in "${PKGREPOS[@]}" -do - db="${FTP_BASE}/${repo}/os/${arch}/${repo}.db" - [ -f "$db" ] && "$(dirname "$(readlink -e "$0")")/db-list-unsigned-packages.py" "$repo" "$@" < "$db" -done diff --git a/db-list-unsigned-packages.py b/db-list-unsigned-packages.py deleted file mode 100755 index 80cff51..0000000 --- a/db-list-unsigned-packages.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (C) 2012 Michał Masłowski -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
- - -""" -Output a list of repo/package-name-and-version pairs representing -unsigned packages in the database at standard input of repo named in -the first argument and specified for architectures listed in the -following arguments (usually the one of the database or any, default -is to list all). - -If the --keyset argument is passed, print the key fingerprint of every -signed package. -""" - - -import base64 -import subprocess -import sys -import tarfile - - -def main(): - """Do the job.""" - check_keys = False - if "--keyset" in sys.argv: - sys.argv.remove("--keyset") - check_keys = True - repo = sys.argv[1] - pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:]) - packages = [] - keys = [] - with tarfile.open(fileobj=sys.stdin.buffer) as archive: - for entry in archive: - if entry.name.endswith("/desc"): - content = archive.extractfile(entry) - skip = False - is_arch = False - key = None - for line in content: - if is_arch: - is_arch = False - if pkgarches and line.strip() not in pkgarches: - skip = True # different architecture - break - if line == b"%PGPSIG%\n": - skip = True # signed - key = b"" - if check_keys: - continue - else: - break - if line == b"%ARCH%\n": - is_arch = True - continue - if key is not None: - if line.strip(): - key += line.strip() - else: - break - if check_keys and key: - key_binary = base64.b64decode(key) - keys.append(key_binary) - packages.append(repo + "/" + entry.name[:-5]) - if skip: - continue - print(repo + "/" + entry.name[:-5]) - if check_keys and keys: - # We have collected all signed package names in packages and - # all keys in keys. Let's now ask gpg to list all signatures - # and find which keys made them. - packets = subprocess.check_output(("gpg", "--list-packets"), - input=b"".join(keys)) - i = 0 - for line in packets.decode("latin1").split("\n"): - if line.startswith(":signature packet:"): - keyid = line[line.index("keyid ") + len("keyid "):] - print(packages[i], keyid) - i += 1 - - -if __name__ == "__main__": - main() diff --git a/db-sync b/db-sync deleted file mode 100755 index facfae9..0000000 --- a/db-sync +++ /dev/null @@ -1,210 +0,0 @@ -#!/bin/bash -# Syncs Arch repos based on info contained in repo.db files -# License: GPLv3 - -# Principles -# * Get repo.db from an Arch-like repo -# * Generate a list of available packages -# * Create sync whitelist (based on package blacklist) -# * Get packages -# * Check package signatures -# * Check database signatures -# * Sync repo => repo - -# TODO -# * make a tarball of files used for forensics - -set -e - -# Run as `V=true db-sync` to get verbose output -VERBOSE=${V} -extra=() -${VERBOSE} && extra+=(-v) - -WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX") -trap "rm -rf -- $(printf '%q' "${WORKDIR}")" EXIT - -# Returns contents of a repo -get_repos() { - # Exclude everything but db files - rsync "${extra[@]}" --no-motd -mrtlH --no-p --include="*/" \ - --include="*.db" \ - --include="*${DBEXT}" \ - --include="*.files" \ - --include="*${FILESEXT}" \ - --exclude="*" \ - --delete-after \ - "rsync://${mirror}/${mirrorpath}/" "$WORKDIR" -} - -get_repo_content() { - # Return all contents - bsdtar tf "${1}" | \ - cut -d "/" -f 1 | \ - sort -u -} - -# Prints blacklisted packages -get_blacklist() { - cut -d ':' -f 1 "${BLACKLIST_FILE}" -} - -# repo -# arch -get_repo_file() { - echo "${WORKDIR}/${1}/os/${2}/${1}" -} - -# Process the databases and get the libre packages -init() { - - # Get the blacklisted packages - blacklist=($(get_blacklist)) - # Store all the whitelist files - whitelists=() - 
- msg "%d packages in blacklist" ${#blacklist[@]} - - test ${#blacklist[@]} -eq 0 && fatal_error "Empty blacklist" - - # Sync the repos databases - get_repos - - # Traverse all repo-arch pairs - for _repo in "${ARCHREPOS[@]}"; do - for _arch in "${ARCHARCHES[@]}"; do - msg "Processing %s-%s" "${_repo}" "${_arch}" - - db_file=$(get_repo_file "${_repo}" "${_arch}")${DBEXT} - files_file=$(get_repo_file "${_repo}" "${_arch}")${FILESEXT} - - if [ ! -f "${db_file}" ]; then - warning "%s doesn't exist, skipping this repo-arch" "${db_file}" - continue - fi - if [ ! -f "${files_file}" ]; then - warning "%s doesn't exist, skipping this repo-arch" "${files_file}" - continue - fi - - # Remove blacklisted packages and count them - # TODO capture all removed packages for printing on debug mode - msg2 "Removing blacklisted packages from %s database..." .db - LC_ALL=C repo-remove "${db_file}" "${blacklist[@]}" \ - |& sed -n 's/-> Removing/ &/p' - msg2 "Removing blacklisted packages from %s database..." .files - LC_ALL=C repo-remove "${files_file}" "${blacklist[@]}" \ - |& sed -n 's/-> Removing/ &/p' - # Get db contents - db=($(get_repo_content "${db_file}")) - - msg2 "Process clean db for syncing..." - - # Create a whitelist, add * wildcard to end - # TODO due to lack of -arch suffix, the pool sync retrieves every arch even if - # we aren't syncing them - # IMPORTANT: the . in the sed command is needed because an empty - # whitelist would consist of a single * allowing any package to - # pass through - printf '%s\n' "${db[@]}" | sed "s|.$|&*|g" > "/tmp/${_repo}-${_arch}.whitelist" - - msg2 "%d packages in whitelist" "$(wc -l /tmp/${_repo}-${_arch}.whitelist | cut -d' ' -f1)" - - # Sync excluding everything but whitelist - # We delete here for cleanup - rsync "${extra[@]}" --no-motd -rtlH \ - --delete-after \ - --delete-excluded \ - --delay-updates \ - --include-from="/tmp/${_repo}-${_arch}.whitelist" \ - --exclude="*" \ - "rsync://${mirror}/${mirrorpath}/${_repo}/os/${_arch}/" \ - "${FTP_BASE}/${_repo}/os/${_arch}/" - - # Add a new whitelist - whitelists+=(/tmp/${_repo}-${_arch}.whitelist) - - msg "Putting databases back in place" - rsync "${extra[@]}" --no-motd -rtlH \ - --delay-updates \ - --safe-links \ - "${WORKDIR}/${_repo}/os/${_arch}/" \ - "${FTP_BASE}/${_repo}/os/${_arch}/" - - # Cleanup - unset db - done - done - - - msg "Syncing package pool" - # Concatenate all whitelists, check for single *s just in case - cat "${whitelists[@]}" | grep -v "^\*$" | sort -u > /tmp/any.whitelist - - msg2 "Retrieving %d packages from pool" "$(wc -l /tmp/any.whitelist | cut -d' ' -f1)" - - # Sync - # *Don't delete-after*, this is the job of cleanup scripts. It will remove our - # packages too - local pkgpool - for pkgpool in "${ARCHPKGPOOLS[@]}"; do - rsync "${extra[@]}" --no-motd -rtlH \ - --delay-updates \ - --safe-links \ - --include-from=/tmp/any.whitelist \ - --exclude="*" \ - "rsync://${mirror}/${mirrorpath}/${pkgpool}/" \ - "${FTP_BASE}/${pkgpool}/" - done - - # Sync sources - msg "Syncing source pool" - #sed "s|\.pkg\.tar\.|.src.tar.|" /tmp/any.whitelist > /tmp/any-src.whitelist - #msg2 "Retrieving %d sources from pool" $(wc -l < /tmp/any-src.whitelist) - - # Sync - # *Don't delete-after*, this is the job of cleanup scripts. 
It will remove our - # packages too - local srcpool - for srcpool in "${ARCHSRCPOOLS[@]}"; do - rsync "${extra[@]}" --no-motd -rtlH \ - --delay-updates \ - --safe-links \ - --include-from=/tmp/any.whitelist \ - --exclude="*" \ - "rsync://${mirror}/${mirrorpath}/${srcpool}/" \ - "${FTP_BASE}/${srcpool}/" - done - - date -u +%s > "${FTP_BASE}/lastsync" - - # Cleanup - unset blacklist whitelists _arch _repo repo_file -} - -trap_exit() { - local signal=$1; shift - echo - error "$@" - trap -- "$signal" - kill "-$signal" "$$" -} - -source "$(dirname "$(readlink -e "$0")")/config" -source "$(dirname "$(readlink -e "$0")")/db-sync.conf" -source "$(dirname "$(readlink -e "$0")")/db-libremessages" - -# Check variables presence -for var in DBEXT FILESEXT mirror mirrorpath WORKDIR BLACKLIST_FILE FTP_BASE ARCHSRCPOOLS ARCHPKGPOOLS; do - test -z "${!var}" && fatal_error "Empty %s" "${var}" -done - -# From makepkg -set -E -for signal in TERM HUP QUIT; do - trap "trap_exit $signal '%s signal caught. Exiting...' $signal" "$signal" -done -trap 'trap_exit INT "Aborted by user! Exiting..."' INT -trap 'trap_exit USR1 "An unknown error has occurred. Exiting..."' ERR - -init diff --git a/db-sync-arm b/db-sync-arm deleted file mode 100755 index 952b546..0000000 --- a/db-sync-arm +++ /dev/null @@ -1,199 +0,0 @@ -#!/bin/bash -# Syncs Arch repos based on info contained in repo.db files -# License: GPLv3 - -# Principles -# * Get repo.db from an Arch-like repo -# * Generate a list of available packages -# * Create sync whitelist (based on package blacklist) -# * Get packages -# * Check package signatures -# * Check database signatures -# * Sync repo => repo - -# TODO -# * make a tarball of files used for forensics - -set -e - -# Run as `V=true db-sync` to get verbose output -VERBOSE=${V} -extra=() -${VERBOSE} && extra+=(-v) - -WORKDIR=$(mktemp -dt "${0##*/}.XXXXXXXXXX") -trap "rm -rf -- $(printf '%q' "${WORKDIR}")" EXIT - -# Returns contents of a repo -get_repos() { - # Exclude everything but db files - rsync "${extra[@]}" --no-motd -mrtlH --no-p --include="*/" \ - --include="*.db" \ - --include="*${DBEXT}" \ - --include="*.files" \ - --include="*${FILESEXT}" \ - --exclude="*" \ - --delete-after \ - "rsync://${mirror}/${mirrorpath}/" "$WORKDIR" -} - -get_repo_content() { - # Return all contents - bsdtar tf "${1}" | \ - cut -d "/" -f 1 | \ - sort -u -} - -# Prints blacklisted packages -get_blacklist() { - cut -d ':' -f 1 "${BLACKLIST_FILE}" -} - -# repo -# arch -get_repo_file() { - echo "${WORKDIR}/${2}/${1}/${1}" -} - -# Process the databases and get the libre packages -init() { - - # Get the blacklisted packages - blacklist=($(get_blacklist)) - # Store all the whitelist files - whitelists=() - - msg "%d packages in blacklist" ${#blacklist[@]} - - test ${#blacklist[@]} -eq 0 && fatal_error "Empty blacklist" - - # Sync the repos databases - get_repos - - # Traverse all repo-arch pairs - for _arch in "${OURARCHES[@]}"; do - for _repo in "${ARMREPOS[@]}"; do - msg "Processing %s-%s" "${_repo}" "${_arch}" - - db_file=$(get_repo_file "${_repo}" "${_arch}")${DBEXT} - files_file=$(get_repo_file "${_repo}" "${_arch}")${FILESEXT} - - if [ ! -f "${db_file}" ]; then - warning "%s doesn't exist, skipping this arch-repo" "${db_file}" - continue - fi - if [ ! 
-f "${files_file}" ]; then - warning "%s doesn't exist, skipping this arch-repo" "${files_file}" - continue - fi - - # Remove blacklisted packages and count them - # TODO capture all removed packages for printing on debug mode - msg2 "Removing blacklisted packages from %s database..." .db - LC_ALL=C repo-remove "${db_file}" "${blacklist[@]}" \ - |& sed -n 's/-> Removing/ &/p' - msg2 "Removing blacklisted packages from %s database..." .files - LC_ALL=C repo-remove "${files_file}" "${blacklist[@]}" \ - |& sed -n 's/-> Removing/ &/p' - # Get db contents - db=($(get_repo_content "${db_file}")) - - msg2 "Process clean db for syncing..." - - # Create a whitelist, add * wildcard to end - # TODO due to lack of -arch suffix, the pool sync retrieves every arch even if - # we aren't syncing them - # IMPORTANT: the . in the sed command is needed because an empty - # whitelist would consist of a single * allowing any package to - # pass through - printf '%s\n' "${db[@]}" | sed "s|.$|&*|g" > "/tmp/${_repo}-${_arch}.whitelist" - - msg2 "%d packages in whitelist" "$(wc -l /tmp/${_repo}-${_arch}.whitelist | cut -d' ' -f1)" - - msg2 "Retrieving %d packages to pool" "$(wc -l /tmp/${_repo}-${_arch}.whitelist | cut -d' ' -f1)" - - # Sync excluding everything but whitelist - rsync "${extra[@]}" --no-motd -rtlH \ - --delay-updates \ - --safe-links \ - --include-from="/tmp/${_repo}-${_arch}.whitelist" \ - --exclude="*" \ - "rsync://${mirror}/${mirrorpath}/${_arch}/${_repo}/" \ - "${FTP_BASE}/${PKGPOOLARM}/" - - msg "Putting databases back in place" - rsync "${extra[@]}" --no-motd -rtlH \ - --delay-updates \ - --safe-links \ - "${WORKDIR}/${_arch}/${_repo}/" \ - "${FTP_BASE}/${_repo}/os/${_arch}/" - - # Cleanup - unset db - done - done - - - msg "Generating symbolic links to pool" - - for _arch in "${OURARCHES[@]}"; do - for _repo in "${ARMREPOS[@]}"; do - # Modify whitelist to search packages and create symlinks - sed -i "s/*/-${_arch}.pkg.tar.xz/g" "/tmp/${_repo}-${_arch}.whitelist" - - msg "Putting symlinks in ${_repo}/os/${_arch}" - - while read _pkgfile; do - # Symlink to package - if [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile}" ]; then - ln -sfv "../../../${PKGPOOLARM}/${_pkgfile}" \ - "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile}" - elif [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile/${_arch}/any}" ]; then - ln -sfv "../../../${PKGPOOLARM}/${_pkgfile/${_arch}/any}" \ - "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile/${_arch}/any}" - fi - - # Symlink to signature - if [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile}.sig" ]; then - ln -sfv "../../../${PKGPOOLARM}/${_pkgfile}.sig" \ - "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile}.sig" - elif [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile/${_arch}/any}.sig" ]; then - ln -sfv "../../../${PKGPOOLARM}/${_pkgfile/${_arch}/any}.sig" \ - "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile/${_arch}/any}.sig" - fi - done < "/tmp/${_repo}-${_arch}.whitelist" - done - done - - date -u +%s > "${FTP_BASE}/lastsync" - - # Cleanup - unset blacklist whitelists _arch _repo repo_file _pkgfile -} - -trap_exit() { - local signal=$1; shift - echo - error "$@" - trap -- "$signal" - kill "-$signal" "$$" -} - -source "$(dirname "$(readlink -e "$0")")/config" -source "$(dirname "$(readlink -e "$0")")/db-sync-arm.conf" -source "$(dirname "$(readlink -e "$0")")/db-libremessages" - -# Check variables presence -for var in DBEXT FILESEXT mirror mirrorpath WORKDIR BLACKLIST_FILE FTP_BASE ARCHSRCPOOLS ARCHPKGPOOLS; do - test -z "${!var}" && fatal_error "Empty %s" "${var}" -done - -# From makepkg -set -E -for 
signal in TERM HUP QUIT; do - trap "trap_exit $signal '%s signal caught. Exiting...' $signal" "$signal" -done -trap 'trap_exit INT "Aborted by user! Exiting..."' INT -trap 'trap_exit USR1 "An unknown error has occurred. Exiting..."' ERR - -init diff --git a/db-sync-arm.conf b/db-sync-arm.conf deleted file mode 100644 index eaa170f..0000000 --- a/db-sync-arm.conf +++ /dev/null @@ -1,7 +0,0 @@ -#mirror="mirror.yandex.ru" -mirror="ftp.halifax.rwth-aachen.de" - -## mirrors without sources folder -## use "archlinuxarm" instead "archlinux-arm" to mirror.yandex.ru - -mirrorpath="archlinux-arm" diff --git a/db-sync.conf b/db-sync.conf deleted file mode 100644 index 24fc44d..0000000 --- a/db-sync.conf +++ /dev/null @@ -1,11 +0,0 @@ -mirror="mirrors.kernel.org" - -## mirrors without sources folder -#mirror="mirrors.niyawe.de" -#mirror="mirror.nl.leaseweb.net" -#mirror="mirror.one.com" -#mirror="mirror.us.leaseweb.net" -#mirror="mirror.bytemark.co.uk" -#mirror="mirror.de.leaseweb.net" - -mirrorpath="archlinux" diff --git a/list_nonfree_in_db.py b/list_nonfree_in_db.py deleted file mode 100755 index a486fa5..0000000 --- a/list_nonfree_in_db.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python2 -#-*- encoding: utf-8 -*- -from filter import * -import argparse - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - prog="nonfree_in_db", - description="Cleans nonfree files on repo",) - - parser.add_argument("-k", "--blacklist-file", type=str, - help="File containing blacklisted names", - required=True,) - - parser.add_argument("-b", "--database", type=str, - help="dabatase to clean", - required=True,) - - args=parser.parse_args() - - if not (args.blacklist_file and args.database): - parser.print_help() - exit(1) - - blacklist=listado(args.blacklist_file) - pkgs=get_pkginfo_from_db(args.database) - - print(" ".join([pkg["name"] for pkg in pkgs if pkg["name"] in blacklist])) diff --git a/mkrepo b/mkrepo deleted file mode 100755 index b11dc0b..0000000 --- a/mkrepo +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# Author: Nicolás Reynolds -# License: GPLv3+ -# Description: A script to quickly create new [repos] - -source "$(dirname "$(readlink -e "$0")")/config" - -for repo in "$@"; do - echo ":: Creating [$repo]" - for arch in "${ARCHES[@]}"; do - mkdir -pv "${FTP_BASE}/${repo}/os/${arch}" - done -done - -echo ":: All done. Add the repo to the ParabolaWeb admin page." -- cgit v1.2.3
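
A few of the idioms in the scripts renamed above are easier to read outside of diff context. db-sync's get_repos() starts by pulling only the repository databases: directories are always included so rsync can walk the tree, the .db/.files archives are included, and everything else is excluded, with -m pruning the directories left empty afterwards. A standalone sketch of that fetch, assuming the usual .db.tar.gz/.files.tar.gz values for DBEXT/FILESEXT and an example work directory (the mirror values are the ones from db-sync.conf):

    #!/bin/bash
    # Sketch of the databases-only fetch done by get_repos(); the work
    # directory is an example, the mirror values match db-sync.conf.
    set -e
    mirror=mirrors.kernel.org
    mirrorpath=archlinux
    workdir=$(mktemp -dt "db-fetch.XXXXXXXXXX")

    # Include every directory so rsync can descend, include the database and
    # .files archives, exclude everything else; -m prunes directories that end
    # up empty, --delete-after drops databases that vanished upstream.
    rsync --no-motd -mrtlH --no-p \
        --include='*/' \
        --include='*.db' --include='*.db.tar.gz' \
        --include='*.files' --include='*.files.tar.gz' \
        --exclude='*' \
        --delete-after \
        "rsync://${mirror}/${mirrorpath}/" "$workdir"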
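
The core of db-sync (now db-import-pkg-archlinux) is the whitelist it derives from each cleaned database: after repo-remove has stripped the blacklisted entries, every package directory listed in the .db becomes an rsync include pattern, so only the remaining packages are fetched and --delete-excluded clears out everything else. A minimal sketch of that step for one repo/arch pair; the database path and destination are examples, only the mirror comes from db-sync.conf:

    #!/bin/bash
    # Sketch of db-sync's whitelist-driven package sync; paths are examples.
    set -e
    db_file=/tmp/repos/core/os/x86_64/core.db   # already cleaned with repo-remove
    whitelist=/tmp/core-x86_64.whitelist
    upstream=rsync://mirrors.kernel.org/archlinux/core/os/x86_64/
    dest=/srv/example/repo/core/os/x86_64/

    # Database entries look like "pkgname-pkgver-pkgrel/desc"; keep the
    # directory name and append "*" so the pattern also matches the package
    # file and its .sig.  The "." in the sed pattern keeps an empty line from
    # becoming a bare "*" that would let every package through.
    bsdtar tf "$db_file" | cut -d/ -f1 | sort -u | sed 's|.$|&*|' > "$whitelist"

    # Fetch only whitelisted files; --delete-excluded removes anything in the
    # destination that is not (or no longer) on the whitelist.
    mkdir -p "$dest"
    rsync --no-motd -rtlH --delete-after --delete-excluded --delay-updates \
        --include-from="$whitelist" --exclude='*' \
        "$upstream" "$dest"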
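
db-sync-arm differs from db-sync in that it downloads every package into a single pool (PKGPOOLARM) and then rebuilds each ${repo}/os/${arch} directory as symlinks into that pool, falling back to the matching -any package when no architecture-specific build exists. Reduced to one repo/arch pair, with example values and a whitelist assumed to already hold full file names, the linking step is roughly:

    #!/bin/bash
    # Sketch of db-sync-arm's pool-symlink step for a single repo/arch;
    # FTP_BASE, PKGPOOLARM, _repo and _arch values are examples only.
    set -e
    FTP_BASE=/srv/example/repo
    PKGPOOLARM=pool/alarm
    _repo=core
    _arch=armv7h

    mkdir -p "${FTP_BASE}/${_repo}/os/${_arch}"
    while read -r _pkgfile; do
        # Prefer the architecture-specific package, otherwise fall back to the
        # "any" build of the same name; the real script repeats the same
        # test/fallback for the detached .sig files.
        if [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile}" ]; then
            ln -sfv "../../../${PKGPOOLARM}/${_pkgfile}" \
                "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile}"
        elif [ -f "${FTP_BASE}/${PKGPOOLARM}/${_pkgfile/${_arch}/any}" ]; then
            ln -sfv "../../../${PKGPOOLARM}/${_pkgfile/${_arch}/any}" \
                "${FTP_BASE}/${_repo}/os/${_arch}/${_pkgfile/${_arch}/any}"
        fi
    done < "/tmp/${_repo}-${_arch}.whitelist"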
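
Both sync scripts finish with the same signal plumbing: trap_exit prints the error through the db-libremessages helpers, then clears its own trap and re-raises the signal against the script's PID, so a caller (cron, another script) sees death-by-signal rather than a plain exit 1. A self-contained sketch, with a stub error() standing in for db-libremessages:

    #!/bin/bash
    # Sketch of the trap/re-raise idiom; error() is a stub for the helper the
    # real scripts source from db-libremessages.
    error() { printf '==> ERROR: %s\n' "$*" >&2; }

    trap_exit() {
        local signal=$1; shift
        echo
        error "$@"
        trap -- "$signal"       # drop our handler for this signal
        kill "-$signal" "$$"    # re-raise it so the caller sees the signal
    }

    set -E                      # let the ERR trap fire inside functions too
    for signal in TERM HUP QUIT; do
        trap "trap_exit $signal '$signal signal caught. Exiting...'" "$signal"
    done
    trap 'trap_exit INT "Aborted by user! Exiting..."' INT
    trap 'trap_exit USR1 "An unknown error has occurred. Exiting..."' ERR

    sleep 60   # try interrupting this with Ctrl-C or kill -TERM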
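
list_nonfree_in_db.py (now db-check-nonfree-in-db) depends on the in-tree filter module to parse the database, so it is not restated here; a rough, dependency-free approximation of the question it answers -- which blacklisted names are still present in a given .db -- can be had with comm (both paths below are examples, not the real configuration):

    #!/bin/bash
    # Rough shell approximation of the check list_nonfree_in_db.py performs;
    # this is not the tool itself, and both paths are examples.
    set -e
    blacklist=/srv/example/blacklist.txt
    db=/srv/example/repo/core/os/x86_64/core.db

    # Database entries look like "pkgname-pkgver-pkgrel/desc"; strip the last
    # two dash-separated fields to recover the package name, then intersect
    # with the first field of the blacklist.
    comm -12 \
        <(bsdtar tf "$db" | cut -d/ -f1 | rev | cut -d- -f3- | rev | sort -u) \
        <(cut -d: -f1 "$blacklist" | sort -u)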