summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJoshua Ismael Haase Hernández <hahj87@gmail.com>2011-04-13 00:52:10 -0500
committerJoshua Ismael Haase Hernández <hahj87@gmail.com>2011-04-13 00:52:10 -0500
commit36c0426277b03c3af60e94458c11d70a32a2030c (patch)
tree532bfb100863f31664b3747d74bb31aeeeb93823
parent3e27d11f68571bce92138f6cbfcaecac75fa1644 (diff)
parent748600bf8dfba34b336ad642a6b26dae674db85f (diff)
Fixed more errors and refactoring
-rwxr-xr-xclean_repo.py44
-rw-r--r--db-functions487
-rwxr-xr-xfilter.py62
-rwxr-xr-xmain.sh3
-rwxr-xr-xmkpending.py50
-rw-r--r--test/test_filter.py7
6 files changed, 557 insertions, 96 deletions
diff --git a/clean_repo.py b/clean_repo.py
index d4e06fc..eccfd01 100755
--- a/clean_repo.py
+++ b/clean_repo.py
@@ -3,6 +3,37 @@
from repm.filter import *
import argparse
+def mkpending(path_to_db, repo, prefix=config["pending"]):
+ """ Determine wich packages are pending for license auditing."""
+ if "~" in path_to_db:
+ path_to_db=(os.path.expanduser(path_to_db))
+
+ search = tuple(listado(config["blacklist"]) +
+ listado(config["whitelist"]))
+
+ pkgs=list(pkginfo_from_db(path_to_db))
+
+ filename=prefix + "-" + repo + ".txt"
+ try:
+ fsock=open(filename, "rw")
+ pkgs=[pkg for pkg in pkginfo_from_db(path_to_db)
+ if pkg["name"] not in listado(filename)]
+ for line in fsock.readlines():
+ if line:
+ pkg=Package()
+ pkg["name"]=line.split(":")[0]
+ pkg["license"]=":".join(line.split(":")[1:])
+ pkgs.append(pkg)
+ pkgs=[pkg for pkg in pkgs if pkg["name"] not in search
+ and "custom" in pkg["license"]]
+ fsock.write("\n".join([pkg["name"] + ":" + pkg["license"]
+ for pkg in pkgs]) + "\n")
+ except(IOError):
+ raise NonValidFile("Can't read or write %s" % filename)
+ finally:
+ fsock.close()
+ return pkgs
+
def remove_from_blacklist(path_to_db, blacklisted_names,
debug=config["debug"]):
""" Check the blacklist and remove packages on the db"""
@@ -37,13 +68,18 @@ if __name__ == "__main__":
help="directory to clean")
args=parser.parse_args()
- if args.directory:
- cleanup_nonfree_in_dir(args.directory, listado(config["blacklist"]))
-
if args.database:
+ repo=os.path.basename(args.database).split(".")[0]
pkgs=pkginfo_from_db(args.database)
remove_from_blacklist(args.database, pkgs,
tuple(listado(config["blacklist"]) +
- listado(config["pending"])))
+ listado(config["pending"] +
+ "-" + repo + ".txt")))
+        mkpending(args.database, repo)
+
+ if args.directory:
+ cleanup_nonfree_in_dir(args.directory,
+ listado(config["blacklist"]))
+
if not args.directory and not args.database:
parser.print_help()
diff --git a/db-functions b/db-functions
new file mode 100644
index 0000000..1780e4f
--- /dev/null
+++ b/db-functions
@@ -0,0 +1,487 @@
+#!/bin/bash
+
+# Some PKGBUILDs need CARCH to be set
+CARCH="x86_64"
+
+# Useful functions
+UMASK=""
+set_umask () {
+ [ "$UMASK" == "" ] && UMASK="$(umask)"
+ export UMASK
+ umask 002
+}
+
+restore_umask () {
+ umask $UMASK >/dev/null
+}
+
+# set up general environment
+WORKDIR=$(mktemp -d /tmp/$(basename $0).XXXXXXXXXX)
+LOCKS=()
+
+# check if messages are to be printed using color
+unset ALL_OFF BOLD BLUE GREEN RED YELLOW
+if [[ -t 2 ]]; then
+ ALL_OFF="$(tput sgr0)"
+ BOLD="$(tput bold)"
+ BLUE="${BOLD}$(tput setaf 4)"
+ GREEN="${BOLD}$(tput setaf 2)"
+ RED="${BOLD}$(tput setaf 1)"
+ YELLOW="${BOLD}$(tput setaf 3)"
+fi
+readonly ALL_OFF BOLD BLUE GREEN RED YELLOW
+
+plain() {
+ local mesg=$1; shift
+ printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+msg() {
+ local mesg=$1; shift
+ printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+msg2() {
+ local mesg=$1; shift
+ printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "${YELLOW}==> WARNING:${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+ printf "${RED}==> ERROR${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+##
+# usage : in_array( $needle, $haystack )
+# return : 0 - found
+# 1 - not found
+##
+in_array() {
+ local needle=$1; shift
+ [[ -z $1 ]] && return 1 # Not Found
+ local item
+ for item in "$@"; do
+ [[ $item = $needle ]] && return 0 # Found
+ done
+ return 1 # Not Found
+}
+
+script_lock() {
+ local LOCKDIR="$TMPDIR/.scriptlock.$(basename $0)"
+ if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
+ local _owner="$(stat -c %U $LOCKDIR)"
+ error "Script $(basename $0) is already locked by $_owner."
+ exit 1
+ else
+ set_umask
+ return 0
+ fi
+}
+
+script_unlock() {
+ local LOCKDIR="$TMPDIR/.scriptlock.$(basename $0)"
+ if [ ! -d "$LOCKDIR" ]; then
+ warning "Script $(basename $0) was not locked!"
+ restore_umask
+ return 1
+ else
+ rmdir "$LOCKDIR"
+ restore_umask
+ return 0
+ fi
+}
+
+cleanup() {
+ local l
+ local repo
+ local arch
+
+ trap - EXIT INT QUIT TERM
+ for l in ${LOCKS[@]}; do
+ repo=${l%.*}
+ arch=${l#*.}
+ if [ -d "$TMPDIR/.repolock.$repo.$arch" ]; then
+ msg "Removing left over lock from [${repo}] (${arch})"
+ repo_unlock $repo $arch
+ fi
+ done
+ if [ -d "$TMPDIR/.scriptlock.$(basename $0)" ]; then
+ msg "Removing left over lock from $(basename $0)"
+ script_unlock
+ fi
+ rm -rf "$WORKDIR"
+ [ "$1" ] && exit $1
+}
+
+abort() {
+ msg 'Aborting...'
+ cleanup 0
+}
+
+die() {
+ error "$*"
+ cleanup 1
+}
+
+trap abort INT QUIT TERM HUP
+trap cleanup EXIT
+
+
+#repo_lock <repo-name> <arch> [timeout]
+repo_lock () {
+ local LOCKDIR="$TMPDIR/.repolock.$1.$2"
+ local LOCKFILE="${FTP_BASE}/${1}/os/${2}/${1}${DBEXT}.lck"
+ local _count
+ local _trial
+ local _timeout
+ local _lockblock
+ local _owner
+
+ # This is the lock file used by repo-add and repo-remove
+ if [ -f "${LOCKFILE}" ]; then
+ error "Repo [${1}] (${2}) is already locked by repo-{add,remove} process $(cat $LOCKFILE)"
+ return 1
+ fi
+
+ if [ $# -eq 2 ]; then
+ _lockblock=true
+ _trial=0
+ elif [ $# -eq 3 ]; then
+ _lockblock=false
+ _timeout=$3
+ let _trial=$_timeout/$LOCK_DELAY
+ fi
+
+ _count=0
+ while [ $_count -le $_trial ] || $_lockblock ; do
+ if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
+ _owner="$(stat -c %U $LOCKDIR)"
+ warning "Repo [${1}] (${2}) is already locked by $_owner. "
+ msg2 "Retrying in $LOCK_DELAY seconds..."
+ else
+ LOCKS[${#LOCKS[*]}]="$1.$2"
+ set_umask
+ return 0
+ fi
+ sleep $LOCK_DELAY
+ let _count=$_count+1
+ done
+
+ error "Repo [${1}] (${2}) is already locked by $_owner. Giving up!"
+ return 1
+}
+
+repo_unlock () { #repo_unlock <repo-name> <arch>
+ local LOCKDIR="$TMPDIR/.repolock.$1.$2"
+ if [ ! -d "$LOCKDIR" ]; then
+ warning "Repo lock [${1}] (${2}) was not locked!"
+ restore_umask
+ return 1
+ else
+ rmdir "$LOCKDIR"
+ restore_umask
+ return 0
+ fi
+}
+
+# usage: _grep_pkginfo pkgfile pattern
+_grep_pkginfo() {
+ local _ret
+
+ _ret="$(bsdtar -xOqf "$1" .PKGINFO | /bin/grep -m 1 "^${2} = ")"
+ echo "${_ret#${2} = }"
+}
+
+
+# Get the package base or name as fallback
+getpkgbase() {
+ local _base
+
+ _base="$(_grep_pkginfo "$1" "pkgbase")"
+ if [ -z "$_base" ]; then
+ getpkgname "$1"
+ else
+ echo "$_base"
+ fi
+}
+
+issplitpkg() {
+ local _base
+
+ _base="$(_grep_pkginfo "$1" "pkgbase")"
+ if [ -z "$_base" ]; then
+ return 1
+ else
+ return 0
+ fi
+}
+
+# Get the package name
+getpkgname() {
+ local _name
+
+ _name="$(_grep_pkginfo "$1" "pkgname")"
+ if [ -z "$_name" ]; then
+ error "Package '$1' has no pkgname in the PKGINFO. Fail!"
+ exit 1
+ fi
+
+ echo "$_name"
+}
+
+# Get the pkgver-pkgrel of this package
+getpkgver() {
+ local _ver
+
+ _ver="$(_grep_pkginfo "$1" "pkgver")"
+ if [ -z "$_ver" ]; then
+ error "Package '$1' has no pkgver in the PKGINFO. Fail!"
+ exit 1
+ fi
+
+ echo "$_ver"
+}
+
+getpkgarch() {
+ local _ver
+
+ _ver="$(_grep_pkginfo "$1" "arch")"
+ if [ -z "$_ver" ]; then
+ error "Package '$1' has no arch in the PKGINFO. Fail!"
+ exit 1
+ fi
+
+ echo "$_ver"
+}
+
+getpkgfile() {
+ if [[ ${#} -ne 1 ]]; then
+ error 'No canonical package found!'
+ exit 1
+ elif [ ! -f "${1}" ]; then
+ error "Package ${1} not found!"
+ exit 1
+ fi
+
+ echo ${1}
+}
+
+getpkgfiles() {
+ local f
+ if [ ! -z "$(echo ${@%\.*} | sed "s/ /\n/g" | sort | uniq -D)" ]; then
+ error 'Duplicate packages found!'
+ exit 1
+ fi
+
+ for f in ${@}; do
+ if [ ! -f "${f}" ]; then
+ error "Package ${f} not found!"
+ exit 1
+ fi
+ done
+
+ echo ${@}
+}
+
+check_pkgfile() {
+ local pkgfile=$1
+
+ local pkgname="$(getpkgname ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgver="$(getpkgver ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgarch="$(getpkgarch ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+
+ in_array "${pkgarch}" ${ARCHES[@]} 'any' || return 1
+
+ if echo "$(basename ${pkgfile})" | grep -q "${pkgname}-${pkgver}-${pkgarch}"; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+check_pkgsvn() {
+ local pkgfile="${1}"
+ local _pkgbase="$(getpkgbase ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local _pkgname="$(getpkgname ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local _pkgver="$(getpkgver ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local _pkgarch="$(getpkgarch ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local repo="${2}"
+
+ in_array "${repo}" ${PKGREPOS[@]} || return 1
+
+ if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
+ mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
+ svn export -q "${SVNREPO}/${_pkgbase}/repos/${repo}-${_pkgarch}/PKGBUILD" \
+ "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null
+ [ $? -ge 1 ] && return 1
+ fi
+
+ local svnver="$(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo "${pkgver}-${pkgrel}")"
+ [ "${svnver}" == "${_pkgver}" ] || return 1
+
+ local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo ${pkgname[@]}))
+ in_array "${_pkgname}" ${svnnames[@]} || return 1
+
+ return 0
+}
+
+check_splitpkgs() {
+ local repo="${1}"
+ shift
+ local pkgfiles=(${@})
+ local pkgfile
+ local pkgdir
+ local svnname
+
+ mkdir -p "${WORKDIR}/check_splitpkgs/"
+ pushd "${WORKDIR}/check_splitpkgs" >/dev/null
+
+ for pkgfile in ${pkgfiles[@]}; do
+ issplitpkg "${pkgfile}" || continue
+ local _pkgbase="$(getpkgbase ${pkgfile})"
+ msg2 "Checking $_pkgbase"
+ local _pkgname="$(getpkgname ${pkgfile})"
+ local _pkgarch="$(getpkgarch ${pkgfile})"
+ mkdir -p "${repo}/${_pkgarch}/${_pkgbase}"
+ echo "${_pkgname}" >> "${repo}/${_pkgarch}/${_pkgbase}/staging"
+
+ if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
+ mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
+
+ cp -r ${SVNREPO}/$repo/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null 2>&1 || \
+ cp -r ${SVNREPO}/libre/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null 2>&1 || \
+ cp -r ${SVNREPO}/libre-testing/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/$_pkgbase">/dev/null 2>&1
+
+ [[ $? -ge 1 ]] && {
+ echo "Failed $_pkgbase-$_pkgver-$_pkgarch"
+ return 1
+ }
+ fi
+
+ local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo ${pkgname[@]}))
+ for svnname in ${svnnames[@]}; do
+ echo "${svnname}" >> "${repo}/${_pkgarch}/${_pkgbase}/svn"
+ done
+ done
+ popd >/dev/null
+
+ for pkgdir in "${WORKDIR}/check_splitpkgs/${repo}"/*/*; do
+ [ ! -d "${pkgdir}" ] && continue
+ sort -u "${pkgdir}/staging" -o "${pkgdir}/staging"
+ sort -u "${pkgdir}/svn" -o "${pkgdir}/svn"
+ if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/svn")" ]; then
+ return 1
+ fi
+ done
+
+ return 0
+}
+
+check_pkgrepos() {
+ local pkgfile=$1
+
+ local pkgname="$(getpkgname ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgver="$(getpkgver ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgarch="$(getpkgarch ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1
+ [ -f "${FTP_BASE}/${PKGPOOL}/$(basename ${pkgfile})" ] && return 1
+
+ local repo
+ local arch
+ for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/$(basename ${pkgfile})" ] && return 1
+ done
+ done
+
+ return 0
+}
+
+#usage: chk_license ${license[@]}"
+chk_license() {
+ local l
+ for l in ${@}; do
+ in_array ${l} ${ALLOWED_LICENSES[@]} && return 0
+ done
+
+ return 1
+}
+
+check_repo_permission() {
+ local repo=$1
+
+ [ ${#PKGREPOS[@]} -eq 0 ] && return 1
+ [ -z "${PKGPOOL}" ] && return 1
+
+ in_array "${repo}" ${PKGREPOS[@]} || return 1
+
+ [ -w "$FTP_BASE/${PKGPOOL}" ] || return 1
+
+ local arch
+ for arch in ${ARCHES}; do
+ local dir="${FTP_BASE}/${repo}/os/${arch}/"
+ [ -w "${dir}" ] || return 1
+ [ -f "${dir}"${repo}${DBEXT} -a ! -w "${dir}"${repo}${DBEXT} ] && return 1
+ done
+
+ return 0
+}
+
+set_repo_permission() {
+ local repo=$1
+ local arch=$2
+ local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
+
+ if [ -w "${dbfile}" ]; then
+ local group=$(stat --printf='%G' "$(dirname "${dbfile}")")
+ chgrp $group "${dbfile}" || error "Could not change group of ${dbfile} to $group"
+ chmod g+w "${dbfile}" || error "Could not set write permission for group $group to ${dbfile}"
+ else
+ error "You don't have permission to change ${dbfile}"
+ fi
+}
+
+arch_repo_add() {
+ local repo=$1
+ local arch=$2
+ local pkgs=(${@:3})
+
+ # package files might be relative to repo dir
+ pushd "${FTP_BASE}/${repo}/os/${arch}" >/dev/null
+ repo-add -q "${repo}${DBEXT}" ${pkgs[@]} >/dev/null \
+ || error "repo-add ${repo}${DBEXT} ${pkgs[@]}"
+ popd >/dev/null
+ set_repo_permission "${repo}" "${arch}"
+}
+
+arch_repo_remove() {
+ local repo=$1
+ local arch=$2
+ local pkgs=(${@:3})
+ local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
+
+ if [ ! -f "${dbfile}" ]; then
+ error "No database found at '${dbfile}'"
+ return 1
+ fi
+ repo-remove -q "${dbfile}" ${pkgs[@]} >/dev/null \
+ || error "repo-remove ${dbfile} ${pkgs[@]}"
+ set_repo_permission "${repo}" "${arch}"
+}
diff --git a/filter.py b/filter.py
index 48e2d93..1d70a63 100755
--- a/filter.py
+++ b/filter.py
@@ -38,7 +38,7 @@ def pkginfo_from_filename(filename):
pkg["name"] = "-".join(fileattrs)
return pkg
-def pkginfo_from_desc(filename):
+def pkginfo_from_desc(info_from_desc, pkg=Package()):
""" Returns pkginfo from desc file.
Parameters:
@@ -48,14 +48,7 @@ def pkginfo_from_desc(filename):
Returns:
----------
pkg -> Package object"""
- if not os.path.isfile(filename):
- raise NonValidFile
- try:
- f=open(filename)
- info=f.read().rsplit()
- finally:
- f.close()
- pkg = Package()
+ info=info_from_desc.rsplit()
info_map={"name" :("%NAME%" , None),
"version" :("%VERSION%" , 0 ),
"release" :("%VERSION%" , 1 ),
@@ -128,7 +121,7 @@ def pkginfo_from_files_in_dir(directory):
return tuple(package_list)
def pkginfo_from_db(path_to_db):
- """ Get PKGINFO from db.
+ """ Get pkginfo from db.
Parameters:
----------
@@ -136,37 +129,30 @@ def pkginfo_from_db(path_to_db):
Output:
----------
- None """
+ package_list -> tuple of Package objects"""
package_list=list()
if not os.path.isfile(path_to_db):
raise NonValidFile(path_to_db + " is not a file")
- check_output(["mkdir", "-p", config["archdb"]])
-
try:
- db_open_tar = tarfile.open(path_to_db, 'r:gz')
+ dbsock = tarfile.open(path_to_db, 'r:gz')
+        desc_files=[desc for desc in dbsock.getnames()
+ if "/desc" in desc]
+ for name in desc_files:
+ desc=dbsock.extractfile(name)
+ package_list.append(pkginfo_from_desc(desc.read()))
except tarfile.ReadError:
- raise NonValidFile("No valid db_file %s or not readable" % path_to_db)
+ raise NonValidFile("No valid db_file %s or not readable"
+ % path_to_db)
return(tuple())
-
- for file in db_open_tar.getmembers():
- db_open_tar.extract(file, config["archdb"])
- db_open_tar.close()
- # Get info from file
- for dir_ in glob(config["archdb"] + "/*"):
- if isdir(dir_) and isfile(dir_ + "/desc"):
- package_list.append(pkginfo_from_desc(
- os.path.join(dir_,"desc")))
- check_output(["rm", "-r", config["archdb"]])
- if config["debug"]:
- printf(package_list)
+ finally:
+        dbsock.close()
return package_list
-def generate_exclude_list_from_blacklist(packages_iterable,
- blacklisted_names,
- exclude_file=config["rsync_blacklist"],
- debug=config["debug"]):
+def rsyncBlacklist_from_blacklist(packages_iterable,
+ blacklisted_names,
+ exclude_file=config["rsync_blacklist"]):
""" Generate an exclude list for rsync
Parameters:
@@ -183,19 +169,19 @@ def generate_exclude_list_from_blacklist(packages_iterable,
if isinstance(pkg, Package)
and pkg["name"] in blacklisted_names]
- if debug:
- return pkgs
try:
fsock = open(exclude_file,"w")
- try:
- fsock.write("\n".join(a))
- finally:
- fsock.close()
+ fsock.write("\n".join(pkgs) + "\n")
except IOError:
printf("%s wasnt written" % exclude_file)
+ exit(1)
+ finally:
+ fsock.close()
+ return pkgs
+
if __name__ == "__main__":
cmd=generate_rsync_command(rsync_list_command)
a=run_rsync(cmd)
packages=pkginfo_from_rsync_output(a)
- generate_exclude_list_from_blacklist(packages,listado(blacklist))
+    rsyncBlacklist_from_blacklist(packages,listado(blacklist))
diff --git a/main.sh b/main.sh
index 46f2f75..9f41a95 100755
--- a/main.sh
+++ b/main.sh
@@ -31,9 +31,8 @@ for repo in $(echo ${repo_list} | tr ':' ' '); do
msg2 "Syncing ${repo} ${arch}"
${rsync_post_command} --exclude-from=${rsync_blacklist} \
${mirror}${mirropath}/${repo} ${repodir}/${repo}
- msg2 "Making pending list for ${repo} ${arch}"
- run_python_cmd "mkpending.py -r ${repo} -b ${repodir}/${repo}/os/${arch}"
msg2 "Cleaning ${repo} ${arch}"
+ # This also generates pending lists
run_python_cmd "clean_repo.py -b ${repodir}/${repo}/os/${arch}/${repo}.db.tar.gz -d ${repodir}/${repo}/os/${arch}/"
done
done
diff --git a/mkpending.py b/mkpending.py
deleted file mode 100755
index 6022206..0000000
--- a/mkpending.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-from repm.filter import *
-import argparse
-
-def make_pending(path_to_db, repo, prefix=config["pending"]):
- """ Determine wich packages are pending for license auditing."""
- filename=prefix + "-" + repo + ".txt"
- try:
- fsock=open(filename, "rw")
- if os.path.isfile(filename):
- pkgs=[pkg for pkg in packages_iterable if pkg["name"] not in
- listado(filename)]
- fsock.write("\n".join([pkg["name"] + ":" + pkg["license"]
- for pkg in pkgs]) + "\n")
- except(IOError):
- print("Can't read %s" % filename)
- exit(1)
- finally:
- fsock.close()
-
- if "~" in path_to_db:
- path_to_db=(os.path.expanduser(path_to_db))
-
- packages_iterable=pkginfo_from_db(path_to_db)
- search = tuple(listado(config["blacklist"]) +
- listado(config["whitelist"]))
-
- pkgs=[pkg for pkg in packages_iterable
- if "custom" in pkg["license"]
- and pkg["name"] not in search]
- return pkgs
-
-def write_pending(packages_iterable, repo, prefix=config["pending"]):
- """ Write a pending file with the info of the packages """
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(
- description="Clean a repo db and packages")
- parser.add_argument("-b", "--database", type=str, required=True,
- help="database to check")
- parser.add_argument("-r", "--repo", type=str, required=True,
- help="repo of database")
- args=parser.parse_args()
-
- if args.database and args.repo:
- pkgs=make_pending(args.database)
- write_pending(pkgs, args.repo)
- else:
- parser.print_help()
diff --git a/test/test_filter.py b/test/test_filter.py
index 5601d57..b6d5766 100644
--- a/test/test_filter.py
+++ b/test/test_filter.py
@@ -142,7 +142,8 @@ class generateRsyncBlacklist(unittest.TestCase):
self.assertEqual(listado("blacklist_sample"),["alex","gmime22"])
def testExcludeFiles(self):
- a=generate_exclude_list_from_blacklist(self.example_package_list,listado("blacklist_sample"),debug=True)
+ a=rsyncBlacklist_from_blacklist(self.example_package_list,
+ listado("blacklist_sample"))
b=[self.example_package_list[0]["location"],self.example_package_list[2]["location"]]
self.assertEqual(a,b)
@@ -155,7 +156,9 @@ class pkginfo_from_descKnownValues(unittest.TestCase):
"license" : "GPL",
"location": "binutils-2.21-4-x86_64.pkg.tar.xz",
"depends" : False,}
- pkggen=pkginfo_from_desc("desc")
+ fsock=open("desc")
+ pkggen=pkginfo_from_desc(fsock.read())
+ fsock.close()
def testPkginfoFromDesc(self):
if self.pkggen is None:
self.fail("return value is None")