-rw-r--r--                 .gitignore              3
-rwxr-xr-x                 clean_repo.py          85
-rwxr-xr-x [-rw-r--r--]    config.py              96
-rwxr-xr-x                 config.sh              26
-rwxr-xr-x                 cptobin.sh              2
-rw-r--r--                 db-functions          487
-rwxr-xr-x [-rw-r--r--]    filter.py              86
-rwxr-xr-x                 get_license.sh         10
-rwxr-xr-x                 libremessages          77
-rw-r--r--                 local_config.example   35
-rwxr-xr-x                 main.sh                41
-rw-r--r--                 pato2.py              207
-rw-r--r--                 test/test_filter.py    13
13 files changed, 892 insertions, 276 deletions
diff --git a/.gitignore b/.gitignore
index e645833..69dec40 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
*~
-*.pyc
\ No newline at end of file
+*.pyc
+local_config
diff --git a/clean_repo.py b/clean_repo.py
new file mode 100755
index 0000000..eccfd01
--- /dev/null
+++ b/clean_repo.py
@@ -0,0 +1,85 @@
+#! /usr/bin/python
+#-*- encoding: utf-8 -*-
+from repm.filter import *
+import argparse
+
+def mkpending(path_to_db, repo, prefix=config["pending"]):
+    """ Determine which packages are pending license auditing. """
+ if "~" in path_to_db:
+ path_to_db=(os.path.expanduser(path_to_db))
+
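+    # names on the blacklist or whitelist are exempt from pending auditing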
+ search = tuple(listado(config["blacklist"]) +
+ listado(config["whitelist"]))
+
+ pkgs=list(pkginfo_from_db(path_to_db))
+
+ filename=prefix + "-" + repo + ".txt"
+    try:
+        fsock=open(filename, "r+")
+        pkgs=[pkg for pkg in pkgs
+              if pkg["name"] not in listado(filename)]
+        for line in fsock.readlines():
+            if line:
+                pkg=Package()
+                pkg["name"]=line.split(":")[0]
+                pkg["license"]=":".join(line.split(":")[1:])
+                pkgs.append(pkg)
+        pkgs=[pkg for pkg in pkgs if pkg["name"] not in search
+              and "custom" in pkg["license"]]
+        fsock.seek(0)
+        fsock.write("\n".join([pkg["name"] + ":" + pkg["license"]
+                               for pkg in pkgs]) + "\n")
+        fsock.truncate()
+    except(IOError):
+        raise NonValidFile("Can't read or write %s" % filename)
+    finally:
+        if "fsock" in locals():
+            fsock.close()
+ return pkgs
+
+def remove_from_blacklist(path_to_db, blacklisted_names,
+ debug=config["debug"]):
+ """ Check the blacklist and remove packages on the db"""
+ if "~" in path_to_db:
+ path_to_db=(os.path.expanduser(path_to_db))
+
+ pkgs=[pkg for pkg in pkginfo_from_db(path_to_db) if
+ pkg["name"] in blacklisted_names]
+ if pkgs:
+        lista=" ".join([pkg["name"] for pkg in pkgs])
+        cmd = "repo-remove " + path_to_db + " " + lista
+        printf(cmd)
+        a = check_output(cmd.split())
+ if debug:
+ printf(a)
+ return pkgs, cmd
+
+def cleanup_nonfree_in_dir(directory, blacklisted_names):
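+    """ Delete package files in a directory whose names are blacklisted. """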
+ if "~" in directory:
+ directory=(os.path.expanduser(directory))
+ pkgs=pkginfo_from_files_in_dir(directory)
+ for package in pkgs:
+ if package["name"] in blacklisted_names:
+ os.remove(package["location"])
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser(
+ description="Clean a repo db and packages")
+ parser.add_argument("-b", "--database", type=str,
+                        help="database to clean")
+ parser.add_argument("-d", "--directory", type=str,
+ help="directory to clean")
+ args=parser.parse_args()
+
+    if args.database:
+        repo=os.path.basename(args.database).split(".")[0]
+        remove_from_blacklist(args.database,
+                              tuple(listado(config["blacklist"]) +
+                                    listado(config["pending"] +
+                                            "-" + repo + ".txt")))
+        mkpending(args.database, repo)
+
+ if args.directory:
+ cleanup_nonfree_in_dir(args.directory,
+ listado(config["blacklist"]))
+
+ if not args.directory and not args.database:
+ parser.print_help()
diff --git a/config.py b/config.py
index 3e00eb0..8b48c66 100644..100755
--- a/config.py
+++ b/config.py
@@ -1,58 +1,69 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-from user import home
-import commands
+try:
+ from subprocess import check_output
+except(ImportError):
+ from commands import getoutput
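+    # Python < 2.7 fallback: emulate check_output with commands.getoutput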
+ def check_output(*popenargs,**kwargs):
+ cmd=" ".join(*popenargs)
+ return getoutput(cmd)
import os
-time__ = commands.getoutput("date +%Y%m%d-%H:%M")
+stringvars=("mirror", "mirrorpath", "logname", "tempdir", "archdb",
+ "repodir", "blacklist", "whitelist", "pending",
+ "rsync_blacklist",)
+listvars=("repo_list", "dir_list", "arch_list", "other",)
+boolvars=("output", "debug",)
-# Mirror Parameters
-mirror = "mirrors.eu.kernel.org"
-mirrorpath = "::mirrors/archlinux"
+config=dict()
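+# every value below is read from environment variables exported by config.sh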
-# Directories and files
+def exit_if_none(var):
+ if os.environ.get(var) is None:
+ exit("%s is not defined" % var)
-## Optionals
-path = home + "/parabolagnulinux.org"
-docs = path + "/docs"
-logdir = path + "/log"
+for var in stringvars:
+ exit_if_none(var)
+ config[var]=os.environ.get(var)
-## Must be defined
-logname= logdir + "/" + time__ + "-repo-maintainer.log"
-freedir= path + "/free/"
-repodir= path + "/repo"
-tmp = home + "/tmp"
-archdb = tmp + "/db"
+for var in listvars:
+ exit_if_none(var)
+ config[var]=tuple(os.environ.get(var).split(":"))
-free_path= path + "/free/"
-
-# Repo, arch, and other folders to use for repo
-# This are tuples, so **always keep a comma before closing parenthesis **
-repo_list = ("core", "extra", "community", "testing", "community-testing", "multilib",)
-dir_list = ("pool",)
-arch_list = ("i686", "x86_64",)
-other = ("any",)
-
-# Output
-output = True
-verbose = True
-
-# Files
-blacklist = docs + "/blacklist.txt"
-whitelist = docs + "/whitelist.txt"
-pending = docs + "/pending"
-rsync_blacklist = docs + "/rsyncBlacklist"
+for var in boolvars:
+ exit_if_none(var)
+ if os.environ.get(var) == "True":
+ config[var]=True
+ elif os.environ.get(var) =="False":
+ config[var]=False
+ else:
+ print('%s is not True or False' % var)
# Rsync commands
-
rsync_list_command="rsync -a --no-motd --list-only "
-rsync_update_command="rsync -av --delay-updates --exclude='*.{abs|db}.tar.*' "
-rsync_post_command="rsync -av --delete --exclude='*.abs.tar.*' "
+
+def printf(text,output=config["output"]):
+    """ Append the text to the log file and optionally print it on screen. """
+ log_file = open(config["logname"], 'a')
+ log_file.write("\n" + str(text) + "\n")
+ log_file.close()
+ if output:
+ print (str(text) + "\n")
+
+del exit_if_none
# Classes and Exceptions
-class NonValidFile(ValueError): pass
-class NonValidDir(ValueError): pass
-class NonValidCommand(ValueError): pass
+class NonValidFile(ValueError):
+    def __init__(self, *args):
+        ValueError.__init__(self, *args)
+        printf(" ".join(map(str, args)))
+class NonValidDir(ValueError):
+    def __init__(self, *args):
+        ValueError.__init__(self, *args)
+        printf(" ".join(map(str, args)))
+class NonValidCommand(ValueError):
+    def __init__(self, *args):
+        ValueError.__init__(self, *args)
+        printf(" ".join(map(str, args)))
class Package:
""" An object that has information about a package. """
@@ -91,3 +102,6 @@ class Package:
else:
return True
+if __name__=="__main__":
+ for key in config.keys():
+ print("%s : %s" % (key,config[key]))
diff --git a/config.sh b/config.sh
new file mode 100755
index 0000000..9a44f50
--- /dev/null
+++ b/config.sh
@@ -0,0 +1,26 @@
+#!/bin/sh
+# -*- coding: utf-8 -*-
+source local_config
+
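+# run a command with every local_config setting exported in its environment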
+function run_python_cmd {
+ env \
+ mirror=${mirror} \
+ mirrorpath=${mirrorpath} \
+ logname=${logname} \
+ tempdir=${tempdir} \
+ archdb=${archdb} \
+ repodir=${repodir} \
+ blacklist=${blacklist} \
+ whitelist=${whitelist} \
+ pending=${pending} \
+ rsync_blacklist=${rsync_blacklist} \
+ repo_list=${repo_list} \
+ dir_list=${dir_list} \
+ arch_list=${arch_list} \
+ other=${other} \
+ output=${output} \
+ debug=${debug} \
+ $1
+}
+
+source libremessages
\ No newline at end of file
diff --git a/cptobin.sh b/cptobin.sh
new file mode 100755
index 0000000..068d765
--- /dev/null
+++ b/cptobin.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+cp -f clean_repo.py config.py config.sh get_license.sh main.sh mkpending.py filter.py local_config ~/usr/bin/
\ No newline at end of file
diff --git a/db-functions b/db-functions
new file mode 100644
index 0000000..1780e4f
--- /dev/null
+++ b/db-functions
@@ -0,0 +1,487 @@
+#!/bin/bash
+
+# Some PKGBUILDs need CARCH to be set
+CARCH="x86_64"
+
+# Useful functions
+UMASK=""
+set_umask () {
+ [ "$UMASK" == "" ] && UMASK="$(umask)"
+ export UMASK
+ umask 002
+}
+
+restore_umask () {
+ umask $UMASK >/dev/null
+}
+
+# set up general environment
+WORKDIR=$(mktemp -d /tmp/$(basename $0).XXXXXXXXXX)
+LOCKS=()
+
+# check if messages are to be printed using color
+unset ALL_OFF BOLD BLUE GREEN RED YELLOW
+if [[ -t 2 ]]; then
+ ALL_OFF="$(tput sgr0)"
+ BOLD="$(tput bold)"
+ BLUE="${BOLD}$(tput setaf 4)"
+ GREEN="${BOLD}$(tput setaf 2)"
+ RED="${BOLD}$(tput setaf 1)"
+ YELLOW="${BOLD}$(tput setaf 3)"
+fi
+readonly ALL_OFF BOLD BLUE GREEN RED YELLOW
+
+plain() {
+ local mesg=$1; shift
+ printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+msg() {
+ local mesg=$1; shift
+ printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+msg2() {
+ local mesg=$1; shift
+ printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@"
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "${YELLOW}==> WARNING:${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+	printf "${RED}==> ERROR:${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+##
+# usage : in_array( $needle, $haystack )
+# return : 0 - found
+# 1 - not found
+##
+in_array() {
+ local needle=$1; shift
+ [[ -z $1 ]] && return 1 # Not Found
+ local item
+ for item in "$@"; do
+ [[ $item = $needle ]] && return 0 # Found
+ done
+ return 1 # Not Found
+}
+
+script_lock() {
+ local LOCKDIR="$TMPDIR/.scriptlock.$(basename $0)"
+ if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
+ local _owner="$(stat -c %U $LOCKDIR)"
+ error "Script $(basename $0) is already locked by $_owner."
+ exit 1
+ else
+ set_umask
+ return 0
+ fi
+}
+
+script_unlock() {
+ local LOCKDIR="$TMPDIR/.scriptlock.$(basename $0)"
+ if [ ! -d "$LOCKDIR" ]; then
+ warning "Script $(basename $0) was not locked!"
+ restore_umask
+ return 1
+ else
+ rmdir "$LOCKDIR"
+ restore_umask
+ return 0
+ fi
+}
+
+cleanup() {
+ local l
+ local repo
+ local arch
+
+ trap - EXIT INT QUIT TERM
+ for l in ${LOCKS[@]}; do
+ repo=${l%.*}
+ arch=${l#*.}
+ if [ -d "$TMPDIR/.repolock.$repo.$arch" ]; then
+ msg "Removing left over lock from [${repo}] (${arch})"
+ repo_unlock $repo $arch
+ fi
+ done
+ if [ -d "$TMPDIR/.scriptlock.$(basename $0)" ]; then
+ msg "Removing left over lock from $(basename $0)"
+ script_unlock
+ fi
+ rm -rf "$WORKDIR"
+ [ "$1" ] && exit $1
+}
+
+abort() {
+ msg 'Aborting...'
+ cleanup 0
+}
+
+die() {
+ error "$*"
+ cleanup 1
+}
+
+trap abort INT QUIT TERM HUP
+trap cleanup EXIT
+
+
+#repo_lock <repo-name> <arch> [timeout]
+repo_lock () {
+ local LOCKDIR="$TMPDIR/.repolock.$1.$2"
+ local LOCKFILE="${FTP_BASE}/${1}/os/${2}/${1}${DBEXT}.lck"
+ local _count
+ local _trial
+ local _timeout
+ local _lockblock
+ local _owner
+
+ # This is the lock file used by repo-add and repo-remove
+ if [ -f "${LOCKFILE}" ]; then
+ error "Repo [${1}] (${2}) is already locked by repo-{add,remove} process $(cat $LOCKFILE)"
+ return 1
+ fi
+
+ if [ $# -eq 2 ]; then
+ _lockblock=true
+ _trial=0
+ elif [ $# -eq 3 ]; then
+ _lockblock=false
+ _timeout=$3
+ let _trial=$_timeout/$LOCK_DELAY
+ fi
+
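+	# poll for the lock, waiting LOCK_DELAY seconds between attempts until the timeout expires (or forever when blocking)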
+ _count=0
+ while [ $_count -le $_trial ] || $_lockblock ; do
+ if ! mkdir "$LOCKDIR" >/dev/null 2>&1 ; then
+ _owner="$(stat -c %U $LOCKDIR)"
+ warning "Repo [${1}] (${2}) is already locked by $_owner. "
+ msg2 "Retrying in $LOCK_DELAY seconds..."
+ else
+ LOCKS[${#LOCKS[*]}]="$1.$2"
+ set_umask
+ return 0
+ fi
+ sleep $LOCK_DELAY
+ let _count=$_count+1
+ done
+
+ error "Repo [${1}] (${2}) is already locked by $_owner. Giving up!"
+ return 1
+}
+
+repo_unlock () { #repo_unlock <repo-name> <arch>
+ local LOCKDIR="$TMPDIR/.repolock.$1.$2"
+ if [ ! -d "$LOCKDIR" ]; then
+ warning "Repo lock [${1}] (${2}) was not locked!"
+ restore_umask
+ return 1
+ else
+ rmdir "$LOCKDIR"
+ restore_umask
+ return 0
+ fi
+}
+
+# usage: _grep_pkginfo pkgfile pattern
+_grep_pkginfo() {
+ local _ret
+
+ _ret="$(bsdtar -xOqf "$1" .PKGINFO | /bin/grep -m 1 "^${2} = ")"
+ echo "${_ret#${2} = }"
+}
+
+
+# Get the package base or name as fallback
+getpkgbase() {
+ local _base
+
+ _base="$(_grep_pkginfo "$1" "pkgbase")"
+ if [ -z "$_base" ]; then
+ getpkgname "$1"
+ else
+ echo "$_base"
+ fi
+}
+
+issplitpkg() {
+ local _base
+
+ _base="$(_grep_pkginfo "$1" "pkgbase")"
+ if [ -z "$_base" ]; then
+ return 1
+ else
+ return 0
+ fi
+}
+
+# Get the package name
+getpkgname() {
+ local _name
+
+ _name="$(_grep_pkginfo "$1" "pkgname")"
+ if [ -z "$_name" ]; then
+ error "Package '$1' has no pkgname in the PKGINFO. Fail!"
+ exit 1
+ fi
+
+ echo "$_name"
+}
+
+# Get the pkgver-pkgrel of this package
+getpkgver() {
+ local _ver
+
+ _ver="$(_grep_pkginfo "$1" "pkgver")"
+ if [ -z "$_ver" ]; then
+ error "Package '$1' has no pkgver in the PKGINFO. Fail!"
+ exit 1
+ fi
+
+ echo "$_ver"
+}
+
+getpkgarch() {
+ local _ver
+
+ _ver="$(_grep_pkginfo "$1" "arch")"
+ if [ -z "$_ver" ]; then
+ error "Package '$1' has no arch in the PKGINFO. Fail!"
+ exit 1
+ fi
+
+ echo "$_ver"
+}
+
+getpkgfile() {
+ if [[ ${#} -ne 1 ]]; then
+ error 'No canonical package found!'
+ exit 1
+ elif [ ! -f "${1}" ]; then
+ error "Package ${1} not found!"
+ exit 1
+ fi
+
+ echo ${1}
+}
+
+getpkgfiles() {
+ local f
+ if [ ! -z "$(echo ${@%\.*} | sed "s/ /\n/g" | sort | uniq -D)" ]; then
+ error 'Duplicate packages found!'
+ exit 1
+ fi
+
+ for f in ${@}; do
+ if [ ! -f "${f}" ]; then
+ error "Package ${f} not found!"
+ exit 1
+ fi
+ done
+
+ echo ${@}
+}
+
+check_pkgfile() {
+ local pkgfile=$1
+
+ local pkgname="$(getpkgname ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgver="$(getpkgver ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgarch="$(getpkgarch ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+
+ in_array "${pkgarch}" ${ARCHES[@]} 'any' || return 1
+
+ if echo "$(basename ${pkgfile})" | grep -q "${pkgname}-${pkgver}-${pkgarch}"; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+check_pkgsvn() {
+ local pkgfile="${1}"
+ local _pkgbase="$(getpkgbase ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local _pkgname="$(getpkgname ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local _pkgver="$(getpkgver ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local _pkgarch="$(getpkgarch ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local repo="${2}"
+
+ in_array "${repo}" ${PKGREPOS[@]} || return 1
+
+ if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
+ mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
+ svn export -q "${SVNREPO}/${_pkgbase}/repos/${repo}-${_pkgarch}/PKGBUILD" \
+ "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null
+ [ $? -ge 1 ] && return 1
+ fi
+
+ local svnver="$(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo "${pkgver}-${pkgrel}")"
+ [ "${svnver}" == "${_pkgver}" ] || return 1
+
+ local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo ${pkgname[@]}))
+ in_array "${_pkgname}" ${svnnames[@]} || return 1
+
+ return 0
+}
+
+check_splitpkgs() {
+ local repo="${1}"
+ shift
+ local pkgfiles=(${@})
+ local pkgfile
+ local pkgdir
+ local svnname
+
+ mkdir -p "${WORKDIR}/check_splitpkgs/"
+ pushd "${WORKDIR}/check_splitpkgs" >/dev/null
+
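+	# record the staged split-package names and the names declared in each PKGBUILD, then compare them below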
+ for pkgfile in ${pkgfiles[@]}; do
+ issplitpkg "${pkgfile}" || continue
+ local _pkgbase="$(getpkgbase ${pkgfile})"
+ msg2 "Checking $_pkgbase"
+ local _pkgname="$(getpkgname ${pkgfile})"
+ local _pkgarch="$(getpkgarch ${pkgfile})"
+ mkdir -p "${repo}/${_pkgarch}/${_pkgbase}"
+ echo "${_pkgname}" >> "${repo}/${_pkgarch}/${_pkgbase}/staging"
+
+ if [ ! -f "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" ]; then
+ mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}"
+
+ cp -r ${SVNREPO}/$repo/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null 2>&1 || \
+ cp -r ${SVNREPO}/libre/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}" >/dev/null 2>&1 || \
+ cp -r ${SVNREPO}/libre-testing/$_pkgbase/PKGBUILD "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/$_pkgbase">/dev/null 2>&1
+
+ [[ $? -ge 1 ]] && {
+ echo "Failed $_pkgbase-$_pkgver-$_pkgarch"
+ return 1
+ }
+ fi
+
+ local svnnames=($(. "${WORKDIR}/pkgbuilds/${repo}-${_pkgarch}/${_pkgbase}"; echo ${pkgname[@]}))
+ for svnname in ${svnnames[@]}; do
+ echo "${svnname}" >> "${repo}/${_pkgarch}/${_pkgbase}/svn"
+ done
+ done
+ popd >/dev/null
+
+ for pkgdir in "${WORKDIR}/check_splitpkgs/${repo}"/*/*; do
+ [ ! -d "${pkgdir}" ] && continue
+ sort -u "${pkgdir}/staging" -o "${pkgdir}/staging"
+ sort -u "${pkgdir}/svn" -o "${pkgdir}/svn"
+ if [ ! -z "$(comm -13 "${pkgdir}/staging" "${pkgdir}/svn")" ]; then
+ return 1
+ fi
+ done
+
+ return 0
+}
+
+check_pkgrepos() {
+ local pkgfile=$1
+
+ local pkgname="$(getpkgname ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgver="$(getpkgver ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+ local pkgarch="$(getpkgarch ${pkgfile})"
+ [ $? -ge 1 ] && return 1
+
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1
+ [ -f "${FTP_BASE}/${PKGPOOL}/$(basename ${pkgfile})" ] && return 1
+
+ local repo
+ local arch
+ for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkgname}-${pkgver}-${pkgarch}"${PKGEXT} ] && return 1
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/$(basename ${pkgfile})" ] && return 1
+ done
+ done
+
+ return 0
+}
+
+#usage: chk_license ${license[@]}
+chk_license() {
+ local l
+ for l in ${@}; do
+ in_array ${l} ${ALLOWED_LICENSES[@]} && return 0
+ done
+
+ return 1
+}
+
+check_repo_permission() {
+ local repo=$1
+
+ [ ${#PKGREPOS[@]} -eq 0 ] && return 1
+ [ -z "${PKGPOOL}" ] && return 1
+
+ in_array "${repo}" ${PKGREPOS[@]} || return 1
+
+ [ -w "$FTP_BASE/${PKGPOOL}" ] || return 1
+
+ local arch
+	for arch in ${ARCHES[@]}; do
+ local dir="${FTP_BASE}/${repo}/os/${arch}/"
+ [ -w "${dir}" ] || return 1
+ [ -f "${dir}"${repo}${DBEXT} -a ! -w "${dir}"${repo}${DBEXT} ] && return 1
+ done
+
+ return 0
+}
+
+set_repo_permission() {
+ local repo=$1
+ local arch=$2
+ local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
+
+ if [ -w "${dbfile}" ]; then
+ local group=$(stat --printf='%G' "$(dirname "${dbfile}")")
+ chgrp $group "${dbfile}" || error "Could not change group of ${dbfile} to $group"
+ chmod g+w "${dbfile}" || error "Could not set write permission for group $group to ${dbfile}"
+ else
+ error "You don't have permission to change ${dbfile}"
+ fi
+}
+
+arch_repo_add() {
+ local repo=$1
+ local arch=$2
+ local pkgs=(${@:3})
+
+ # package files might be relative to repo dir
+ pushd "${FTP_BASE}/${repo}/os/${arch}" >/dev/null
+ repo-add -q "${repo}${DBEXT}" ${pkgs[@]} >/dev/null \
+ || error "repo-add ${repo}${DBEXT} ${pkgs[@]}"
+ popd >/dev/null
+ set_repo_permission "${repo}" "${arch}"
+}
+
+arch_repo_remove() {
+ local repo=$1
+ local arch=$2
+ local pkgs=(${@:3})
+ local dbfile="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
+
+ if [ ! -f "${dbfile}" ]; then
+ error "No database found at '${dbfile}'"
+ return 1
+ fi
+ repo-remove -q "${dbfile}" ${pkgs[@]} >/dev/null \
+ || error "repo-remove ${dbfile} ${pkgs[@]}"
+ set_repo_permission "${repo}" "${arch}"
+}
diff --git a/filter.py b/filter.py
index 668822b..1d70a63 100644..100755
--- a/filter.py
+++ b/filter.py
@@ -4,6 +4,18 @@ from glob import glob
from repm.config import *
from repm.pato2 import *
+def listado(filename,start=0,end=None):
+    """ Read a package list from a file; each line is a colon-separated
+    record and start/end select which fields are returned. """
+    fsock = open(filename,"r")
+    lista = fsock.read().split("\n")
+    fsock.close()
+    if end is not None:
+        return [":".join(pkg.split(":")[start:end]).rstrip()
+                for pkg in lista if pkg]
+    else:
+        return [pkg.split(":")[start].rstrip()
+                for pkg in lista if pkg]
+
def pkginfo_from_filename(filename):
""" Generates a Package object with info from a filename,
filename can be relative or absolute
@@ -26,7 +38,7 @@ def pkginfo_from_filename(filename):
pkg["name"] = "-".join(fileattrs)
return pkg
-def pkginfo_from_desc(filename):
+def pkginfo_from_desc(info_from_desc, pkg=Package()):
""" Returns pkginfo from desc file.
Parameters:
@@ -36,14 +48,7 @@ def pkginfo_from_desc(filename):
Returns:
----------
pkg -> Package object"""
- if not os.path.isfile(filename):
- raise NonValidFile
- try:
- f=open(filename)
- info=f.read().rsplit()
- finally:
- f.close()
- pkg = Package()
+ info=info_from_desc.rsplit()
info_map={"name" :("%NAME%" , None),
"version" :("%VERSION%" , 0 ),
"release" :("%VERSION%" , 1 ),
@@ -116,10 +121,38 @@ def pkginfo_from_files_in_dir(directory):
return tuple(package_list)
def pkginfo_from_db(path_to_db):
- """ """
+ """ Get pkginfo from db.
+
+ Parameters:
+ ----------
+ path_to_db -> str Path to file
-def generate_exclude_list_from_blacklist(packages_iterable, blacklisted_names,
- exclude_file=rsync_blacklist, debug=verbose):
+ Output:
+ ----------
+ package_list -> tuple of Package objects"""
+ package_list=list()
+
+ if not os.path.isfile(path_to_db):
+ raise NonValidFile(path_to_db + " is not a file")
+
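+    # each package entry in the db tarball is a directory containing a "desc" metadata file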
+    try:
+        dbsock = tarfile.open(path_to_db, 'r:gz')
+        desc_files=[desc for desc in dbsock.getnames()
+                    if "/desc" in desc]
+        for name in desc_files:
+            desc=dbsock.extractfile(name)
+            package_list.append(pkginfo_from_desc(desc.read()))
+    except tarfile.ReadError:
+        raise NonValidFile("No valid db_file %s or not readable"
+                           % path_to_db)
+    finally:
+        if "dbsock" in locals():
+            dbsock.close()
+    return package_list
+
+def rsyncBlacklist_from_blacklist(packages_iterable,
+ blacklisted_names,
+ exclude_file=config["rsync_blacklist"]):
""" Generate an exclude list for rsync
Parameters:
@@ -132,26 +165,23 @@ def generate_exclude_list_from_blacklist(packages_iterable, blacklisted_names,
Output:
----------
None """
- a=list()
-
- for package in packages_iterable:
- if not isinstance(package, Package):
- raise ValueError(" %s is not a Package object " % package)
- if package["name"] in blacklisted_names:
- a.append(package["location"])
+ pkgs=[pkg["location"] for pkg in packages_iterable
+ if isinstance(pkg, Package)
+ and pkg["name"] in blacklisted_names]
- if debug:
- return a
try:
fsock = open(exclude_file,"w")
- try:
- fsock.write("\n".join(a))
- finally:
- fsock.close()
+ fsock.write("\n".join(pkgs) + "\n")
except IOError:
- printf("%s wasnt written" % blacklist_file)
+ printf("%s wasnt written" % exclude_file)
+ exit(1)
+ finally:
+ fsock.close()
+ return pkgs
+
if __name__ == "__main__":
- a=run_rsync(rsync_list_command)
+ cmd=generate_rsync_command(rsync_list_command)
+ a=run_rsync(cmd)
packages=pkginfo_from_rsync_output(a)
- generate_exclude_list_from_blacklist(packages,listado(blacklist))
+    rsyncBlacklist_from_blacklist(packages,listado(config["blacklist"]))
diff --git a/get_license.sh b/get_license.sh
index a7241a1..0da58cb 100755
--- a/get_license.sh
+++ b/get_license.sh
@@ -31,12 +31,12 @@ rm -rf $dir/*
tempdir=$(mktemp -d)
cd $tempdir
-a=($(cut -d: -f1 $docs/pending*.txt))
-echo ${a[@]}
+pending=($(cut -d: -f1 $docs/pending*.txt))
+echo ${pending[@]}
-for x in ${a[@]}; do
- b=( $(ls $repo/*/os/*/$x*) )
- for y in ${b[@]}; do
+for pkg in ${pending[@]}; do
+ pkg_in_repo=( $(ls ${repo}/*/os/*/${pkg}*) )
+ for y in ${pkg_in_repo[@]}; do
echo "chmod +r $y"
chmod +r $y
echo "tar -xf $y usr/share/licenses"
diff --git a/libremessages b/libremessages
new file mode 100755
index 0000000..9fbbc2b
--- /dev/null
+++ b/libremessages
@@ -0,0 +1,77 @@
+# Copyright (c) 2006-2010 Pacman Development Team <pacman-dev@archlinux.org>
+# Copyright (c) 2002-2006 by Judd Vinet <jvinet@zeroflux.org>
+# Copyright (c) 2005 by Aurelien Foret <orelien@chez.com>
+# Copyright (c) 2006 by Miklos Vajna <vmiklos@frugalware.org>
+# Copyright (c) 2005 by Christian Hamar <krics@linuxforum.hu>
+# Copyright (c) 2006 by Alex Smith <alex@alex-smith.me.uk>
+# Copyright (c) 2006 by Andras Voroskoi <voroskoi@frugalware.org>
+# Copyright (c) 2011 by Joshua Haase <hahj87@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+# gettext initialization
+export TEXTDOMAIN='libretools'
+export TEXTDOMAINDIR='/usr/share/locale'
+
+# check if messages are to be printed using color
+unset ALL_OFF BOLD BLUE GREEN RED YELLOW
+
+if tput setaf 0 &>/dev/null; then
+ ALL_OFF="$(tput sgr0)"
+ BOLD="$(tput bold)"
+ BLUE="${BOLD}$(tput setaf 4)"
+ GREEN="${BOLD}$(tput setaf 2)"
+ RED="${BOLD}$(tput setaf 1)"
+ YELLOW="${BOLD}$(tput setaf 3)"
+ PURPLE="${ALL_OFF}$(tput setaf 5)"
+else
+ ALL_OFF="\033[1;0m"
+ BOLD="\033[1;1m"
+ BLUE="${BOLD}\033[1;34m"
+ GREEN="${BOLD}\033[1;32m"
+ RED="${BOLD}\033[1;31m"
+ YELLOW="${BOLD}\033[1;33m"
+ PURPLE="${BOLD}\033[1;30;40m"
+fi
+
+stdnull() {
+ local action=$1;
+ eval "${action} >/dev/null 2>&1"
+}
+
+plain() {
+ local mesg=$1; shift
+ printf "${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+msg() {
+ local mesg=$1; shift
+ printf "${GREEN}==>${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+msg2() {
+ local mesg=$1; shift
+ printf "${BLUE} ->${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+warning() {
+ local mesg=$1; shift
+ printf "${YELLOW}==> $(gettext "WARNING:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
+error() {
+ local mesg=$1; shift
+ printf "${RED}==> $(gettext "ERROR:")${ALL_OFF}${BOLD} ${mesg}${ALL_OFF}\n" "$@" >&2
+}
+
diff --git a/local_config.example b/local_config.example
new file mode 100644
index 0000000..8015ee2
--- /dev/null
+++ b/local_config.example
@@ -0,0 +1,35 @@
+# Mirror options
+mirror="mirrors.eu.kernel.org"
+mirrorpath="::mirrors/archlinux"
+
+# Directories: they should end without /
+## Optionals
+paraboladir=~/parabolagnulinux.org
+logtime=$(date -u +%Y%m%d-%H:%M)
+## Must be defined
+tempdir=~/tmp
+archdb=${tempdir}/db
+docs_dir=${paraboladir}/docs
+repodir=${paraboladir}/repo
+# End Directories
+
+# Files
+logname=${paraboladir}/log/${logtime}-repo-maintainer.log
+blacklist=${docs_dir}/blacklist.txt
+whitelist=${docs_dir}/whitelist.txt
+pending=${docs_dir}/pending
+rsync_blacklist=${docs_dir}/rsyncBlacklist
+
+# Repos, arches, and dirs for repo
+repo_list="core:extra:community:testing:community-testing:multilib"
+dir_list="pool"
+arch_list="i686:x86_64"
+other="any"
+
+# Output options
+output="True"
+debug="False"
+
+# Rsync commands
+rsync_update_command="rsync -av --delay-updates --exclude='*.{abs|db}.tar.*' "
+rsync_post_command="rsync -av --delete --exclude='*.abs.tar.*' "
\ No newline at end of file
diff --git a/main.sh b/main.sh
new file mode 100755
index 0000000..9f41a95
--- /dev/null
+++ b/main.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# -*- coding: utf-8 -*-
+
+source config.sh
+
+function mkrsexclude {
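+    # re-run filter.py until it succeeds in writing the rsync exclude list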
+    local error=1
+    while [ ${error} -ne 0 ]; do
+        run_python_cmd "filter.py"
+        error=$?
+    done
+}
+
+msg "Cleaning $tempdir"
+stdnull "rm -r $tempdir/* "
+
+msg "Generating exclude list for rsync"
+mkrsexclude
+
+msg "Syncing repos without delete"
+# rsync_update_command does not sync db or abs
+${rsync_update_command} --exclude-from=${rsync_blacklist} \
+    ${mirror}${mirrorpath}/{$(echo ${repo_list} | tr ':' ','),\
+ $(echo ${dir_list} | tr ':' ',')} ${repodir}
+
+msg "Syncing each repo and cleaning"
+msg2 "Remove pending files"
+stdnull "rm -rf ${pending}*"
+for repo in $(echo ${repo_list} | tr ':' ' '); do
+ for arch in $(echo ${arch_list} | tr ':' ' '); do
+ msg2 "Syncing ${repo} ${arch}"
+ ${rsync_post_command} --exclude-from=${rsync_blacklist} \
+        ${mirror}${mirrorpath}/${repo} ${repodir}/${repo}
+ msg2 "Cleaning ${repo} ${arch}"
+ # This also generates pending lists
+ run_python_cmd "clean_repo.py -b ${repodir}/${repo}/os/${arch}/${repo}.db.tar.gz -d ${repodir}/${repo}/os/${arch}/"
+ done
+done
+
+msg "Checking licenses"
+get_license.sh
diff --git a/pato2.py b/pato2.py
index 0d77d6b..6daa8b8 100644
--- a/pato2.py
+++ b/pato2.py
@@ -27,159 +27,15 @@
from repm.config import *
from repm.filter import *
import tarfile
-from glob import glob
from os.path import isdir, isfile
-def printf(text,output_=output):
- """Guarda el texto en la variable log y puede imprimir en pantalla."""
- log_file = open(logname, 'a')
- log_file.write("\n" + str(text) + "\n")
- log_file.close()
- if output_: print (str(text) + "\n")
-
-def listado(filename_):
- """Obtiene una lista de paquetes de un archivo."""
- archivo = open(filename_,"r")
- lista = archivo.read().split("\n")
- archivo.close()
- return [pkg.split(":")[0].rstrip() for pkg in lista if pkg]
-
-def db(repo_,arch_):
- """Construye un nombre para sincronizar una base de datos."""
- return "/%s/os/%s/%s.db.tar.gz" % (repo_, arch_, repo_)
-
-def packages(repo_, arch_, expr="*"):
- """ Get packages on a repo, arch folder """
- return tuple( glob( repodir + "/" + repo_ + "/os/" + arch_ + "/" + expr ) )
-
-def sync_all_repo(debug=verbose):
- cmd=generate_rsync_command(rsync_list_command)
- rsout=run_rsync(cmd)
- pkgs=pkginfo_from_rsync_output(rsout)
- generate_exclude_list_from_blacklist(pkgs,listado(blacklist),debug=False)
- cmd=generate_rsync_command(rsync_update_command,blacklist_file=rsync_blacklist)
- a=run_rsync(cmd)
- cmd=generate_rsync_command(rsync_post_command,blacklist_file=rsync_blacklist)
- b=run_rsync(cmd)
- if debug:
- printf(a)
- printf(b)
-
-def get_from_desc(desc, var,db_tar_file=False):
- """ Get a var from desc file """
- desc = desc.split("\n")
- return desc[desc.index(var)+1]
-
-def get_info(repo_,arch_,db_tar_file=False,verbose_=verbose):
- """ Makes a list of package name, file and license """
- info=list()
- # Extract DB tar.gz
- commands.getoutput("mkdir -p " + archdb)
- if not db_tar_file:
- db_tar_file = repodir + db(repo_,arch_)
- if isfile(db_tar_file):
- try:
- db_open_tar = tarfile.open(db_tar_file, 'r:gz')
- except tarfile.ReadError:
- printf("No valid db_file %s" % db_tar_file)
- return(tuple())
- else:
- printf("No db_file %s" % db_tar_file)
- return(tuple())
- for file in db_open_tar.getmembers():
- db_open_tar.extract(file, archdb)
- db_open_tar.close()
- # Get info from file
- for dir_ in glob(archdb + "/*"):
- if isdir(dir_) and isfile(dir_ + "/desc"):
- pkg_desc_file = open(dir_ + "/desc", "r")
- desc = pkg_desc_file.read()
- pkg_desc_file.close()
- info.append(( get_from_desc(desc,"%NAME%"),
- dir_.split("/")[-1],
- get_from_desc(desc,"%LICENSE%") ))
- if verbose_: printf(info)
- commands.getoutput("rm -r %s/*" % archdb)
- return tuple(info)
-
-def make_pending(repo_,arch_,info_):
- """ Si los paquetes no están en blacklist ni whitelist y la licencia contiene "custom" los agrega a pending"""
- search = tuple( listado(blacklist) + listado (whitelist) )
- if verbose: printf("blaclist + whitelist= " + str(search) )
- lista_=list()
- for (name,pkg_,license_) in info_:
- if "custom" in license_:
- if name not in search:
- lista_.append( (name, license_ ) )
- elif not name:
- printf( pkg_ + " package has no %NAME% attibute " )
- if verbose: printf( lista_ )
- a=open( pending + "-" + repo_ + ".txt", "w" ).write(
- "\n".join([name + ":" + license_ for (name,license_) in lista_]) + "\n")
-
-def remove_from_blacklist(repo_,arch_,info_,blacklist_):
- """ Check the blacklist and remove packages on the db"""
- lista_=list()
- pack_=list()
- for (name_, pkg_, license_) in info_:
- if name_ in blacklist_:
- lista_.append(name_)
- for p in packages(repo_,arch_,pkg_ + "*"):
- pack_.append(p)
- if lista_:
- lista_=" ".join(lista_)
- com_ = "repo-remove " + repodir + db(repo_,arch_) + " " + lista_
- printf(com_)
- a = commands.getoutput(com_)
- if verbose: printf(a)
-
-def cleanup_nonfree_in_dir(directory,blacklisted_names):
- pkgs=pkginfo_from_files_in_dir(directory)
- for package in pkgs:
- if package["name"] in blacklisted_names:
- os.remove(package["location"])
-
-def link(repo_,arch_,file_):
- """ Makes a link in the repo for the package """
- cmd_="ln -f " + file_ + " " + repodir + "/" + repo_ + "/os/" + arch_
- a=commands.getoutput(cmd_)
- if verbose:
- printf(cmd_ + a)
-
-def add_free_repo(verbose_=verbose):
- cmd_=os.path.join(home,"/usr/bin/sync-free")
- printf(cmd_)
- a=commands.getoutput(cmd_)
- if verbose_: printf(a)
- for repo_ in repo_list:
- for arch_ in arch_list:
- lista_=list()
- for file_ in glob(freedir + repo_ + "/os/" + arch_ + "/*.pkg.tar.*"):
- lista_.append(file_)
- for dir_ in other:
- for file_ in glob(freedir + repo_ + "/os/" + dir_ + "/*.pkg.tar.*"):
- lista_.append(file_)
-
- printf(lista_)
-
- if lista_:
- lista_=" ".join(lista_)
- if verbose: printf(lista_)
- cmd_="repo-add " + repodir + db(repo_,arch_) + " " + lista_
- printf(cmd_)
- a=commands.getoutput(cmd_)
- if verbose: printf(a)
-
-def get_licenses(verbose_=verbose):
- """ Extract the license from packages in repo_,arch_ and in pending_ file"""
- cmd_=home + "/usr/bin/get_license.sh"
- printf(cmd_)
- a=commands.getoutput(cmd_)
- if verbose_: printf(a)
-
-def generate_rsync_command(base_command, dir_list=(repo_list + dir_list), destdir=repodir,
- source=mirror+mirrorpath, blacklist_file=False):
- """ Generates an rsync command for executing it by combining all parameters.
+def generate_rsync_command(base_command,
+ dir_list=(config["repo_list"] +
+ config["dir_list"]),
+ destdir=config["repodir"],
+ source=config["mirror"] +config["mirrorpath"]):
+ """ Generates an rsync command for executing
+ it by combining all parameters.
Parameters:
----------
@@ -192,57 +48,16 @@ def generate_rsync_command(base_command, dir_list=(repo_list + dir_list), destdi
Return:
----------
rsync_command -> str """
- from os.path import isfile, isdir
-
- if blacklist_file and not isfile(blacklist_file):
- print(blacklist_file + " is not a file")
- raise NonValidFile
-
if not os.path.isdir(destdir):
print(destdir + " is not a directory")
raise NonValidDir
dir_list="{" + ",".join(dir_list) + "}"
+ return " ".join((base_command, os.path.join(source, dir_list),
+ destdir))
- if blacklist_file:
- return " ".join((base_command, "--exclude-from="+blacklist_file,
- os.path.join(source, dir_list), destdir))
- return " ".join((base_command, os.path.join(source, dir_list), destdir))
-
-def run_rsync(command,debug=verbose):
+def run_rsync(command,debug=config["debug"]):
""" Runs rsync and gets returns it's output """
if debug:
printf("rsync_command: " + command)
- return commands.getoutput(command)
-
-if __name__ == "__main__":
- from time import time
- start_time = time()
- def minute():
- return str(round((time() - start_time)/60, 1))
-
- printf(" Cleaning %s folder " % (tmp) )
- commands.getoutput("rm -r %s/*" % tmp)
- printf(" Syncing repo")
- sync_all_repo(True)
-
- printf(" Updating databases and pending files lists: minute %s \n" % minute() )
- for repo in repo_list:
- for arch in arch_list:
- printf( "\n" + repo + "-" + arch + "\n" )
- printf( "Get info: minute %s " % minute() )
- info=get_info(repo,arch)
- printf( "Make pending: minute %s" % minute() )
- make_pending(repo,arch,info)
- printf( "Update DB: minute %s" % minute() )
- remove_from_blacklist(
- repo, arch, info, tuple( listado(blacklist) + listado(pending + "-" + repo + ".txt") ) )
-
- printf("Adding Parabola Packages: minute %s\n" % minute() )
- add_free_repo(True)
-
- printf("Extracting licenses in pending: minute %s" % minute() )
- get_licenses()
-
- printf("\n\nDelay: %s minutes \n" % minute())
-
+ return check_output(command.split())
diff --git a/test/test_filter.py b/test/test_filter.py
index 1906b87..b6d5766 100644
--- a/test/test_filter.py
+++ b/test/test_filter.py
@@ -142,7 +142,8 @@ class generateRsyncBlacklist(unittest.TestCase):
self.assertEqual(listado("blacklist_sample"),["alex","gmime22"])
def testExcludeFiles(self):
- a=generate_exclude_list_from_blacklist(self.example_package_list,listado("blacklist_sample"),debug=True)
+ a=rsyncBlacklist_from_blacklist(self.example_package_list,
+ listado("blacklist_sample"))
b=[self.example_package_list[0]["location"],self.example_package_list[2]["location"]]
self.assertEqual(a,b)
@@ -155,7 +156,9 @@ class pkginfo_from_descKnownValues(unittest.TestCase):
"license" : "GPL",
"location": "binutils-2.21-4-x86_64.pkg.tar.xz",
"depends" : False,}
- pkggen=pkginfo_from_desc("desc")
+ fsock=open("desc")
+ pkggen=pkginfo_from_desc(fsock.read())
+ fsock.close()
def testPkginfoFromDesc(self):
if self.pkggen is None:
self.fail("return value is None")
@@ -169,21 +172,21 @@ class pkginfo_from_db(unittest.TestCase):
"release" : "2",
"arch" : "x86_64",
"license" : ("LGPL",),
- "location": "acl-2.2.49-2-x86_64.pkg.tar.xz"
+ "location": "acl-2.2.49-2-x86_64.pkg.tar.xz",
"depends" : ("attr>=2.4.41"),}
example_package_list[1].package_info={ "name" : "glibc",
"version" : "2.13",
"release" : "4",
"arch" : "x86_64",
"license" : ("GPL","LGPL"),
- "location": "glibc-2.13-4-x86_64.pkg.tar.xz"
+ "location": "glibc-2.13-4-x86_64.pkg.tar.xz",
"depends" : ("linux-api-headers>=2.6.37","tzdata",),}
example_package_list[2].package_info={ "name" : "",
"version" : "2.2.26",
"release" : "1",
"arch" : "x86_64",
"license" : False,
- "location": ""
+ "location": "",
"depends" : False,}