...
 
......@@ -143,32 +143,31 @@ upload_files() {
"${files[@]}" $_ssh_user@$_rsync_server:/srv/www/rsync.chakralinux.org/packages/$repo/$(get_arch)/
}
# downloads and signs the databases to the temporary folder
# upload the new files to the server and cleanup the temp folder
# remotely sign the database
# TODO: check the agent socket is passed and working
sign_online_database() {
# downloads (and optionally signs) the databases to the temporary folder
status_start "downloading and signing the database"
dir=$(mktemp -d)
wget -q $_rsync_server/packages/$1/$(get_arch)/$1.db.tar.xz -O "$dir/$1.db.tar.xz"
wget -q $_rsync_server/packages/$1/$(get_arch)/$1.db.tar.gz -O "$dir/$1.db.tar.gz"
gpg --batch --detach-sign "$dir/$1.db.tar.xz"
gpg --batch --detach-sign "$dir/$1.db.tar.gz"
# remove current signature file and remotely execute gpg --detach-sign command to sign the databases
# enable verbose to indicate the procedure
local _remote_socket=$(ssh $_ssh_user@$_rsync_server -p $_ssh_port gpgconf --list-dir agent-socket)
local _local_socket=$(gpgconf --list-dir agent-socket)
status_start "Remotely signing database: $1\n"
for ext in gz xz; do
ssh -R $_remote_socket:$_local_socket $_ssh_user@$_rsync_server -p $_ssh_port \
gpg --batch --yes --detach-sign packages/$1/$(get_arch)/$1.db.tar.$ext
done
newline
status_done
upload_files $1 "$dir/"*.db.tar.*
rm -r "$dir"
}
sign_online_pkg() {
# downloads (and optionally signs) the package to the temporary folder
status_start "downloading and signing the pkg " $2
dir=$(mktemp -d)
wget -q $_rsync_server/packages/$1/$(get_arch)/$2 -O "$dir/$2"
gpg --batch --detach-sign "$dir/$2"
# remotely execute gpg --detach-sign command to sign the package
local _remote_socket=$(ssh $_ssh_user@$_rsync_server -p $_ssh_port gpgconf --list-dir agent-socket)
local _local_socket=$(gpgconf --list-dir agent-socket)
status_start "Remotely signing pkg: $1/$2 \n"
ssh -R $_remote_socket:$_local_socket $_ssh_user@$_rsync_server -p $_ssh_port \
gpg -v --batch --yes --detach-sign "packages/$1/$(get_arch)/$2"
newline
status_done
upload_files $1 "$dir/"*.sig*
rm -r "$dir"
}
# copies the files from the temporary folder to a given folder and performs a repo-clean there
......
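The hunk above replaces local signing of freshly downloaded databases with remote signing over a forwarded gpg-agent socket. As a minimal sketch of that forwarding idea (host, user and paths here are placeholders, not the project's actual values):

```
# forward the local gpg-agent socket to the server and sign there;
# the private key never leaves the local machine
local_socket=$(gpgconf --list-dir agent-socket)
remote_socket=$(ssh packager@repo.example.org gpgconf --list-dir agent-socket)
ssh -R "$remote_socket:$local_socket" packager@repo.example.org \
    gpg --batch --yes --detach-sign packages/core/x86_64/core.db.tar.xz
```

This only works if the remote sshd allows Unix-socket forwarding and any stale remote socket is removed (or StreamLocalBindUnlink is enabled on the server), which is presumably what the TODO about checking the agent socket refers to.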
......@@ -47,6 +47,12 @@ warning() {
printf "\033[1;33m ::\033[1;0m\033[1;0m $*\033[1;0m\n"
}
bold() {
bold=$(tput bold)
normal=$(tput sgr0)
printf "${bold}$1${normal}"
}
error() {
printf "\033[1;31m ::\033[1;0m\033[1;0m $*\033[1;0m\n"
}
......
......@@ -153,7 +153,7 @@ if [ "$_args" = "" ] ; then
error " and a source repo, both target and source args are optional."
error " single names like «attica» or simple regexp like ^kde are allowed"
error " you can also provide a comma separated list like kde,calligra."
error " syntax: move.sh <pattern> <source repo> <target repo>"
error " syntax: move.sh <pattern1,pattern2,...> <source repo> <target repo>"
newline
exit 1
fi
......
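To make that syntax concrete, two hypothetical invocations (the repo names are only examples):

```
# move a single package from testing to desktop
./move.sh attica testing desktop
# move everything matching ^kde plus calligra
./move.sh ^kde,calligra testing desktop
```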
......@@ -56,7 +56,7 @@ done
get_upload_list() {
# noglob because apparently it is very difficult to generate find arguments and quote them properly
upload_list=( $(set -o noglob; find ~/_repo/ -name '*.pkg.tar.*' \( $(find_args ${args[@]}) \) -print) )
upload_list=( $(set -o noglob; find ~/_repo/$CHROOT -name '*.pkg.tar.*' \( $(find_args ${args[@]}) \) -print) )
if [[ -z "$upload_list" ]] ; then
error "No packages found in «_repo/$CHROOT», there's nothing to upload"
exit 1
......@@ -141,7 +141,7 @@ confirm_targets() {
local cnt_sig=$(echo "${pkgs[@]}" | tr ' ' '\n' | grep '.sig$' -c)
newline
warning "You are about to upload ($cnt) packages and ($cnt_sig) signatures to «$repo-$(get_arch)»:"
warning "You are about to upload ($cnt) packages and ($cnt_sig) signatures to «$(bold "$repo")-$(get_arch)»:"
echo "${pkgs[@]}" | tr ' ' '\n'
newline
......@@ -166,7 +166,7 @@ rm_old_pkgs() {
local -a duplicates
local -a repeated=( $(printf "%s\n" "${pkgnames[@]}" | sort | uniq --repeated) )
for p in ${repeated[@]}; do
local -a pkg=( $(ls /chakra/_repo/$repo/$p*z) )
local -a pkg=( $(ls -v /chakra/_repo/$CHROOT/$p*z) )
# drop last element
unset pkg[${#pkg[@]}-1]
......@@ -179,7 +179,7 @@ rm_old_pkgs() {
local -a pkgname_sigs=( ${filename_sigs[@]%-*-*-*} )
local -a repeated_sigs=( $(printf "%s\n" "${pkgname_sigs[@]}" | sort | uniq --repeated) )
for p in ${repeated_sigs[@]}; do
local -a sign=( $(ls /chakra/_repo/*/$p*sig) )
local -a sign=( $(ls -v /chakra/_repo/$CHROOT/$p*sig) )
# drop last element
unset sign[${#sign[@]}-1]
......@@ -193,7 +193,7 @@ rm_old_pkgs() {
newline
warning "There are old version of the following package(s) in the local repository:"
ls /chakra/_repo/*/${repeated[@]}*
(cd /chakra/_repo/$CHROOT/ && ls -v ${repeated[@]/%/*.xz})
newline
rm -i ${duplicates[@]}
......
......@@ -72,7 +72,7 @@ actual_chroot() {
local gnupg_chroot=chakra/.config/gnupg
if [[ -d $gnupg ]]; then
if [[ ! -d "$gnupg_chroot" ]]; then
warning "/chakra/config/.gnupg does not exist"
warning "/chakra/.config/gnupg does not exist"
msg "To be able to sign packages you will need to import your public key!"
msg "$ gpg --import /usr/share/chakra/signatures/<user>.asc"
install -dm700 $gnupg_chroot
......@@ -109,10 +109,8 @@ actual_chroot() {
msg "Found $ssh, will bind it to container!"
binds+=(--bind="$ssh":/chakra/.ssh)
if [[ -S "$SSH_AUTH_SOCK" ]]; then
local auth_sock_dir=$(dirname $SSH_AUTH_SOCK)
binds+=(--bind="$auth_sock_dir")
binds+=(--bind="$SSH_AUTH_SOCK")
setenvs+=(--setenv=SSH_AUTH_SOCK=$SSH_AUTH_SOCK)
setenvs+=(--setenv=SSH_AGENT_PID=$SSH_AGENT_PID)
fi
else
msg "I can't find anything about $ssh, exiting automatic bind process..."
......
......@@ -37,3 +37,16 @@ BindReadOnly=/var/lib/chakra/bin:/usr/local/bin
# Agent binding/forwarding
```
+ Use systemd-networkd and systemd-resolved inside chroot
+ systemd-nspawn@.service (machinectl) uses a virtual network by default
tmpfiles.conf: install to /usr/lib/tmpfiles.d/chakra.conf inside the container
  d source
  d packages
  d srcpackages
makepkg.config: define SRCDEST, PKGDEST, SRCPKGDEST, PACKAGER, GPGKEY (see the sketch below)
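A possible rendering of those two notes; the paths and values below are illustrative assumptions, not the project's actual files:

```
# /usr/lib/tmpfiles.d/chakra.conf inside the container (hypothetical paths)
d /chakra/source      0755 - - -
d /chakra/packages    0755 - - -
d /chakra/srcpackages 0755 - - -

# makepkg.conf fragment with the variables the note asks for (hypothetical values)
SRCDEST=/chakra/source
PKGDEST=/chakra/packages
SRCPKGDEST=/chakra/srcpackages
PACKAGER="Your Name <you@example.org>"
GPGKEY="0123456789ABCDEF"
```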
......@@ -29,8 +29,8 @@ version="2.0"
# Remote paths
rsync_server=rsync.chakralinux.org
pkgsource="https://$rsync_server/packages/"
remote="ssh://git@git.chakralinux.org"
remote_n="git://git.chakralinux.org"
remote="git@code.chakralinux.org:packages"
remote_n="https://code.chakralinux.org"
repos=(
'core'
......@@ -69,10 +69,9 @@ chakralive_pkgs=(
'python3'
)
function usage ()
{
echo "Usage : $0 [options] PACMAN_CONF [NAME]
echo "Usage : ${0##*/} [options] PACMAN_CONF [NAME]
PACMAN_CONF: The path to a pacman configuration file which contains the
list of repositories that should be enabled in the chroot.
......@@ -108,7 +107,19 @@ install_packages() {
create_chroot() {
status_start "Configuring system..."
sudo cp /etc/resolv.conf "${chroot_dir}/etc" &>/dev/null
# the network can be set up in two ways:
# 1. use the host system's network
# 2. use systemd's way and set up a host-guest virtual ethernet link
##
# case 1 splits into 2 categories:
# 1-1. installing to the systemd machine dir, where we want option 2.
# 1-2. installing to $PWD/chroots/, which is handled via enter_chroot.sh
##
# 1-1.
# check whether this is a machine install.
if [[ $basedir != '/var/lib/machines' ]]; then
sudo cp /etc/resolv.conf "${chroot_dir}/etc" &>/dev/null
fi
if [[ ! -f "${chroot_dir}/etc/${pacman_conf}.bak" ]]; then
sudo mv "${chroot_dir}/etc/pacman.conf"{,.bak}
sudo cp "$pacman_conf" "${chroot_dir}/etc/pacman.conf" &>/dev/null
......@@ -124,8 +135,8 @@ create_chroot() {
if [ "$(grep "^${USER}:" "${chroot_dir}/etc/passwd" | cut -d ":" -f1)" != "${USER}" ] ; then
title "User setup"
status_start "Adding user: ${USER}..."
sudo systemd-nspawn -qjD "$chroot_dir" \
useradd -g users -u "$(id -u)" --home-dir "/$chakrafolder" --create-home "$USER"
sudo systemd-nspawn -qjD $chroot_dir \
/usr/sbin/useradd -g users -u "$(id -u)" --home-dir "/$chakrafolder" --create-home "$USER"
status_done
status_start "Setting up /etc/sudoers..."
......@@ -134,6 +145,16 @@ create_chroot() {
else
msg "Found user: ${USER}"
fi
# Setting up virtual ethernet for machine install
if [[ $basedir == '/var/lib/machines' ]]; then
status_start "Setting up chroot network"
sudo systemd-nspawn -qjD "$chroot_dir" \
sh -c "systemctl enable systemd-networkd systemd-resolved && ln -sf /run/systemd/resolve/resolv.conf /etc/resolv.conf"
status_done
fi
}
clone_repositories() {
......@@ -148,7 +169,9 @@ clone_repositories() {
notice "Skipping '$repo', because '$dst' already exists."
continue
fi
# git clone https://code.chakralinux.org/packages/core.git for read-only
# git clone git@code.chakralinux.org:packages/core.git for write access
git clone "$remote/$repo" "$dst"
#TODO is there a special exit code if permission denied?
if ! git clone "$remote/$repo" "$dst"; then
error "Failed to clone the git repository '$repo' (do you have access rights?)"
......@@ -186,7 +209,7 @@ preconfigure_buildscripts() {
fi
status_done
status_start "Enable local repository..."
sudo sed "/@CHROOT_NAME@/ s/^#\s*//" -i "$chroot_dir/etc/pacman.conf"
sudo sed "/@CHROOT_NAME@/ s/^#\\s*//" -i "$chroot_dir/etc/pacman.conf"
sudo sed "s/@CHROOT_NAME@/$chroot_name/g" -i "$chroot_dir/etc/pacman.conf"
status_done
}
......@@ -251,7 +274,7 @@ while [[ $# -gt 0 ]]; do
-* ) error "\n Option does not exist : $opt\n"
usage; exit 1 ;;
*) args+=($opt); shift ;;
*) args+=("$opt"); shift ;;
esac
done
......@@ -267,14 +290,39 @@ if [[ ! -e "$pacman_conf" ]]; then
fi
chroot_name=${args[1]:-$(basename "$pacman_conf" .conf)}
##
# base directory: parent directory of the chroot roots on the host OS
# defaults to the systemd-nspawn machine directory
# otherwise we create "chroots" under the current directory and keep our chroots there
##
if [[ -d /var/lib/machines ]]; then
basedir=/var/lib/machines
else
basedir=chroots
fi
chroot_dir="$basedir/$chroot_name"
##
# chroot_dir: chroot directory, where the guest (chroot) root is, contains:
# - chakra-stable: for [desktop], [gtk]
# - chakra-testing: for [core], and kde-related group packaging
# - chakra-lib32: for [lib32]
##
chroot_dir="$basedir/chakra-$chroot_name"
##
# chakra folder: the name of the folder where we put the chakra helpers
# also used for recognition inside the guest system
# defaults to "chakra"
##
chakrafolder="chakra"
chakradir="${chroot_dir}/${chakrafolder}"
##
# chakra directory: where chakra-specific configurations, helper scripts and pkg sources live
# defaults to /var/lib/chakra/
# falls back to $PWD/chakra
##
chakradir=/var/lib/chakra
#chakradir="$basedir/${chakrafolder}"
# Check needed executables
if ! hash git 2>/dev/null ; then
......@@ -313,10 +361,14 @@ if hash pacman 2>/dev/null ; then
elif hash pacman.static 2>/dev/null ; then
pacman="pacman.static"
msg "Using pacman.static"
if ! hash pacstrap 2>/dev/null ; then
error "This script needs the package 'arch-install-scripts' installed, please install it before continuing."
exit 1
fi
else
error "You need either 'pacman' or 'pacman.static' in /usr/bin."
error "Can't proceed, stopping... "
exit 0
exit 1
fi
# Check if chroot already exists
......
--- /usr/bin/makepkg
+++ makepkg-chakra
@@ -2,6 +2,12 @@
#
# makepkg - make packages compatible for use with pacman
# Generated from makepkg.sh.in; do not edit by hand.
+# Modified by the Chakra Linux team
+#
+# Copyright (c) 2012-2017 by Samir Benmendil <ram-z@chakralinux.org>
+# Copyright (c) 2011 by Manuel Tortosa
+# Copyright (c) 2010 by Phil Miller
+# Copyright (c) 2006-2010 by Jan Mette
#
# Copyright (c) 2006-2016 Pacman Development Team <pacman-dev@archlinux.org>
# Copyright (c) 2002-2006 by Judd Vinet <jvinet@zeroflux.org>
@@ -41,8 +47,8 @@ unset CDPATH
# Ensure GREP_OPTIONS doesn't screw with our grep calls
unset GREP_OPTIONS
-declare -r makepkg_version='5.0.2'
-declare -r confdir='/etc'
+declare -r makepkg_version='5.0.2-chakra'
+declare -r confdir="$HOME"
declare -r BUILDSCRIPT='PKGBUILD'
declare -r startdir="$PWD"
@@ -51,7 +57,7 @@ LIBRARY=${LIBRARY:-'/usr/share/makepkg'}
build_options=('ccache' 'distcc' 'buildflags' 'makeflags')
splitpkg_overrides=('pkgdesc' 'arch' 'url' 'license' 'groups' 'depends'
'optdepends' 'provides' 'conflicts' 'replaces' 'backup'
- 'options' 'install' 'changelog')
+ 'options' 'install' 'changelog' 'hooks' 'screenshot' 'categories')
readonly -a build_options splitpkg_overrides
known_hash_algos=('md5' 'sha1' 'sha224' 'sha256' 'sha384' 'sha512' 'whirlpool')
@@ -1147,10 +1153,13 @@ write_pkginfo() {
printf "pkgdesc = %s\n" "$spd"
printf "url = %s\n" "$url"
+ printf "screenshot = %s\n" "$screenshot"
printf "builddate = %s\n" "$builddate"
printf "packager = %s\n" "$packager"
printf "size = %s\n" "$size"
printf "arch = %s\n" "$pkgarch"
+ printf "gitrepo = %s\n" "$(basename $(git rev-parse --show-toplevel))"
+ printf "gitfolder = %s\n" "$(basename $(dirname $(realpath "$BUILDFILE")))"
mapfile -t provides < <(find_libprovides)
mapfile -t depends < <(find_libdepends)
@@ -1163,8 +1172,10 @@ write_pkginfo() {
[[ $backup ]] && printf "backup = %s\n" "${backup[@]}"
[[ $depends ]] && printf "depend = %s\n" "${depends[@]}"
[[ $optdepends ]] && printf "optdepend = %s\n" "${optdepends[@]//+([[:space:]])/ }"
+ [[ $categories ]] && printf "categories = %s\n" "${categories[@]}"
[[ $makedepends ]] && printf "makedepend = %s\n" "${makedepends[@]}"
[[ $checkdepends ]] && printf "checkdepend = %s\n" "${checkdepends[@]}"
+ [[ $hooks ]] && printf "hooks = %s\n" "${hooks[@]}"
}
write_buildinfo() {
@@ -1272,6 +1283,8 @@ create_package() {
rm -f "${pkg_file/$PKGDEST/$startdir}.sig"
ln -s "$pkg_file.sig" "${pkg_file/$PKGDEST/$startdir}.sig"
fi
+ msg2 "Updating local repository"
+ repo-add "$PKGDEST/local-$CHROOT.db.tar" "$pkg_file" > /dev/null
fi
if (( ret )); then
#!/usr/bin/env python3
# vim:expandtab:autoindent:tabstop=4:shiftwidth=4:filetype=python:textwidth=0:
# Originally written by Luca Giambonini
# Copyright (C) 2014 Chakra Linux
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
"""
usage:
akbm [options] {--repo-name|--repo-clean|--repo-add|--repo-remove}
akbm [options] --repo-name REPONAME --repo-add PACKAGE
akbm [options] --repo-name REPONAME --repo-remove PACKAGE
akbm [options] --repo-name REPONAME --repo-move PACKAGE
akbm [options] --repo-name REPONAME --repo-dest REPODEST --repo-move PACKAGE
akbm [options] --repo-clean REPONAME
akbm [options] --repo-lock REPONAME
akbm [options] --repo-unlock REPONAME
"""
# library imports
import os
import sys
import fcntl
import time
import pwd
import grp
from optparse import OptionParser
from sanity_checks import *
from subprocess import call
import tarfile
import contextlib
#import gnupg
import signal, errno
from itertools import groupby
from operator import itemgetter
# external libraries
import posterior
#from filelock import FileLock # https://github.com/dmfrey/FileLock (alternative: https://github.com/smontanaro/pylockfile)
import lzma
# all of the variables below are substituted by the build system
__VERSION__ = "1.2.0"
SYSCONFDIR = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), "..", "etc")
PYTHONDIR = os.path.dirname(os.path.realpath(sys.argv[0]))
PACMAN_DB_EXT = '.db.tar.gz'
AKABEI_DB_EXT = '.db.tar.xz'
CHAKRA_REPO = '/srv/www/rsync.chakralinux.org/packages'
#GNUPG_CONF = '/home/almack/.gnupg'
SUCCESS = '::SUCCESS::'
ERROR = '::ERROR::'
# http://code.activestate.com/recipes/65203/
def databaselock(filename):
if os.path.exists(filename):
print('Database is locked with %s' % filename, 'which is owned by', pwd.getpwuid(os.stat(filename).st_uid).pw_name)
status_error()
sys.exit(-1)
else:
try:
# keep a reference to the lock file, otherwise the flock is released
# as soon as the anonymous file object is garbage collected
databaselock.lockfile = open(filename, 'w')
fcntl.flock(databaselock.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError:
print("can't immediately write-lock the file ($!), blocking ...")
# IOError: [Errno 11] Resource temporarily unavailable
#if exc_value[0] == 11:
# raise LockException(LockException.LOCK_FAILED, exc_value[1])
#else:
# raise
#if e.errno != errno.EINTR:
# raise exc_value
def databaseunlock(filename):
fcntl.flock(open(filename, 'w'), fcntl.LOCK_UN)
os.remove(filename)
def cli_progress(cur_val, end_val, front_text="Percent", end_text="", bar_length=40):
percent = float(cur_val) / end_val
hashes = '#' * int(round(percent * bar_length))
spaces = ' ' * (bar_length - len(hashes))
sys.stdout.write("\r{0}: [{1}] {2}% {3}".format(front_text, hashes + spaces, int(round(percent * 100)), end_text))
sys.stdout.flush()
# color print messages (TODO move to an external class)
def title(mesg):
newline()
print ("\033[1;33m>>>\033[1;0m\033[1;1m %s\033[1;0m" % mesg)
newline()
def title2(mesg):
print ("\033[1;33m>>>\033[1;0m\033[1;1m %s\033[1;0m" % mesg)
def msg(mesg):
print ("\033[1;32m ::\033[1;0m\033[1;0m %s\033[1;0m" % mesg)
def warning(mesg):
print ("\033[1;33m ::\033[1;0m\033[1;0m %s\033[1;0m" % mesg)
def error(mesg):
print ("\033[1;31m ::\033[1;0m\033[1;0m %s\033[1;0m" % mesg)
def newline():
print ("\n")
def status_start(mesg):
newline()
print ("\033[1;32m ::\033[1;0m\033[1;0m %s\033[1;0m" % mesg)
newline()
def status_success():
print ("\033[1;32m ::SUCCESS:: \033[1;0m")
def status_error():
print ("\033[1;31m ::ERROR:: \033[1;0m")
def status_warning():
print ("\033[1;33m ::WARNING:: \033[1;0m")
def command_parse(config_opts):
"""return options and args from parsing the command line"""
parser = OptionParser(usage=__doc__, version=__VERSION__)
# modes (basic commands)
parser.add_option("--repo-name", action="store", type="string", dest="dbname",
help="The repo name")
parser.add_option("--repo-dest", action="store", type="string", dest="dbdest",
help="The destination repo name")
parser.add_option("-a", "--repo-add", action="store_const", const="repo-add",
dest="mode",
help="Add a pkg(s) to the specified db")
parser.add_option("-r", "--repo-remove", action="store_const", const="repo-remove",
dest="mode",
help="Remove a pkg(s) from the specified db")
parser.add_option("-m", "--repo-move", action="store_const", const="repo-move",
dest="mode",
help="Move a pkg(s) from the specified repo name to the destination")
parser.add_option("--repo-clean", action="store_const", const="repo-clean",
dest="mode",
help="Clean the db")
parser.add_option("--repo-check-upload", action="store_const", const="repo-check-upload",
dest="mode",
help="Sanity checks for a new upload")
parser.add_option("-l", "--lock", action="store_const", const="repo-lock",
dest="mode",
help="Lock the db")
parser.add_option("-u", "--unlock", action="store_const", const="repo-unlock",
dest="mode",
help="Remove the lock of the db")
parser.add_option("--recreate", action="store_const", const="repo-recreate",
dest="mode",
help="[TODO] Recreate the whole db")
# options
parser.add_option("--arch", action ="store", dest="arch",
default='x86_64', help="Set the architecture, default: %default")
# verbosity
parser.add_option("-v", "--verbose", action="store_const", const=2,
dest="verbose", default=1, help="verbose")
parser.add_option("-q", "--quiet", action="store_const", const=0,
dest="verbose", help="quiet")
# debug
parser.add_option("-n", "--dry-run", action="store_true", default=False,
dest="dryrun", help="perform a trial run with no changes made")
(options, args) = parser.parse_args()
# handle old-style commands
if len(args) and args[0] in ('add', 'remove', 'clean'):
options.mode = args[0]
args = args[1:]
return (options, args)
#decorate(traceLog())
def setchown(dbname, path):
groups = dict()
groups['core'] = 'packagers'
groups['desktop'] = 'packagers'
groups['lib32'] = 'packagers'
groups['gtk'] = 'packagers'
groups['unstable'] = 'packagers'
groups['testing'] = 'packagers'
groups['staging'] = 'packagers'
groups['kde-staging'] = 'packagers'
groups['kde-unstable'] = 'packagers'
# only root can change the file owner, so we only change the group
# alternative: uid = os.stat(filepath).st_uid
uid = os.getuid()
gid = grp.getgrnam(groups[dbname]).gr_gid
try:
os.chown(path, uid, gid)
except:
error("oh no! chown error: uid: %s gid: %s path: %s" % (uid, gid, path))
try:
os.chmod(path, 0o664)
except:
error("oh no! chmod 0x664 error")
msg("Set Owner to: %s of: %s" % (groups[dbname], path))
def removeExtensions(pkg):
return pkg[:pkg.rindex("-")]
def extractName(pkgfile):
hyphens = [i for i, x in enumerate(pkgfile) if x == "-"]
return pkgfile[:hyphens[-2]]
def extractVersion(pkgfile):
hyphens = [i for i, x in enumerate(pkgfile) if x == "-"]
return pkgfile[hyphens[-2] + 1: hyphens[-1]]
def extractRelease(pkgfile):
return pkgfile[pkgfile.rindex("-") + 1:]
"""
def create_signature(filetosign):
# check if the .sig pkg alread exist, in case remove it
#http://stackoverflow.com/questions/11227349/python-gnupg-sign-and-verify
gpg = gnupg.GPG(gnupghome=GNUPG_CONF)
stream = open(filetosign, "rb")
# wb = Opens a file for writing only in binary format. Overwrites the file if the file exists. If the file does not exist, creates a new file for writing.
outstream = open(filetosign + ".sig", "wb")
sign = gpg.sign_file(stream, detach=True, binary=True, passphrase='')
outstream.write(sign.data)
outstream.close()
msg("Signing file: %s" % (filetosign + ".sig"))
"""
#decorate(traceLog())
def do_add_pkg(options, dbname, arch, pkgs):
"""add a pkg(s) to the specified repository"""
status_start("[SERVER] add package(s)")
# skip signatures and keep only the packages
pkgs = [i for i in pkgs if not ".sig" in i ]
for pkg in pkgs:
msg("adding this pkg: %s" % pkg)
if options.dryrun: return
dbpath = os.path.join(CHAKRA_REPO, dbname, arch, dbname)
mydb = posterior.Database
with mydb.lock(dbpath + ".joblock"):
# pacman
#call(["repo-add", "-n", dbpath + PACMAN_DB_EXT] + pkgs) #ToDo sign the pkg
call(["repo-add", "-n", dbpath + PACMAN_DB_EXT] + [ os.path.join(CHAKRA_REPO, dbname, arch, pkg) for pkg in pkgs ])
for pkg in pkgs:
setchown(dbname, os.path.join(CHAKRA_REPO, dbname, arch, pkg))
# create a signature if not already present
#if not os.path.exists(os.path.join(CHAKRA_REPO, dbname, arch, pkg + ".sig")):
#create_signature(os.path.join(CHAKRA_REPO, dbname, arch, pkg))
#setchown(dbname, os.path.join(CHAKRA_REPO, dbname, arch, pkg + ".sig"))
if os.path.exists(os.path.join(CHAKRA_REPO, dbname, arch, pkg + ".sig")):
setchown(dbname, os.path.join(CHAKRA_REPO, dbname, arch, pkg + ".sig"))
else:
warning("no signature found for %s" % pkg)
setchown(dbname, dbpath + PACMAN_DB_EXT)
# akabei
call(["akabei-create-db", "add", dbpath + AKABEI_DB_EXT] + [ os.path.join(CHAKRA_REPO, dbname, arch, pkg) for pkg in pkgs ])
setchown(dbname, dbpath + AKABEI_DB_EXT)
#create_signature(dbpath + AKABEI_DB_EXT)
#setchown(dbname, dbpath + AKABEI_DB_EXT + ".sig")
# ToDo set permission for delta files
#Todo:
# sign database with current user key
status_success()
#decorate(traceLog())
def do_remove_pkg(options, dbname, arch, pkgs):
"""remove a pkg(s) to the specified repository"""
status_start("[SERVER] remove package(s)")
finalpkgsList = []
for pkg in pkgs:
# sanitize the pkgs names
finalpkgsList.append(extractName(removeExtensions(pkg)))
msg("removing this pkg(s): %s" % finalpkgsList)
if options.dryrun: return
dbpath = os.path.join(CHAKRA_REPO, dbname, arch, dbname)
mydb = posterior.Database
with mydb.lock(dbpath + ".joblock"):
# pacman
call(["repo-remove", "-n", dbpath + PACMAN_DB_EXT] + finalpkgsList)
setchown(dbname, dbpath + PACMAN_DB_EXT)
# akabei
call(["akabei-create-db", "remove", dbpath + AKABEI_DB_EXT] + finalpkgsList)
setchown(dbname, dbpath + AKABEI_DB_EXT)
#create_signature(dbpath + AKABEI_DB_EXT)
setchown(dbname, dbpath + AKABEI_DB_EXT + ".sig")
status_success()
# internal function
def _move_pkg(options, dbsrcName, dbdestName, arch, pkgs):
# move the real pkgs first (with signatures), then remove from the current db and later add to the final db
for pkg in pkgs:
os.rename(os.path.join(CHAKRA_REPO, dbsrcName, arch, pkg), os.path.join(CHAKRA_REPO, dbdestName, arch, pkg))
if os.path.isfile(os.path.join(CHAKRA_REPO, dbsrcName, arch, pkg + ".sig")):
os.rename(os.path.join(CHAKRA_REPO, dbsrcName, arch, pkg + ".sig"), os.path.join(CHAKRA_REPO, dbdestName, arch, pkg + ".sig"))
#decorate(traceLog())
def do_move_pkg(options, dbsrcName, dbdestName, arch, pkgs):
"""move a pkg(s) to the specified repository"""
status_start("[SERVER] move package(s)")
packages = [] # create an empty list to collect the pkgs that must be moved
# if the destination repo is specified, move all pkgs there
# else, automatically determine the destination repository:
#   loop through the pkgs,
#   check the git repo recorded in each pkg's metadata,
#   and move each pkg to the matching repo
if dbdestName:
warning("pkg(s) to be moved from [%s] to [%s] are:\n%s" % (dbsrcName, dbdestName, "\n".join(str(x) for x in pkgs)))
if options.dryrun: return
# move the files to the new directory
_move_pkg(options, dbsrcName, dbdestName, arch, pkgs)
# remove the pkg(s) from the current db
do_remove_pkg(options, dbsrcName, arch, pkgs)
# add the pkg(s) to the destination db
do_add_pkg(options, dbdestName, arch, pkgs)
else:
msg("Destination not specified, will be automatically detected...")
for index, pkg in enumerate(pkgs):
with contextlib.closing(lzma.LZMAFile(os.path.join(CHAKRA_REPO, dbsrcName, arch, pkg))) as xz:
with tarfile.open(fileobj=xz) as f:
try:
pkginfo = posterior.Tarball(name=None, tarfileobj=f)
except Exception as e:
print(e)
# pkginfo = posterior.Tarball(os.path.join(CHAKRA_REPO, dbsrcName, arch, pkg)) #only with python >3.3
# clean the git repo string, can be desktop-testing, or kde-next-unstable
for postfix in ("-testing", "-staging", "-unstable"):
pkginfo.gitrepo = pkginfo.gitrepo.replace(postfix, "")
#warning("pkg: %s from \t[%s] -> [%s]" % (pkg, dbsrcName, pkginfo.gitrepo))
if not pkginfo.gitrepo:
error("the pkg %s does not provide the target info you should expecify a destination repo, exiting..." % (pkg))
status_error()
return
# HACK: convert the destination repo to the correct one
# TODO: remove these lines when the transition is finished
# this is a temporary solution introduced after the repository migration
if pkginfo.gitrepo == "platform":
pkginfo.gitrepo = "core"
if pkginfo.gitrepo in ("apps", "games"):
pkginfo.gitrepo = "desktop"
if pkginfo.gitrepo == "extra":
pkginfo.gitrepo = "gtk"
# append to the list the new pkg found
packages.append ([pkg, dbsrcName, pkginfo.gitrepo])
# this is a time-consuming process, show a progress bar
cli_progress(index, len(pkgs), "Detecting destinations", extractName(pkg))
# I need to sort by destination repo because
# later I should lock one repo at a time
# and do all the operations for it at once
groups = []
uniquerepokeys = []
data = sorted(packages, key=itemgetter(2))
for k, g in groupby(data, key=itemgetter(2)):
groups.append(list(g)) # Store group iterator as a list
uniquerepokeys.append(k)
# lock only the source repository, destination repos are locked inside the loop
if not options.dryrun:
do_lock_db(options, dbsrcName, arch)
# pkg[0] = pkg name + extension
# pkg[1] = source repository name
# pkg[2] = destination repository name
# groups[index] = list of pkgs
for index, to_repo in enumerate(uniquerepokeys):
newline()
title2("repo: %s" % to_repo)
for pkg in groups[index]:
msg("%s from \t[%s] -> [%s]" % (pkg[0],pkg[1],pkg[2]))
if not options.dryrun:
_move_pkg(options, pkg[1], pkg[2], arch, [x[0] for x in groups[index]])
do_remove_pkg(options, pkg[1], arch, [x[0] for x in groups[index]])
do_add_pkg(options, pkg[2], arch, [x[0] for x in groups[index]])
# unlock source repo
if not options.dryrun:
do_unlock_db(options, dbsrcName, arch)
if not options.dryrun:
status_success()
#decorate(traceLog())
def do_clean_db(options, dbname, arch):
status_start("[SERVER] clean database")
# -d <dir> Perform check on <dir>. If no argument is supplied, defaults to the current working directory.
# -f Prints the files that would be removed, without touching them.
# -h This message
dbpath = os.path.join(CHAKRA_REPO, dbname, arch)
if options.dryrun:
call(["akabei-clean-repo", "-f", "-d", dbpath])
return
else:
call(["akabei-clean-repo", "-d", dbpath])
status_success()
#decorate(traceLog())
def do_lock_db(options, dbname, arch):
status_start("[SERVER] lock database")
dbpath = os.path.join(CHAKRA_REPO, dbname, arch, dbname)
# let's lock the file
databaselock(dbpath + ".lock")
status_success()
#decorate(traceLog())
def do_unlock_db(options, dbname, arch):
status_start("[SERVER] unlock database")
dbpath = os.path.join(CHAKRA_REPO, dbname, arch, dbname)
# let's unlock the file
databaseunlock(dbpath + ".lock")
status_success()
def do_check_upload(options, dbname, arch, args):
status_start("[SERVER] sanity check upload")
if options.dryrun:
status_success()
return 0
# We always run all checks, no matter what each individual
# check returns, and store the final result here. This will
# be set to False if at least one check does not pass.
passed = True
pkgs = missingSignature(args)
if pkgs:
status_warning()
print('Missing signature for the following packages: %s' % str(pkgs))
passed = False
if options.dbname != 'lib32':
lib32_packages = checkLib32Package(CHAKRA_REPO, arch, args)
if lib32_packages:
status_warning()
print('The following lib32 packages exist, so they should be updated as soon '
'as possible if not done already:', lib32_packages)
passed = False
dups = checkDuplicates(CHAKRA_REPO, arch, options.dbname, args)
if dups:
status_warning()
print('The following packages already exist on the target repo: %s. Please verify '
'you have updated the pkgver or pkgrel correctly.' % dups)
passed = False
if passed:
status_success()
return passed
#decorate(traceLog())
def do_recreate_db(options, dbname, arch):
status_start("[SERVER] recreate database")
# akabei-create-db generate apps.db.tar.xz
# akabei-create-db add apps.db.tar.xz *.pkg.tar.xz
def main(ret):
"""Main executable entry point."""
config_opts = {}
(options, args) = command_parse(config_opts)
# prepare real path for the database
#options.dbname = os.path.abspath(options.dbname)
dbpath = os.path.join(CHAKRA_REPO, options.dbname, options.arch, options.dbname)
if options.dbdest:
dbdestpath = os.path.join(CHAKRA_REPO, options.dbdest, options.arch, options.dbdest)
# remove the extension (for pacman is db.tar.gz, for akabei is db.tar.xz)
#options.dbname = options.dbname.splitext(".")[0]
# consistency checks
if not os.path.exists(dbpath + ".db.tar.gz"):
error("could not find pacman repository (%s): %s" % (PACMAN_DB_EXT, options.dbname + PACMAN_DB_EXT))
sys.exit(1)
if options.dbdest:
if not os.path.exists(dbdestpath + ".db.tar.gz"):
error("could not find pacman destination repository (%s): %s" % (PACMAN_DB_EXT, options.dbname + PACMAN_DB_EXT))
sys.exit(1)
#if not os.path.exists(options.dbname + AKABEI_DB_EXT):
# print("could not find akabei repository (%s): %s" % (AKABEI_DB_EXT, options.dbname + AKABEI_DB_EXT))
# sys.exit(1)
# set logging verbosity
#if options.verbose == 0:
#elif options.verbose == 1:
#elif options.verbose == 2:
if options.mode == 'repo-add':
do_add_pkg(options, options.dbname, options.arch, args)
elif options.mode == 'repo-remove':
do_remove_pkg(options, options.dbname, options.arch, args)
elif options.mode == 'repo-move':
do_move_pkg(options, options.dbname, options.dbdest, options.arch, args)
elif options.mode == 'repo-clean':
do_clean_db(options, options.dbname, options.arch)
elif options.mode == 'repo-lock':
do_lock_db(options, options.dbname, options.arch)
elif options.mode == 'repo-unlock':
do_unlock_db(options, options.dbname, options.arch)
elif options.mode == 'repo-check-upload':
if not do_check_upload(options, options.dbname, options.arch, args):
sys.exit(1)
elif options.mode == 'repo-recreate':
do_recreate_db(options, options.dbname, options.arch)
if __name__ == '__main__':
exitStatus = 0
try:
# sneaky way to ensure that we get passed back parameter even if
# we hit an exception.
retParams = {}
main(retParams)
exitStatus = retParams.get("exitStatus", exitStatus)
except (SystemExit,):
raise
except (Exception,) as exc:
error("unexpected error: %s" % exc)
exitStatus = 1
sys.exit(exitStatus)
#!/usr/bin/python3
# A script to clean package files that do not belong to the current repository database:
# it compares the packages listed in the pacman database with the package files in
# the server repository and removes the ones that do not match.
#
# Copyright (C) 2014 Luca Giambonini <gluca86@gmail.com>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
import sys
import os
from contextlib import closing
import tarfile
import gzip
def removeExtensions(pkg):
return pkg[:pkg.rindex("-")]
def duplicates(dirname,repository,pkglist):
# collect the package files on disk that are not referenced by the repository database
duplicateDictionary = {}
with closing(gzip.GzipFile(os.path.join(dirname, repository + ".db.tar.gz"))) as xz:
with tarfile.open(fileobj=xz) as f:
try:
list_tar_pkg = f.getnames()
for pkg in pkglist:
pkgWithoutExtension = removeExtensions(pkg) # remove file extension and architecture string
if pkgWithoutExtension not in list_tar_pkg:
duplicateDictionary[pkgWithoutExtension] = [pkg]
except Exception as e:
print(e)
return duplicateDictionary
def printDups(dups):
if not dups:
print (":: No package files found, is all up to date :-) ")
return
else:
print (":: The package files which would be removed are: ")
for name in dups:
print(name)
def removeDups(dups, path):
if not dups:
print (":: No package files found, is all up to date :-) ")
return
for name in dups:
pkg = dups[name][0]
completePath = path + "/" + pkg
os.remove(completePath)
print(completePath)
if os.path.isfile(completePath + ".sig"):
os.remove(completePath + ".sig")
print(completePath + ".sig")
print(":: Files removed successfully")
def help():
print("-d <dir> Perform check on <dir>. If no argument is supplied, defaults to the current working directory.")
print("-r <repo> The repository name that you want to look <repo>. No extension is needed.")
print("-f Prints the file that would be removed, without touching them.")
print("-h This message")
exit(0)
if __name__ == "__main__":
dirname = ""
repository = ""
fake = False
for arg in sys.argv:
if (arg == "-d"):
i = sys.argv.index("-d")
if (i < len(sys.argv) - 1):
dirname = sys.argv[i+1]
else:
print(":: Error: directory name is missing after -d")
exit(-1)
if (arg == "-r"):
i = sys.argv.index("-r")
if (i < len(sys.argv) - 1):
repository = sys.argv[i+1]
else:
print(":: Error: repository name is missing after -r")
exit(-1)
elif (arg == "-f"):
fake = True
elif (arg == "-h") or (arg == "--help"):
help()
# No argument supplied
if len(dirname) == 0:
dirname = os.getcwd()
if len(repository) == 0:
print(":: Error: repository name is missing, use option -r <repo>")
exit(-1)
pkglist = [file for file in os.listdir(dirname) if file.endswith(".pkg.tar.xz")]
dup = duplicates(dirname,repository,pkglist)
if fake:
printDups(dup)
else:
removeDups(dup, dirname)
#!/usr/bin/python3
# This is used by akbm when checking new uploads (repo-check-upload command).
import os
import re
def RemoveSignatureExtension(filename):
index = filename.index('.sig')
return filename[:index]
def IsPackageFile(filename):
return filename.endswith('.pkg.tar.xz')
def IsSignatureFile(filename):
return filename.endswith('.pkg.tar.xz.sig')
def GetPackageName(filename):
"""Retrieves the name of a package, without the version
and pkgrel, from the archive file name."""
pkgNames = []
pkgVerRe = '[0-9.]+'
# Divide the file name in pieces by -, then add pieces
# until you find a piece which represents a version (through
# regular expression). Then all the "name pieces" are joined
# together again to return the complete name.
#
# This should deal with package names containing numbers.
pieces = filename.split('-')
for p in pieces:
if re.match(pkgVerRe, p):
break
pkgNames.append(p)
pkgName = '-'.join(pkgNames)
return pkgName
def missingSignature(filelist):
"""Checks whether there is a signature file for each
package file. Returns the list of package files with no
signature file."""
pkgsHasSig = {}
for f in filelist:
if IsPackageFile(f):
pkgsHasSig[f] = False
elif IsSignatureFile(f):
pkgsHasSig[RemoveSignatureExtension(f)] = True
return [x for x in pkgsHasSig if not pkgsHasSig[x]]
def checkDuplicates(reposPath, arch, repoName, filelist):
"""Checks whether the repository already contains the same
package file we are trying to upload. Returns the list of
duplicate files."""
repoPath = os.path.join(reposPath, repoName, arch)
print(repoPath)
dirListing = [ f for f in os.listdir(repoPath) if IsPackageFile(f) ]
intersection = set(filelist) & set(dirListing)
return intersection
# Calling this on a lib32 package should return an empty list.
def checkLib32Package(reposPath, arch, filelist):
"""Checks whether a package has a corresponding package in lib32. Returns
the list of lib32 package names that were found."""
repoPath = os.path.join(reposPath, 'lib32', arch)
repoListing = [ GetPackageName(f) for f in os.listdir(repoPath) if IsPackageFile(f) ]
packageNames32 = []
for pkgFile in filelist:
if IsSignatureFile(pkgFile):
continue
pkgName = GetPackageName(pkgFile)
pkgName = 'lib32-' + pkgName
# We are not concerned with comparing versions, since that adds
# complexity, only whether a lib32 package exists or not.
if pkgName in repoListing:
packageNames32.append(pkgName)
return packageNames32