From: Mike O'Connor
Date: Mon, 13 Apr 2009 20:09:23 +0000 (-0400)
Subject: merge with master
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=1e1457bde3318c6605b4c97d4299803fdaf8e774;hp=ed6728af1e7acc51129d9fd8b3f1d91697928872;p=dak.git

merge with master
---

diff --git a/config/debian/apt.conf b/config/debian/apt.conf
index f897d7b1..ea370008 100644
--- a/config/debian/apt.conf
+++ b/config/debian/apt.conf
@@ -73,7 +73,7 @@ tree "dists/unstable"
   FileList "/srv/ftp.debian.org/database/dists/unstable_$(SECTION)_binary-$(ARCH).list";
   SourceFileList "/srv/ftp.debian.org/database/dists/unstable_$(SECTION)_source.list";
   Sections "main contrib non-free";
-  Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+  Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 source";
   BinOverride "override.sid.$(SECTION)";
   ExtraOverride "override.sid.extra.$(SECTION)";
   SrcOverride "override.sid.$(SECTION).src";
@@ -196,43 +196,7 @@ tree "dists/experimental"
   FileList "/srv/ftp.debian.org/database/dists/experimental_$(SECTION)_binary-$(ARCH).list";
   SourceFileList "/srv/ftp.debian.org/database/dists/experimental_$(SECTION)_source.list";
   Sections "main contrib non-free";
-  Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+  Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 source";
   BinOverride "override.sid.$(SECTION)";
   SrcOverride "override.sid.$(SECTION).src";
 };
-
-tree "dists/etch-m68k"
-{
-  FakeDI "dists/unstable";
-  FileList "/srv/ftp.debian.org/database/dists/etch-m68k_$(SECTION)_binary-$(ARCH).list";
-  SourceFileList "/srv/ftp.debian.org/database/dists/etch-m68k_$(SECTION)_source.list";
-  Sections "main contrib non-free";
-  Architectures "m68k source";
-  BinOverride "override.etch.$(SECTION)";
-  ExtraOverride "override.etch.extra.$(SECTION)";
-  SrcOverride "override.etch.$(SECTION).src";
-};
-
-tree "dists/etch-m68k/main"
-{
-  FileList "/srv/ftp.debian.org/database/dists/etch-m68k_main_$(SECTION)_binary-$(ARCH).list";
-  Sections "debian-installer";
-  Architectures "m68k";
-  BinOverride "override.etch.main.$(SECTION)";
-  SrcOverride "override.etch.main.src";
-  BinCacheDB "packages-debian-installer-$(ARCH).db";
-  Packages::Extensions ".udeb";
-  Contents "$(DIST)/../Contents-udeb";
-};
-
-tree "dists/etch-m68k/non-free"
-{
-  FileList "/srv/ftp.debian.org/database/dists/etch-m68k_non-free_$(SECTION)_binary-$(ARCH).list";
-  Sections "debian-installer";
-  Architectures "m68k";
-  BinOverride "override.etch.main.$(SECTION)";
-  SrcOverride "override.etch.main.src";
-  BinCacheDB "packages-debian-installer-$(ARCH).db";
-  Packages::Extensions ".udeb";
-  Contents "$(DIST)/../Contents-udeb-nf";
-};
diff --git a/config/debian/common b/config/debian/common
index ad10ea6c..3655d9d2 100644
--- a/config/debian/common
+++ b/config/debian/common
@@ -24,3 +24,21 @@ function debug () {
         log "$*"
     fi
 }
+
+# used by cron.dinstall *and* cron.unchecked.
+function make_buildd_dir () {
+    cd $configdir
+    apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate apt.conf.buildd
+
+    cd ${incoming}
+    rm -f buildd/Release*
+    apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd incoming" -o APT::FTPArchive::Release::Architectures="${archs}" release buildd > Release
+    gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o Release.gpg Release
+    mv Release* buildd/.
+
+    cd ${incoming}
+    mkdir -p tree/${STAMP}
+    cp -al ${incoming}/buildd/. tree/${STAMP}/
+    ln -sfT tree/${STAMP} ${incoming}/builddweb
+    find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+}
diff --git a/config/debian/cron.dinstall b/config/debian/cron.dinstall
index 1f0e1961..c730a116 100755
--- a/config/debian/cron.dinstall
+++ b/config/debian/cron.dinstall
@@ -74,7 +74,7 @@ function merkel1() {
 # Create the postgres dump files
 function pgdump_pre() {
     log "Creating pre-daily-cron-job backup of projectb database..."
-    pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+    pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
 }
 
 function pgdump_post() {
@@ -235,6 +235,13 @@ function buildd() {
     apt-ftparchive generate apt.conf.buildd
 }
 
+function buildd_dir() {
+    # Rebuilt the buildd dir to avoid long times of 403
+    log "Regenerating the buildd incoming dir"
+    STAMP=$(date "+%Y%m%d%H%M")
+    make_buildd_dir
+}
+
 function scripts() {
     log "Running various scripts from $scriptsdir"
     cd $scriptsdir
@@ -289,6 +296,12 @@ function merkel2() {
     ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
 }
 
+function merkel3() {
+    # Push dak@merkel to tell it to sync the dd accessible parts. Returns immediately, the sync runs detached
+    log "Trigger merkels dd accessible parts sync"
+    ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+}
+
 function runparts() {
     log "Using run-parts to run scripts in $base/scripts/distmnt"
     run-parts --report $base/scripts/distmnt
@@ -318,6 +331,8 @@ function stats() {
     cd $configdir
     $scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
     R --slave --vanilla < $base/misc/ftpstats.R
+    dak stats arch-space > $webdir/arch-space
+    dak stats pkg-nums > $webdir/pkg-nums
 }
 
 function aptftpcleanup() {
@@ -329,11 +344,19 @@ function aptftpcleanup() {
 
 function compress() {
     log "Compress old psql backups"
     cd $base/backup/
-    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +1 |
+    find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
+
+    find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin 720 |
+    while read dumpname; do
+        echo "Compressing $dumpname"
+        bzip2 -9v "$dumpname"
+    done
+
     find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin 720 |
     while read dumpname; do
         echo "Compressing $dumpname"
         bzip2 -9v "$dumpname"
     done
+    finddup -l -d $base/backup
 }
 
 function logstats() {
@@ -571,6 +594,14 @@ GO=(
 )
 stage $GO
 
+GO=(
+    FUNC="buildd_dir"
+    TIME="buildd_dir"
+    ARGS=""
+    ERR="false"
+)
+stage $GO
+
 GO=(
     FUNC="cruft"
     TIME="cruft"
@@ -690,14 +721,6 @@ GO=(
 )
 stage $GO
 
-GO=(
-    FUNC="pgdakdev"
-    TIME="dak-dev db"
-    ARGS=""
-    ERR="false"
-)
-stage $GO
-
 GO=(
     FUNC="expire"
     TIME="expire_dumps"
@@ -768,9 +791,18 @@ GO=(
     ARGS=""
     ERR="false"
 )
+stage $GO
 
 rm -f ${LOCK_BRITNEY}
 
+GO=(
+    FUNC="pgdakdev"
+    TIME="dak-dev db"
+    ARGS=""
+    ERR="false"
+)
+stage $GO
+
 GO=(
     FUNC="aptftpcleanup"
     TIME="apt-ftparchive cleanup"
     ARGS=""
     ERR="false"
 )
 stage $GO
 
+GO=(
+    FUNC="merkel3"
+    TIME="merkel ddaccessible sync"
+    ARGS=""
+    ERR="false"
+)
+stage $GO
+
 GO=(
     FUNC="compress"
     TIME="compress"
diff --git a/config/debian/cron.unchecked b/config/debian/cron.unchecked
index 5e7a035d..d41d573d 100755
--- a/config/debian/cron.unchecked
+++ b/config/debian/cron.unchecked
@@ -5,6 +5,9 @@ set -u
 export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
 . $SCRIPTVARS
 
+# common functions are "outsourced"
+. "${configdir}/common"
+
 LOCKDAILY=""
 LOCKFILE="$lockdir/unchecked.lock"
 NOTICE="$lockdir/daily.lock"
@@ -56,20 +59,7 @@ if lockfile -r3 $LOCKFILE; then
             cat override.sid.$i.src >> override.sid.all3.src
         fi
     done
-    cd $configdir
-    apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate apt.conf.buildd
-
-    cd ${incoming}
-    rm -f buildd/Release*
-    apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd incoming" -o APT::FTPArchive::Release::Architectures="${archs}" release buildd > Release
-    gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o Release.gpg Release
-    mv Release* buildd/.
-
-    cd ${incoming}
-    mkdir -p tree/${STAMP}
-    cp -al ${incoming}/buildd/. tree/${STAMP}/
-    ln -sfT tree/${STAMP} ${incoming}/builddweb
-    find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+    make_buildd_dir
 
     . $configdir/cron.buildd
 fi
diff --git a/config/debian/dak.conf b/config/debian/dak.conf
index 662d16f6..24282855 100644
--- a/config/debian/dak.conf
+++ b/config/debian/dak.conf
@@ -503,6 +503,7 @@ SuiteMappings
   "map testing-security testing-proposed-updates";
   "map-unreleased testing unstable";
   "map-unreleased testing-proposed-updates unstable";
+  "reject etch-m68k";
 };
 
 AutomaticByHandPackages {
@@ -598,6 +599,8 @@ Architectures
   s390 "IBM S/390";
   sh "Hitatchi SuperH";
   sparc "Sun SPARC/UltraSPARC";
+  kfreebsd-i386 "GNU/kFreeBSD i386";
+  kfreebsd-amd64 "GNU/kFreeBSD amd64";
 };
 
 Archive
diff --git a/config/debian/vars b/config/debian/vars
index 352a8fc4..09a047b2 100644
--- a/config/debian/vars
+++ b/config/debian/vars
@@ -5,7 +5,7 @@ bindir=$base/bin
 ftpdir=$base/ftp
 webdir=$base/web
 indices=$ftpdir/indices
-archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc"
+archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 "
 
 scriptdir=$base/scripts
 masterdir=$base/dak/
diff --git a/dak/clean_suites.py b/dak/clean_suites.py
index b5904836..d6a966b5 100755
--- a/dak/clean_suites.py
+++ b/dak/clean_suites.py
@@ -201,7 +201,6 @@ def clean():
     before = time.time()
     sys.stdout.write("[Deleting from source table... ")
     projectB.query("DELETE FROM dsc_files WHERE EXISTS (SELECT 1 FROM source s, files f, dsc_files df WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = df.source AND df.id = dsc_files.id)" % (delete_date))
-    projectB.query("DELETE FROM src_uploaders WHERE EXISTS (SELECT 1 FROM source s, files f WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = src_uploaders.source)" % (delete_date))
     projectB.query("DELETE FROM source WHERE EXISTS (SELECT 1 FROM files WHERE source.file = files.id AND files.last_used <= '%s')" % (delete_date))
     sys.stdout.write("done. (%d seconds)]\n" % (int(time.time()-before)))
 
diff --git a/dak/cruft_report.py b/dak/cruft_report.py
index 3c3d73cb..4c6d323e 100755
--- a/dak/cruft_report.py
+++ b/dak/cruft_report.py
@@ -168,7 +168,7 @@ def parse_nfu(architecture):
         f.close()
     else:
-        utils.warn("No wanna-build dump file for architecture %s", architecture)
+        utils.warn("No wanna-build dump file for architecture %s" % architecture)
 
     return ret
 
 ################################################################################
@@ -428,6 +428,8 @@ def main ():
     for component in check_components:
         architectures = filter(utils.real_arch, database.get_suite_architectures(suite))
         for architecture in architectures:
+            if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
+                continue
             filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (Cnf["Dir::Root"], suite, component, architecture)
             # apt_pkg.ParseTagFile needs a real file handle
             (fd, temp_filename) = utils.temp_filename()
diff --git a/dak/dakdb/__init__.py b/dak/dakdb/__init__.py
index e69de29b..a35616dd 100644
--- a/dak/dakdb/__init__.py
+++ b/dak/dakdb/__init__.py
@@ -0,0 +1,33 @@
+"""
+Database update scripts for usage with B{dak update-db}
+
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@license: GNU General Public License version 2 or later
+
+Update scripts have to C{import psycopg2} and
+C{from daklib.dak_exceptions import DBUpdateError}.
+
+There has to be B{at least} the function C{do_update(self)} to be
+defined. It should take all neccessary steps to update the
+database. If the update fails the changes have to be rolled back and the
+C{DBUpdateError} exception raised to properly halt the execution of any
+other update.
+
+Example::
+ def do_update(self):
+     print "Doing something"
+
+     try:
+         c = self.db.cursor()
+         c.execute("SOME SQL STATEMENT")
+         self.db.commit()
+
+     except psycopg2.ProgrammingError, msg:
+         self.db.rollback()
+         raise DBUpdateError, "Unable to do whatever, rollback issued. Error message : %s" % (str(msg))
+
+This function can do whatever it wants and use everything from dak and
+daklib.
+
+"""
diff --git a/dak/dakdb/update1.py b/dak/dakdb/update1.py
index 92fc4498..0c833773 100755
--- a/dak/dakdb/update1.py
+++ b/dak/dakdb/update1.py
@@ -1,7 +1,12 @@
 #!/usr/bin/env python
 
-""" Database Update Script - Saner DM db schema """
-# Copyright (C) 2008 Michael Casadevall
+"""
+Saner DM db schema
+
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@license: GNU General Public License version 2 or later
+"""
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
diff --git a/dak/dakdb/update10.py b/dak/dakdb/update10.py
old mode 100644
new mode 100755
index cf1caa1c..5cd4f30d
--- a/dak/dakdb/update10.py
+++ b/dak/dakdb/update10.py
@@ -2,11 +2,11 @@
 # coding=utf8
 """
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall
-Copyright © 2009 Mike O'Connor
+Add constraints to src_uploaders
 
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master
+@copyright: 2009 Mark Hymers
+@license: GNU General Public License version 2 or later
 """
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -25,6 +25,7 @@ Debian Archive Kit Database Update Script 8
 
 ################################################################################
 
+# oh no, Ganneff has just corrected my english
 
 ################################################################################
 
@@ -36,21 +37,20 @@ from daklib.utils import get_conf
 
 ################################################################################
 
 def do_update(self):
-    print "add package_type enum"
+    print "Add constraints to src_uploaders"
     Cnf = get_conf()
 
     try:
         c = self.db.cursor()
-
-        c.execute("CREATE TYPE package_type AS ENUM('deb','udeb','tdeb', 'dsc')")
-        c.execute("ALTER TABLE binaries RENAME COLUMN type to type_text" );
-        c.execute("ALTER TABLE binaries ADD COLUMN type package_type" );
-        c.execute("UPDATE binaries set type=type_text::package_type" );
-        c.execute("ALTER TABLE binaries DROP COLUMN type_text" );
-        c.execute("CREATE INDEX binary_type_ids on binaries(type)")
-
+        # Deal with out-of-date src_uploaders entries
+        c.execute("DELETE FROM src_uploaders WHERE source NOT IN (SELECT id FROM source)")
+        c.execute("DELETE FROM src_uploaders WHERE maintainer NOT IN (SELECT id FROM maintainer)")
+        # Add constraints
+        c.execute("ALTER TABLE src_uploaders ADD CONSTRAINT src_uploaders_maintainer FOREIGN KEY (maintainer) REFERENCES maintainer(id) ON DELETE CASCADE")
+        c.execute("ALTER TABLE src_uploaders ADD CONSTRAINT src_uploaders_source FOREIGN KEY (source) REFERENCES source(id) ON DELETE CASCADE")
+        c.execute("UPDATE config SET value = '10' WHERE name = 'db_revision'")
         self.db.commit()
 
     except psycopg2.ProgrammingError, msg:
         self.db.rollback()
-        raise DBUpdateError, "Unable to apply binary type enum update, rollback issued. Error message : %s" % (str(msg))
+        raise DBUpdateError, "Unable to apply suite config updates, rollback issued. Error message : %s" % (str(msg))
diff --git a/dak/dakdb/update2.py b/dak/dakdb/update2.py
index 850e3ab5..2e3cb446 100755
--- a/dak/dakdb/update2.py
+++ b/dak/dakdb/update2.py
@@ -1,9 +1,14 @@
 #!/usr/bin/env python
 # coding=utf8
 
-""" Database Update Script - debversion """
-# Copyright © 2008 Michael Casadevall
-# Copyright © 2008 Roger Leigh
+"""
+debversion
+
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@copyright: 2008 Roger Leigh
+@license: GNU General Public License version 2 or later
+"""
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
diff --git a/dak/dakdb/update3.py b/dak/dakdb/update3.py
index eef7d4c6..c91ecf56 100755
--- a/dak/dakdb/update3.py
+++ b/dak/dakdb/update3.py
@@ -1,8 +1,13 @@
 #!/usr/bin/env python
 
-""" Database Update Script - Remove unused versioncmp """
-# Copyright (C) 2008 Michael Casadevall
-# Copyright (C) 2009 Joerg Jaspert
+"""
+Remove unused versioncmp
+
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@copyright: 2009 Joerg Jaspert
+@license: GNU General Public License version 2 or later
+"""
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
diff --git a/dak/dakdb/update4.py b/dak/dakdb/update4.py
old mode 100644
new mode 100755
index 1a9d9c3a..477944c8
--- a/dak/dakdb/update4.py
+++ b/dak/dakdb/update4.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 """
-Database Update Script - Get suite_architectures table use sane values
+Get suite_architectures table use sane values
 
 @contact: Debian FTP Master
 @copyright: 2009 Joerg Jaspert
diff --git a/dak/dakdb/update5.py b/dak/dakdb/update5.py
index 1d820de4..beb961cc 100755
--- a/dak/dakdb/update5.py
+++ b/dak/dakdb/update5.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 """
-Database Update Script - Fix bin_assoc_by_arch view
+Fix bin_assoc_by_arch view
 
 @contact: Debian FTP Master
 @copyright: 2009 Joerg Jaspert
diff --git a/dak/dakdb/update6.py b/dak/dakdb/update6.py
old mode 100644
new mode 100755
index 4537579a..c7b0b17f
--- a/dak/dakdb/update6.py
+++ b/dak/dakdb/update6.py
@@ -2,11 +2,12 @@
 # coding=utf8
 
 """
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall
-Copyright © 2008 Roger Leigh
+Adding content fields
 
-Debian Archive Kit Database Update Script 2
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@copyright: 2008 Roger Leigh
+@license: GNU General Public License version 2 or later
 """
 
 # This program is free software; you can redistribute it and/or modify
diff --git a/dak/dakdb/update7.py b/dak/dakdb/update7.py
index c8828535..6f91eb37 100755
--- a/dak/dakdb/update7.py
+++ b/dak/dakdb/update7.py
@@ -2,11 +2,13 @@
 # coding=utf8
 
 """
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall
-Copyright © 2009 Joerg Jaspert
+Moving suite config into DB
+
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@copyright: 2009 Joerg Jaspert
+@license: GNU General Public License version 2 or later
 
-Debian Archive Kit Database Update Script 7
 """
 
 # This program is free software; you can redistribute it and/or modify
diff --git a/dak/dakdb/update8.py b/dak/dakdb/update8.py
index fc505f7a..2f92c4d8 100755
--- a/dak/dakdb/update8.py
+++ b/dak/dakdb/update8.py
@@ -2,11 +2,12 @@
 # coding=utf8
 
 """
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall
-Copyright © 2009 Joerg Jaspert
+More suite config into the DB
 
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@copyright: 2009 Joerg Jaspert
+@license: GNU General Public License version 2 or later
 """
 
 # This program is free software; you can redistribute it and/or modify
diff --git a/dak/dakdb/update9.py b/dak/dakdb/update9.py
old mode 100644
new mode 100755
index 09785773..2ca3c51e
--- a/dak/dakdb/update9.py
+++ b/dak/dakdb/update9.py
@@ -2,11 +2,12 @@
 # coding=utf8
 
 """
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall
-Copyright © 2009 Mike O'Connor
+Pending contents disinguished by arch
 
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master
+@copyright: 2008 Michael Casadevall
+@copyright: 2009 Mike O'Connor
+@license: GNU General Public License version 2 or later
 """
 
 # This program is free software; you can redistribute it and/or modify
diff --git a/dak/examine_package.py b/dak/examine_package.py
index eb602794..9448724e 100755
--- a/dak/examine_package.py
+++ b/dak/examine_package.py
@@ -1,7 +1,13 @@
 #!/usr/bin/env python
 
-""" Script to automate some parts of checking NEW packages """
-# Copyright (C) 2000, 2001, 2002, 2003, 2006 James Troup
+"""
+Script to automate some parts of checking NEW packages
+
+@contact: Debian FTP Master
+@copyright: 2000, 2001, 2002, 2003, 2006 James Troup
+@copyright: 2009 Joerg Jaspert
+@license: GNU General Public License version 2 or later
+"""
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -32,7 +38,12 @@
 
 ################################################################################
 
-import errno, os, pg, re, sys, md5
+import errno
+import os
+import pg
+import re
+import sys
+import md5
 import apt_pkg, apt_inst
 from daklib import database
 from daklib import utils
@@ -50,6 +61,7 @@ projectB = pg.connect(Cnf["DB::Name"], Cnf["DB::Host"], int(Cnf["DB::Port"]))
 database.init(Cnf, projectB)
 
 printed_copyrights = {}
+package_relations = {} #: Store relations of packages for later output
 
 # default is to not output html.
 use_html = 0
@@ -333,17 +345,38 @@ def create_depends_string (suite, depends_tree):
             comma_count += 1
     return result
 
-def output_deb_info(suite, filename):
+def output_package_relations ():
+    """
+    Output the package relations, if there is more than one package checked in this run.
+ """ + + if len(package_relations) < 2: + # Only list something if we have more than one binary to compare + return + + to_print = "" + for package in package_relations: + for relation in package_relations[package]: + to_print += "%-15s: (%s) %s\n" % (package, relation, package_relations[package][relation]) + + package_relations.clear() + foldable_output("Package relations", "relations", to_print) + +def output_deb_info(suite, filename, packagename): (control, control_keys, section, depends, recommends, arch, maintainer) = read_control(filename) if control == '': return formatted_text("no control info") to_print = "" + if not package_relations.has_key(packagename): + package_relations[packagename] = {} for key in control_keys : if key == 'Depends': field_value = create_depends_string(suite, depends) + package_relations[packagename][key] = field_value elif key == 'Recommends': field_value = create_depends_string(suite, recommends) + package_relations[packagename][key] = field_value elif key == 'Section': field_value = section elif key == 'Architecture': @@ -415,7 +448,7 @@ def check_deb (suite, deb_filename): foldable_output("control file for %s" % (filename), "binary-%s-control"%packagename, - output_deb_info(suite, deb_filename), norow=True) + output_deb_info(suite, deb_filename, packagename), norow=True) if is_a_udeb: foldable_output("skipping lintian check for udeb", "binary-%s-lintian"%packagename, @@ -523,6 +556,7 @@ def main (): else: utils.fubar("Unrecognised file type: '%s'." % (f)) finally: + output_package_relations() if not Options["Html-Output"]: # Reset stdout here so future less invocations aren't FUBAR less_fd.close() diff --git a/dak/generate_releases.py b/dak/generate_releases.py index 137c8447..d6aeb390 100755 --- a/dak/generate_releases.py +++ b/dak/generate_releases.py @@ -121,6 +121,31 @@ def print_sha1_files (tree, files): def print_sha256_files (tree, files): print_md5sha_files (tree, files, apt_pkg.sha256sum) +def write_release_file (relpath, suite, component, origin, label, arch, version="", suite_suffix="", notautomatic=""): + try: + if os.access(relpath, os.F_OK): + if os.stat(relpath).st_nlink > 1: + os.unlink(relpath) + release = open(relpath, "w") + except IOError: + utils.fubar("Couldn't write to " + relpath) + + release.write("Archive: %s\n" % (suite)) + if version != "": + release.write("Version: %s\n" % (version)) + + if suite_suffix: + release.write("Component: %s/%s\n" % (suite_suffix,component)) + else: + release.write("Component: %s\n" % (component)) + + release.write("Origin: %s\n" % (origin)) + release.write("Label: %s\n" % (label)) + if notautomatic != "": + release.write("NotAutomatic: %s\n" % (notautomatic)) + release.write("Architecture: %s\n" % (arch)) + release.close() + ################################################################################ def main (): @@ -269,29 +294,7 @@ def main (): else: rel = "%s/binary-%s/Release" % (sec, arch) relpath = Cnf["Dir::Root"]+tree+"/"+rel - - try: - if os.access(relpath, os.F_OK): - if os.stat(relpath).st_nlink > 1: - os.unlink(relpath) - release = open(relpath, "w") - #release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w") - except IOError: - utils.fubar("Couldn't write to " + relpath) - - release.write("Archive: %s\n" % (suite)) - if version != "": - release.write("Version: %s\n" % (version)) - if suite_suffix: - release.write("Component: %s/%s\n" % (suite_suffix,sec)) - else: - release.write("Component: %s\n" % (sec)) - release.write("Origin: %s\n" % (origin)) - 
release.write("Label: %s\n" % (label)) - if notautomatic != "": - release.write("NotAutomatic: %s\n" % (notautomatic)) - release.write("Architecture: %s\n" % (arch)) - release.close() + write_release_file(relpath, suite, sec, origin, label, arch, version, suite_suffix, notautomatic) files.append(rel) if AptCnf.has_key("tree::%s/main" % (tree)): @@ -303,6 +306,10 @@ def main (): for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split(): if arch != "source": # always true + rel = "%s/%s/binary-%s/Release" % (dis, sec, arch) + relpath = Cnf["Dir::Root"]+tree+"/"+rel + write_release_file(relpath, suite, dis, origin, label, arch, version, suite_suffix, notautomatic) + files.append(rel) for cfile in compressnames("tree::%s/%s" % (tree,dis), "Packages", "%s/%s/binary-%s/Packages" % (dis, sec, arch)): diff --git a/dak/process_accepted.py b/dak/process_accepted.py index e55ac54d..9883bb20 100755 --- a/dak/process_accepted.py +++ b/dak/process_accepted.py @@ -1,8 +1,14 @@ #!/usr/bin/env python -""" Installs Debian packages from queue/accepted into the pool """ -# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup +""" +Installs Debian packages from queue/accepted into the pool +@contact: Debian FTP Master +@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup +@copyright: 2009 Joerg Jaspert +@license: GNU General Public License version 2 or later + +""" # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or @@ -218,7 +224,7 @@ def usage (exit_code=0): ############################################################################### -def action (): +def action (queue=""): (summary, short_summary) = Upload.build_summaries() (prompt, answer) = ("", "XXX") @@ -250,7 +256,7 @@ def action (): if not installing_to_stable: install() else: - stable_install(summary, short_summary) + stable_install(summary, short_summary, queue) elif answer == 'Q': sys.exit(0) @@ -488,10 +494,15 @@ def install (): ################################################################################ -def stable_install (summary, short_summary): +def stable_install (summary, short_summary, fromsuite="proposed-updates"): global install_count - print "Installing to stable." + fromsuite = fromsuite.lower() + tosuite = "Stable" + if fromsuite == "oldstable-proposed-updates": + tosuite = "OldStable" + + print "Installing from %s to %s." % (fromsuite, tosuite) # Begin a transaction; if we bomb out anywhere between here and # the COMMIT WORK below, the DB won't be changed. @@ -507,9 +518,9 @@ def stable_install (summary, short_summary): if not ql: utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version)) source_id = ql[0][0] - suite_id = database.get_suite_id('proposed-updates') + suite_id = database.get_suite_id(fromsuite) projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id)) - suite_id = database.get_suite_id('stable') + suite_id = database.get_suite_id(tosuite.lower()) projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id)) # Add the binaries to stable (and remove it/them from proposed-updates) @@ -524,9 +535,9 @@ def stable_install (summary, short_summary): utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." 
                 utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
             binary_id = ql[0][0]
-            suite_id = database.get_suite_id('proposed-updates')
+            suite_id = database.get_suite_id(fromsuite)
             projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
-            suite_id = database.get_suite_id('stable')
+            suite_id = database.get_suite_id(tosuite.lower())
             projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id))
 
     projectB.query("COMMIT WORK")
@@ -534,17 +545,17 @@
     utils.move (pkg.changes_file, Cnf["Dir::Morgue"] + '/process-accepted/' + os.path.basename(pkg.changes_file))
 
     ## Update the Stable ChangeLog file
-    new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + ".ChangeLog"
-    changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + "ChangeLog"
+    new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + ".ChangeLog"
+    changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + "ChangeLog"
     if os.path.exists(new_changelog_filename):
         os.unlink (new_changelog_filename)
 
     new_changelog = utils.open_file(new_changelog_filename, 'w')
     for newfile in files.keys():
         if files[newfile]["type"] == "deb":
-            new_changelog.write("stable/%s/binary-%s/%s\n" % (files[newfile]["component"], files[newfile]["architecture"], newfile))
+            new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.lower(), files[newfile]["component"], files[newfile]["architecture"], newfile))
         elif re_issource.match(newfile):
-            new_changelog.write("stable/%s/source/%s\n" % (files[newfile]["component"], newfile))
+            new_changelog.write("%s/%s/source/%s\n" % (tosuite.lower(), files[newfile]["component"], newfile))
         else:
             new_changelog.write("%s\n" % (newfile))
     chop_changes = re_fdnic.sub("\n", changes["changes"])
@@ -560,19 +571,19 @@
     install_count += 1
 
     if not Options["No-Mail"] and changes["architecture"].has_key("source"):
-        Subst["__SUITE__"] = " into stable"
+        Subst["__SUITE__"] = " into %s" % (tosuite)
         Subst["__SUMMARY__"] = summary
         mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.install")
         utils.send_mail(mail_message)
         Upload.announce(short_summary, 1)
 
     # Finally remove the .dak file
-    dot_dak_file = os.path.join(Cnf["Suite::Proposed-Updates::CopyDotDak"], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
+    dot_dak_file = os.path.join(Cnf["Suite::%s::CopyDotDak" % (fromsuite)], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
     os.unlink(dot_dak_file)
 
 ################################################################################
 
-def process_it (changes_file):
+def process_it (changes_file, queue=""):
     global reject_message
     reject_message = ""
 
@@ -588,7 +599,7 @@
     if installing_to_stable:
         old = Upload.pkg.changes_file
         Upload.pkg.changes_file = os.path.basename(old)
-        os.chdir(Cnf["Suite::Proposed-Updates::CopyDotDak"])
+        os.chdir(Cnf["Suite::%s::CopyDotDak" % (queue)])
 
     Upload.init_vars()
     Upload.update_vars()
@@ -598,7 +609,7 @@
         Upload.pkg.changes_file = old
 
     check()
-    action()
+    action(queue)
 
     # Restore CWD
     os.chdir(pkg.directory)
@@ -620,7 +631,12 @@ def main():
Try again later.") # If running from within proposed-updates; assume an install to stable - if os.getcwd().find('proposed-updates') != -1: + queue = "" + if os.getenv('PWD').find('oldstable-proposed-updates') != -1: + queue = "Oldstable-Proposed-Updates" + installing_to_stable = 1 + elif os.getenv('PWD').find('proposed-updates') != -1: + queue = "Proposed-Updates" installing_to_stable = 1 # Obtain lock if not in no-action mode and initialize the log @@ -650,7 +666,7 @@ def main(): # Process the changes files for changes_file in changes_files: print "\n" + changes_file - process_it (changes_file) + process_it (changes_file, queue) if install_count: sets = "set" diff --git a/dak/process_new.py b/dak/process_new.py index acc4522f..9ecfcdc6 100755 --- a/dak/process_new.py +++ b/dak/process_new.py @@ -516,6 +516,7 @@ def check_pkg (): elif ftype == "dsc": examine_package.check_dsc(changes['distribution'], f) finally: + examine_package.output_package_relations() sys.stdout = stdout_fd except IOError, e: if e.errno == errno.EPIPE: @@ -854,7 +855,7 @@ def move_to_holding(suite, queue_dir): return Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file]) Upload.dump_vars(queue_dir) - move_to_dir(queue_dir) + move_to_dir(queue_dir, perms=0664) os.unlink(Upload.pkg.changes_file[:-8]+".dak") def _accept(): diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py index 8f9857f4..9301497b 100755 --- a/dak/process_unchecked.py +++ b/dak/process_unchecked.py @@ -1003,10 +1003,10 @@ def lookup_uid_from_fingerprint(fpr): """ Return the uid,name,isdm for a given gpg fingerprint - @ptype fpr: string + @type fpr: string @param fpr: a 40 byte GPG fingerprint - @return (uid, name, isdm) + @return: (uid, name, isdm) """ cursor = DBConn().cursor() cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr)) @@ -1014,7 +1014,7 @@ def lookup_uid_from_fingerprint(fpr): if qs: return qs else: - return (None, None, None) + return (None, None, False) def check_signed_by_key(): """Ensure the .changes is signed by an authorized uploader.""" @@ -1024,17 +1024,22 @@ def check_signed_by_key(): uid_name = "" # match claimed name with actual name: - if uid == None: + if uid is None: + # This is fundamentally broken but need us to refactor how we get + # the UIDs/Fingerprints in order for us to fix it properly uid, uid_email = changes["fingerprint"], uid may_nmu, may_sponsor = 1, 1 # XXX by default new dds don't have a fingerprint/uid in the db atm, # and can't get one in there if we don't allow nmu/sponsorship - elif is_dm is "t": - uid_email = uid - may_nmu, may_sponsor = 0, 0 - else: + elif is_dm is False: + # If is_dm is False, we allow full upload rights uid_email = "%s@debian.org" % (uid) may_nmu, may_sponsor = 1, 1 + else: + # Assume limited upload rights unless we've discovered otherwise + uid_email = uid + may_nmu, may_sponsor = 0, 0 + if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]: sponsored = 0 @@ -1053,6 +1058,7 @@ def check_signed_by_key(): if sponsored and not may_sponsor: reject("%s is not authorised to sponsor uploads" % (uid)) + cursor = DBConn().cursor() if not sponsored and not may_nmu: source_ids = [] cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes ) diff --git a/daklib/binary.py b/daklib/binary.py index 88d78761..bd7f1cc1 100755 --- 
--- a/daklib/binary.py
+++ b/daklib/binary.py
@@ -56,10 +56,10 @@ import utils
 class Binary(object):
     def __init__(self, filename, reject=None):
         """
-        @ptype filename: string
+        @type filename: string
         @param filename: path of a .deb
 
-        @ptype reject: function
+        @type reject: function
         @param reject: a function to log reject messages to
         """
         self.filename = filename
@@ -165,12 +165,12 @@ class Binary(object):
         the hopefully near future, it should also include gathering info from
         the control file.
 
-        @ptype bootstrap_id: int
+        @type bootstrap_id: int
         @param bootstrap_id: the id of the binary these packages
           should be associated or zero meaning we are not bootstrapping
           so insert into a temporary table
 
-        @return True if the deb is valid and contents were imported
+        @return: True if the deb is valid and contents were imported
         """
         result = False
         rejected = not self.valid_deb(relaxed)
@@ -212,12 +212,11 @@ class Binary(object):
         the hopefully near future, it should also include gathering info from
         the control file.
 
-        @ptype bootstrap_id: int
-        @param bootstrap_id: the id of the binary these packages
-          should be associated or zero meaning we are not bootstrapping
-          so insert into a temporary table
+        @type package: string
+        @param package: the name of the package to be checked
 
-        @return True if the deb is valid and contents were imported
+        @rtype: boolean
+        @return: True if the deb is valid and contents were imported
         """
         rejected = not self.valid_deb(True)
         self.__unpack()
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 21bcbb17..b2913722 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -203,8 +203,8 @@ class DBConn(Singleton):
         Returns database id for given override C{type}.
         Results are kept in a cache during runtime to minimize database queries.
 
-        @type type: string
-        @param type: The name of the override type
+        @type override_type: string
+        @param override_type: The name of the override type
 
         @rtype: int
         @return: the database id for the given override type
@@ -500,10 +500,10 @@ class DBConn(Singleton):
         @type bin_id: int
         @param bin_id: the id of the binary
 
-        @type fullpath: string
-        @param fullpath: the path of the file being associated with the binary
+        @type fullpaths: list
+        @param fullpaths: the list of paths of the file being associated with the binary
 
-        @return True upon success
+        @return: True upon success
         """
 
         c = self.db_con.cursor()
@@ -541,7 +541,7 @@ class DBConn(Singleton):
         @type fullpaths: list
        @param fullpaths: the list of paths of the file being associated with the binary
 
-        @return True upon success
+        @return: True upon success
         """
 
         c = self.db_con.cursor()
diff --git a/daklib/utils.py b/daklib/utils.py
index c1be6b90..651e13ae 100755
--- a/daklib/utils.py
+++ b/daklib/utils.py
@@ -260,6 +260,7 @@ def create_hash(where, files, hashname, hashfunc):
             file_handle = open_file(f)
         except CantOpenError:
             rejmsg.append("Could not open file %s for checksumming" % (f))
+            continue
 
         files[f][hash_key(hashname)] = hashfunc(file_handle)
 
diff --git a/docs/README.quotes b/docs/README.quotes
index 55c42be0..973cde3f 100644
--- a/docs/README.quotes
+++ b/docs/README.quotes
@@ -345,10 +345,6 @@ Canadians: This is a lighthouse. Your call.
 
 %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 
-oh no, Ganneff has just corrected my english
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
 I often wonder if we should use NSA bot or something instead and get dinstall
 to send emails telling us about its progress :-)
 dinstall: I'm processing openoffice
 dinstall: I'm choking, please help me
diff --git a/docs/TODO b/docs/TODO
index f142c66b..3a99ad94 100644
--- a/docs/TODO
+++ b/docs/TODO
@@ -3,6 +3,7 @@ Various
 -------
 
+ * Implement autosigning, see ftpmaster_autosigning on ftp-master host in text/.
  * Check TODO.old and move still-valid/useful entries over here.
 
diff --git a/scripts/debian/expire_dumps b/scripts/debian/expire_dumps
index 9fa6adeb..2907ba28 100755
--- a/scripts/debian/expire_dumps
+++ b/scripts/debian/expire_dumps
@@ -40,6 +40,7 @@ RULES = [
     # keep 14 days, all each day
     # keep 31 days, 1 each 7th day
     # keep 365 days, 1 each 31th day
+    # keep 3650 days, 1 each 365th day
 ]
 
 TODAY = datetime.today()