FileList "/srv/ftp.debian.org/database/dists/unstable_$(SECTION)_binary-$(ARCH).list";
SourceFileList "/srv/ftp.debian.org/database/dists/unstable_$(SECTION)_source.list";
Sections "main contrib non-free";
- Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+ Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 source";
BinOverride "override.sid.$(SECTION)";
ExtraOverride "override.sid.extra.$(SECTION)";
SrcOverride "override.sid.$(SECTION).src";
FileList "/srv/ftp.debian.org/database/dists/experimental_$(SECTION)_binary-$(ARCH).list";
SourceFileList "/srv/ftp.debian.org/database/dists/experimental_$(SECTION)_source.list";
Sections "main contrib non-free";
- Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc source";
+ Architectures "alpha amd64 armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 source";
BinOverride "override.sid.$(SECTION)";
SrcOverride "override.sid.$(SECTION).src";
};
-
-tree "dists/etch-m68k"
-{
- FakeDI "dists/unstable";
- FileList "/srv/ftp.debian.org/database/dists/etch-m68k_$(SECTION)_binary-$(ARCH).list";
- SourceFileList "/srv/ftp.debian.org/database/dists/etch-m68k_$(SECTION)_source.list";
- Sections "main contrib non-free";
- Architectures "m68k source";
- BinOverride "override.etch.$(SECTION)";
- ExtraOverride "override.etch.extra.$(SECTION)";
- SrcOverride "override.etch.$(SECTION).src";
-};
-
-tree "dists/etch-m68k/main"
-{
- FileList "/srv/ftp.debian.org/database/dists/etch-m68k_main_$(SECTION)_binary-$(ARCH).list";
- Sections "debian-installer";
- Architectures "m68k";
- BinOverride "override.etch.main.$(SECTION)";
- SrcOverride "override.etch.main.src";
- BinCacheDB "packages-debian-installer-$(ARCH).db";
- Packages::Extensions ".udeb";
- Contents "$(DIST)/../Contents-udeb";
-};
-
-tree "dists/etch-m68k/non-free"
-{
- FileList "/srv/ftp.debian.org/database/dists/etch-m68k_non-free_$(SECTION)_binary-$(ARCH).list";
- Sections "debian-installer";
- Architectures "m68k";
- BinOverride "override.etch.main.$(SECTION)";
- SrcOverride "override.etch.main.src";
- BinCacheDB "packages-debian-installer-$(ARCH).db";
- Packages::Extensions ".udeb";
- Contents "$(DIST)/../Contents-udeb-nf";
-};
log "$*"
fi
}
+
+# used by cron.dinstall *and* cron.unchecked.
+# Regenerate the buildd incoming area: run apt-ftparchive over the buildd
+# config, write and sign a Release file, then publish an atomic snapshot
+# of buildd/ under tree/${STAMP} with the builddweb symlink pointing at it.
+# Globals read: configdir, incoming, archs, STAMP (caller must set STAMP).
+function make_buildd_dir () {
+ cd $configdir
+ apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate apt.conf.buildd
+
+ # Write a fresh Release plus detached signature for the buildd dir.
+ cd ${incoming}
+ rm -f buildd/Release*
+ apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd incoming" -o APT::FTPArchive::Release::Architectures="${archs}" release buildd > Release
+ gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o Release.gpg Release
+ mv Release* buildd/.
+
+ # Hardlink-copy (cp -al) into a timestamped snapshot, flip the builddweb
+ # symlink atomically (-T stops ln descending into the old target dir),
+ # then prune every snapshot except the one just created.
+ cd ${incoming}
+ mkdir -p tree/${STAMP}
+ cp -al ${incoming}/buildd/. tree/${STAMP}/
+ ln -sfT tree/${STAMP} ${incoming}/builddweb
+ find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+}
# Create the postgres dump files
function pgdump_pre() {
log "Creating pre-daily-cron-job backup of projectb database..."
- pg_dump projectb > $base/backup/dump_$(date +%Y.%m.%d-%H:%M:%S)
+ pg_dump projectb > $base/backup/dump_pre_$(date +%Y.%m.%d-%H:%M:%S)
}
function pgdump_post() {
apt-ftparchive generate apt.conf.buildd
}
+# Wrapper used by the dinstall stage table: pick a fresh minute-resolution
+# STAMP (global, consumed by make_buildd_dir) and rebuild the buildd
+# incoming dir, so the snapshot swap keeps any 403 window short.
+function buildd_dir() {
+ # Rebuild the buildd dir to avoid long times of 403
+ log "Regenerating the buildd incoming dir"
+ STAMP=$(date "+%Y%m%d%H%M")
+ make_buildd_dir
+}
+
function scripts() {
log "Running various scripts from $scriptsdir"
cd $scriptsdir
ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_projectb dak@merkel.debian.org sleep 1
}
+function merkel3() {
+ # Push dak@merkel to tell it to sync the DD-accessible parts. Returns
+ # immediately; the sync runs detached on the remote side (presumably a
+ # forced command bound to the push_merkel_ddaccess key starts it — the
+ # "sleep 1" payload is only a trigger, not the work itself).
+ log "Trigger merkels dd accessible parts sync"
+ ssh -2 -o BatchMode=yes -o SetupTimeOut=30 -o ConnectTimeout=30 -i ~/.ssh/push_merkel_ddaccess dak@merkel.debian.org sleep 1
+}
+
function runparts() {
log "Using run-parts to run scripts in $base/scripts/distmnt"
run-parts --report $base/scripts/distmnt
cd $configdir
$scriptsdir/update-ftpstats $base/log/* > $base/misc/ftpstats.data
R --slave --vanilla < $base/misc/ftpstats.R
+ dak stats arch-space > $webdir/arch-space
+ dak stats pkg-nums > $webdir/pkg-nums
}
function aptftpcleanup() {
function compress() {
log "Compress old psql backups"
cd $base/backup/
- find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mtime +1 |
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_pre_*' -mtime +2 -print0 | xargs -0 --no-run-if-empty rm
+
+ find -maxdepth 1 -mindepth 1 -type f -name 'dump_*' \! -name '*.bz2' \! -name '*.gz' -mmin 720 |
+ while read dumpname; do
+ echo "Compressing $dumpname"
+ bzip2 -9v "$dumpname"
+ done
+ find -maxdepth 1 -mindepth 1 -type f -name "dumpall_*" \! -name '*.bz2' \! -name '*.gz' -mmin 720 |
while read dumpname; do
echo "Compressing $dumpname"
bzip2 -9v "$dumpname"
done
+ finddup -l -d $base/backup
}
function logstats() {
)
stage $GO
+GO=(
+ FUNC="buildd_dir"
+ TIME="buildd_dir"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="cruft"
TIME="cruft"
)
stage $GO
-GO=(
- FUNC="pgdakdev"
- TIME="dak-dev db"
- ARGS=""
- ERR="false"
-)
-stage $GO
-
GO=(
FUNC="expire"
TIME="expire_dumps"
ARGS=""
ERR="false"
)
+stage $GO
rm -f ${LOCK_BRITNEY}
+GO=(
+ FUNC="pgdakdev"
+ TIME="dak-dev db"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="aptftpcleanup"
TIME="apt-ftparchive cleanup"
)
stage $GO
+GO=(
+ FUNC="merkel3"
+ TIME="merkel ddaccessible sync"
+ ARGS=""
+ ERR="false"
+)
+stage $GO
+
GO=(
FUNC="compress"
TIME="compress"
export SCRIPTVARS=/srv/ftp.debian.org/dak/config/debian/vars
. $SCRIPTVARS
+# common functions are "outsourced"
+. "${configdir}/common"
+
LOCKDAILY=""
LOCKFILE="$lockdir/unchecked.lock"
NOTICE="$lockdir/daily.lock"
cat override.sid.$i.src >> override.sid.all3.src
fi
done
- cd $configdir
- apt-ftparchive -qq -o APT::FTPArchive::Contents=off generate apt.conf.buildd
-
- cd ${incoming}
- rm -f buildd/Release*
- apt-ftparchive -qq -o APT::FTPArchive::Release::Origin="Debian" -o APT::FTPArchive::Release::Label="Debian" -o APT::FTPArchive::Release::Description="buildd incoming" -o APT::FTPArchive::Release::Architectures="${archs}" release buildd > Release
- gpg --secret-keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/secring.gpg --keyring /srv/ftp.debian.org/s3kr1t/dot-gnupg/pubring.gpg --no-options --batch --no-tty --armour --default-key 6070D3A1 --detach-sign -o Release.gpg Release
- mv Release* buildd/.
-
- cd ${incoming}
- mkdir -p tree/${STAMP}
- cp -al ${incoming}/buildd/. tree/${STAMP}/
- ln -sfT tree/${STAMP} ${incoming}/builddweb
- find ./tree -mindepth 1 -maxdepth 1 -not -name "${STAMP}" -type d -print0 | xargs --no-run-if-empty -0 rm -rf
+ make_buildd_dir
. $configdir/cron.buildd
fi
"map testing-security testing-proposed-updates";
"map-unreleased testing unstable";
"map-unreleased testing-proposed-updates unstable";
+ "reject etch-m68k";
};
AutomaticByHandPackages {
s390 "IBM S/390";
sh "Hitatchi SuperH";
sparc "Sun SPARC/UltraSPARC";
+ kfreebsd-i386 "GNU/kFreeBSD i386";
+ kfreebsd-amd64 "GNU/kFreeBSD amd64";
};
Archive
ftpdir=$base/ftp
webdir=$base/web
indices=$ftpdir/indices
-archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc"
+archs="alpha amd64 arm armel hppa hurd-i386 i386 ia64 mips mipsel powerpc s390 sparc kfreebsd-i386 kfreebsd-amd64 "
scriptdir=$base/scripts
masterdir=$base/dak/
before = time.time()
sys.stdout.write("[Deleting from source table... ")
projectB.query("DELETE FROM dsc_files WHERE EXISTS (SELECT 1 FROM source s, files f, dsc_files df WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = df.source AND df.id = dsc_files.id)" % (delete_date))
- projectB.query("DELETE FROM src_uploaders WHERE EXISTS (SELECT 1 FROM source s, files f WHERE f.last_used <= '%s' AND s.file = f.id AND s.id = src_uploaders.source)" % (delete_date))
projectB.query("DELETE FROM source WHERE EXISTS (SELECT 1 FROM files WHERE source.file = files.id AND files.last_used <= '%s')" % (delete_date))
sys.stdout.write("done. (%d seconds)]\n" % (int(time.time()-before)))
f.close()
else:
- utils.warn("No wanna-build dump file for architecture %s", architecture)
+ utils.warn("No wanna-build dump file for architecture %s" % architecture)
return ret
################################################################################
for component in check_components:
architectures = filter(utils.real_arch, database.get_suite_architectures(suite))
for architecture in architectures:
+ if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
+ continue
filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (Cnf["Dir::Root"], suite, component, architecture)
# apt_pkg.ParseTagFile needs a real file handle
(fd, temp_filename) = utils.temp_filename()
+"""
+Database update scripts for usage with B{dak update-db}
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@license: GNU General Public License version 2 or later
+
+Update scripts have to C{import psycopg2} and
+C{from daklib.dak_exceptions import DBUpdateError}.
+
+There has to be B{at least} the function C{do_update(self)} to be
+defined. It should take all necessary steps to update the
+database. If the update fails the changes have to be rolled back and the
+C{DBUpdateError} exception raised to properly halt the execution of any
+other update.
+
+Example::
+ def do_update(self):
+ print "Doing something"
+
+ try:
+ c = self.db.cursor()
+ c.execute("SOME SQL STATEMENT")
+ self.db.commit()
+
+ except psycopg2.ProgrammingError, msg:
+ self.db.rollback()
+ raise DBUpdateError, "Unable to do whatever, rollback issued. Error message : %s" % (str(msg))
+
+This function can do whatever it wants and use everything from dak and
+daklib.
+
+"""
#!/usr/bin/env python
-""" Database Update Script - Saner DM db schema """
-# Copyright (C) 2008 Michael Casadevall <mcasadevall@debian.org>
+"""
+Saner DM db schema
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2009 Mike O'Connor <stew@debian.org>
+Add constraints to src_uploaders
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2009 Mark Hymers <mhy@debian.org>
+@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
################################################################################
+# <mhy> oh no, Ganneff has just corrected my english
################################################################################
################################################################################
def do_update(self):
- print "add package_type enum"
+ print "Add constraints to src_uploaders"
Cnf = get_conf()
try:
c = self.db.cursor()
-
- c.execute("CREATE TYPE package_type AS ENUM('deb','udeb','tdeb', 'dsc')")
- c.execute("ALTER TABLE binaries RENAME COLUMN type to type_text" );
- c.execute("ALTER TABLE binaries ADD COLUMN type package_type" );
- c.execute("UPDATE binaries set type=type_text::package_type" );
- c.execute("ALTER TABLE binaries DROP COLUMN type_text" );
- c.execute("CREATE INDEX binary_type_ids on binaries(type)")
-
+ # Deal with out-of-date src_uploaders entries
+ c.execute("DELETE FROM src_uploaders WHERE source NOT IN (SELECT id FROM source)")
+ c.execute("DELETE FROM src_uploaders WHERE maintainer NOT IN (SELECT id FROM maintainer)")
+ # Add constraints
+ c.execute("ALTER TABLE src_uploaders ADD CONSTRAINT src_uploaders_maintainer FOREIGN KEY (maintainer) REFERENCES maintainer(id) ON DELETE CASCADE")
+ c.execute("ALTER TABLE src_uploaders ADD CONSTRAINT src_uploaders_source FOREIGN KEY (source) REFERENCES source(id) ON DELETE CASCADE")
+ c.execute("UPDATE config SET value = '10' WHERE name = 'db_revision'")
self.db.commit()
except psycopg2.ProgrammingError, msg:
self.db.rollback()
- raise DBUpdateError, "Unable to apply binary type enum update, rollback issued. Error message : %s" % (str(msg))
+ raise DBUpdateError, "Unable to apply suite config updates, rollback issued. Error message : %s" % (str(msg))
#!/usr/bin/env python
# coding=utf8
-""" Database Update Script - debversion """
-# Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-# Copyright © 2008 Roger Leigh <rleigh@debian.org>
+"""
+debversion
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2008 Roger Leigh <rleigh@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#!/usr/bin/env python
-""" Database Update Script - Remove unused versioncmp """
-# Copyright (C) 2008 Michael Casadevall <mcasadevall@debian.org>
-# Copyright (C) 2009 Joerg Jaspert <joerg@debian.org>
+"""
+Remove unused versioncmp
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
#!/usr/bin/env python
"""
-Database Update Script - Get suite_architectures table use sane values
+Get suite_architectures table use sane values
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
#!/usr/bin/env python
"""
-Database Update Script - Fix bin_assoc_by_arch view
+Fix bin_assoc_by_arch view
@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2009 Joerg Jaspert <joerg@debian.org>
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2008 Roger Leigh <rleigh@debian.org>
+Adding content fields
-Debian Archive Kit Database Update Script 2
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2008 Roger Leigh <rleigh@debian.org>
+@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2009 Joerg Jaspert <joerg@debian.org>
+Moving suite config into DB
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
-Debian Archive Kit Database Update Script 7
"""
# This program is free software; you can redistribute it and/or modify
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2009 Joerg Jaspert <joerg@debian.org>
+More suite config into the DB
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# coding=utf8
"""
-Debian Archive Kit Database Update Script
-Copyright © 2008 Michael Casadevall <mcasadevall@debian.org>
-Copyright © 2009 Mike O'Connor <stew@debian.org>
+Pending contents distinguished by arch
-Debian Archive Kit Database Update Script 8
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2008 Michael Casadevall <mcasadevall@debian.org>
+@copyright: 2009 Mike O'Connor <stew@debian.org>
+@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
#!/usr/bin/env python
-""" Script to automate some parts of checking NEW packages """
-# Copyright (C) 2000, 2001, 2002, 2003, 2006 James Troup <james@nocrew.org>
+"""
+Script to automate some parts of checking NEW packages
+
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2000, 2001, 2002, 2003, 2006 James Troup <james@nocrew.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
################################################################################
-import errno, os, pg, re, sys, md5
+import errno
+import os
+import pg
+import re
+import sys
+import md5
import apt_pkg, apt_inst
from daklib import database
from daklib import utils
database.init(Cnf, projectB)
printed_copyrights = {}
+package_relations = {} #: Store relations of packages for later output
# default is to not output html.
use_html = 0
comma_count += 1
return result
-def output_deb_info(suite, filename):
+def output_package_relations ():
+    """
+    Output the package relations, if there is more than one package checked in this run.
+
+    Emits one "package: (field) value" line per recorded Depends/Recommends
+    entry via foldable_output, then clears the module-global
+    package_relations dict so the next run starts empty.  A no-op when
+    fewer than two packages were collected (nothing to compare).
+    """
+
+    if len(package_relations) < 2:
+        # Only list something if we have more than one binary to compare
+        return
+
+    to_print = ""
+    for package in package_relations:
+        for relation in package_relations[package]:
+            to_print += "%-15s: (%s) %s\n" % (package, relation, package_relations[package][relation])
+
+    package_relations.clear()
+    foldable_output("Package relations", "relations", to_print)
+
+def output_deb_info(suite, filename, packagename):
(control, control_keys, section, depends, recommends, arch, maintainer) = read_control(filename)
if control == '':
return formatted_text("no control info")
to_print = ""
+ if not package_relations.has_key(packagename):
+ package_relations[packagename] = {}
for key in control_keys :
if key == 'Depends':
field_value = create_depends_string(suite, depends)
+ package_relations[packagename][key] = field_value
elif key == 'Recommends':
field_value = create_depends_string(suite, recommends)
+ package_relations[packagename][key] = field_value
elif key == 'Section':
field_value = section
elif key == 'Architecture':
foldable_output("control file for %s" % (filename), "binary-%s-control"%packagename,
- output_deb_info(suite, deb_filename), norow=True)
+ output_deb_info(suite, deb_filename, packagename), norow=True)
if is_a_udeb:
foldable_output("skipping lintian check for udeb", "binary-%s-lintian"%packagename,
else:
utils.fubar("Unrecognised file type: '%s'." % (f))
finally:
+ output_package_relations()
if not Options["Html-Output"]:
# Reset stdout here so future less invocations aren't FUBAR
less_fd.close()
def print_sha256_files (tree, files):
print_md5sha_files (tree, files, apt_pkg.sha256sum)
+def write_release_file (relpath, suite, component, origin, label, arch, version="", suite_suffix="", notautomatic=""):
+    """
+    Write a per-architecture Release file at relpath.
+
+    If relpath exists with a hardlink count > 1 it is unlinked first, so
+    rewriting it cannot mutate a hardlinked copy elsewhere in the archive.
+    Version and NotAutomatic lines are emitted only when non-empty, and
+    Component is prefixed with suite_suffix when one is given.  Aborts via
+    utils.fubar on IOError.
+    """
+    try:
+        if os.access(relpath, os.F_OK):
+            if os.stat(relpath).st_nlink > 1:
+                os.unlink(relpath)
+        release = open(relpath, "w")
+    except IOError:
+        utils.fubar("Couldn't write to " + relpath)
+
+    release.write("Archive: %s\n" % (suite))
+    if version != "":
+        release.write("Version: %s\n" % (version))
+
+    if suite_suffix:
+        release.write("Component: %s/%s\n" % (suite_suffix,component))
+    else:
+        release.write("Component: %s\n" % (component))
+
+    release.write("Origin: %s\n" % (origin))
+    release.write("Label: %s\n" % (label))
+    if notautomatic != "":
+        release.write("NotAutomatic: %s\n" % (notautomatic))
+    release.write("Architecture: %s\n" % (arch))
+    release.close()
+
################################################################################
def main ():
else:
rel = "%s/binary-%s/Release" % (sec, arch)
relpath = Cnf["Dir::Root"]+tree+"/"+rel
-
- try:
- if os.access(relpath, os.F_OK):
- if os.stat(relpath).st_nlink > 1:
- os.unlink(relpath)
- release = open(relpath, "w")
- #release = open(longsuite.replace("/","_") + "_" + arch + "_" + sec + "_Release", "w")
- except IOError:
- utils.fubar("Couldn't write to " + relpath)
-
- release.write("Archive: %s\n" % (suite))
- if version != "":
- release.write("Version: %s\n" % (version))
- if suite_suffix:
- release.write("Component: %s/%s\n" % (suite_suffix,sec))
- else:
- release.write("Component: %s\n" % (sec))
- release.write("Origin: %s\n" % (origin))
- release.write("Label: %s\n" % (label))
- if notautomatic != "":
- release.write("NotAutomatic: %s\n" % (notautomatic))
- release.write("Architecture: %s\n" % (arch))
- release.close()
+ write_release_file(relpath, suite, sec, origin, label, arch, version, suite_suffix, notautomatic)
files.append(rel)
if AptCnf.has_key("tree::%s/main" % (tree)):
for arch in AptCnf["tree::%s/%s::Architectures" % (tree,dis)].split():
if arch != "source": # always true
+ rel = "%s/%s/binary-%s/Release" % (dis, sec, arch)
+ relpath = Cnf["Dir::Root"]+tree+"/"+rel
+ write_release_file(relpath, suite, dis, origin, label, arch, version, suite_suffix, notautomatic)
+ files.append(rel)
for cfile in compressnames("tree::%s/%s" % (tree,dis),
"Packages",
"%s/%s/binary-%s/Packages" % (dis, sec, arch)):
#!/usr/bin/env python
-""" Installs Debian packages from queue/accepted into the pool """
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+"""
+Installs Debian packages from queue/accepted into the pool
+@contact: Debian FTP Master <ftpmaster@debian.org>
+@copyright: 2000, 2001, 2002, 2003, 2004, 2006 James Troup <james@nocrew.org>
+@copyright: 2009 Joerg Jaspert <joerg@debian.org>
+@license: GNU General Public License version 2 or later
+
+"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
###############################################################################
-def action ():
+def action (queue=""):
(summary, short_summary) = Upload.build_summaries()
(prompt, answer) = ("", "XXX")
if not installing_to_stable:
install()
else:
- stable_install(summary, short_summary)
+ stable_install(summary, short_summary, queue)
elif answer == 'Q':
sys.exit(0)
################################################################################
-def stable_install (summary, short_summary):
+def stable_install (summary, short_summary, fromsuite="proposed-updates"):
global install_count
- print "Installing to stable."
+ fromsuite = fromsuite.lower()
+ tosuite = "Stable"
+ if fromsuite == "oldstable-proposed-updates":
+ tosuite = "OldStable"
+
+ print "Installing from %s to %s." % (fromsuite, tosuite)
# Begin a transaction; if we bomb out anywhere between here and
# the COMMIT WORK below, the DB won't be changed.
if not ql:
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s) in source table." % (package, version))
source_id = ql[0][0]
- suite_id = database.get_suite_id('proposed-updates')
+ suite_id = database.get_suite_id(fromsuite)
projectB.query("DELETE FROM src_associations WHERE suite = '%s' AND source = '%s'" % (suite_id, source_id))
- suite_id = database.get_suite_id('stable')
+ suite_id = database.get_suite_id(tosuite.lower())
projectB.query("INSERT INTO src_associations (suite, source) VALUES ('%s', '%s')" % (suite_id, source_id))
# Add the binaries to stable (and remove it/them from proposed-updates)
utils.fubar("[INTERNAL ERROR] couldn't find '%s' (%s for %s architecture) in binaries table." % (package, version, architecture))
binary_id = ql[0][0]
- suite_id = database.get_suite_id('proposed-updates')
+ suite_id = database.get_suite_id(fromsuite)
projectB.query("DELETE FROM bin_associations WHERE suite = '%s' AND bin = '%s'" % (suite_id, binary_id))
- suite_id = database.get_suite_id('stable')
+ suite_id = database.get_suite_id(tosuite.lower())
projectB.query("INSERT INTO bin_associations (suite, bin) VALUES ('%s', '%s')" % (suite_id, binary_id))
projectB.query("COMMIT WORK")
utils.move (pkg.changes_file, Cnf["Dir::Morgue"] + '/process-accepted/' + os.path.basename(pkg.changes_file))
## Update the Stable ChangeLog file
- new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + ".ChangeLog"
- changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::Stable::ChangeLogBase"] + "ChangeLog"
+ new_changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + ".ChangeLog"
+ changelog_filename = Cnf["Dir::Root"] + Cnf["Suite::%s::ChangeLogBase" % (tosuite)] + "ChangeLog"
if os.path.exists(new_changelog_filename):
os.unlink (new_changelog_filename)
new_changelog = utils.open_file(new_changelog_filename, 'w')
for newfile in files.keys():
if files[newfile]["type"] == "deb":
- new_changelog.write("stable/%s/binary-%s/%s\n" % (files[newfile]["component"], files[newfile]["architecture"], newfile))
+ new_changelog.write("%s/%s/binary-%s/%s\n" % (tosuite.lower(), files[newfile]["component"], files[newfile]["architecture"], newfile))
elif re_issource.match(newfile):
- new_changelog.write("stable/%s/source/%s\n" % (files[newfile]["component"], newfile))
+ new_changelog.write("%s/%s/source/%s\n" % (tosuite.lower(), files[newfile]["component"], newfile))
else:
new_changelog.write("%s\n" % (newfile))
chop_changes = re_fdnic.sub("\n", changes["changes"])
install_count += 1
if not Options["No-Mail"] and changes["architecture"].has_key("source"):
- Subst["__SUITE__"] = " into stable"
+ Subst["__SUITE__"] = " into %s" % (tosuite)
Subst["__SUMMARY__"] = summary
mail_message = utils.TemplateSubst(Subst,Cnf["Dir::Templates"]+"/process-accepted.install")
utils.send_mail(mail_message)
Upload.announce(short_summary, 1)
# Finally remove the .dak file
- dot_dak_file = os.path.join(Cnf["Suite::Proposed-Updates::CopyDotDak"], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
+ dot_dak_file = os.path.join(Cnf["Suite::%s::CopyDotDak" % (fromsuite)], os.path.basename(Upload.pkg.changes_file[:-8]+".dak"))
os.unlink(dot_dak_file)
################################################################################
-def process_it (changes_file):
+def process_it (changes_file, queue=""):
global reject_message
reject_message = ""
if installing_to_stable:
old = Upload.pkg.changes_file
Upload.pkg.changes_file = os.path.basename(old)
- os.chdir(Cnf["Suite::Proposed-Updates::CopyDotDak"])
+ os.chdir(Cnf["Suite::%s::CopyDotDak" % (queue)])
Upload.init_vars()
Upload.update_vars()
Upload.pkg.changes_file = old
check()
- action()
+ action(queue)
# Restore CWD
os.chdir(pkg.directory)
utils.fubar("Archive maintenance in progress. Try again later.")
# If running from within proposed-updates; assume an install to stable
- if os.getcwd().find('proposed-updates') != -1:
+ queue = ""
+ if os.getenv('PWD').find('oldstable-proposed-updates') != -1:
+ queue = "Oldstable-Proposed-Updates"
+ installing_to_stable = 1
+ elif os.getenv('PWD').find('proposed-updates') != -1:
+ queue = "Proposed-Updates"
installing_to_stable = 1
# Obtain lock if not in no-action mode and initialize the log
# Process the changes files
for changes_file in changes_files:
print "\n" + changes_file
- process_it (changes_file)
+ process_it (changes_file, queue)
if install_count:
sets = "set"
elif ftype == "dsc":
examine_package.check_dsc(changes['distribution'], f)
finally:
+ examine_package.output_package_relations()
sys.stdout = stdout_fd
except IOError, e:
if e.errno == errno.EPIPE:
return
Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
Upload.dump_vars(queue_dir)
- move_to_dir(queue_dir)
+ move_to_dir(queue_dir, perms=0664)
os.unlink(Upload.pkg.changes_file[:-8]+".dak")
def _accept():
"""
Return the uid,name,isdm for a given gpg fingerprint
- @ptype fpr: string
+ @type fpr: string
@param fpr: a 40 byte GPG fingerprint
- @return (uid, name, isdm)
+ @return: (uid, name, isdm)
"""
cursor = DBConn().cursor()
cursor.execute( "SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
if qs:
return qs
else:
- return (None, None, None)
+ return (None, None, False)
def check_signed_by_key():
"""Ensure the .changes is signed by an authorized uploader."""
uid_name = ""
# match claimed name with actual name:
- if uid == None:
+ if uid is None:
+ # This is fundamentally broken but need us to refactor how we get
+ # the UIDs/Fingerprints in order for us to fix it properly
uid, uid_email = changes["fingerprint"], uid
may_nmu, may_sponsor = 1, 1
# XXX by default new dds don't have a fingerprint/uid in the db atm,
# and can't get one in there if we don't allow nmu/sponsorship
- elif is_dm is "t":
- uid_email = uid
- may_nmu, may_sponsor = 0, 0
- else:
+ elif is_dm is False:
+ # If is_dm is False, we allow full upload rights
uid_email = "%s@debian.org" % (uid)
may_nmu, may_sponsor = 1, 1
+ else:
+ # Assume limited upload rights unless we've discovered otherwise
+ uid_email = uid
+ may_nmu, may_sponsor = 0, 0
+
if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]:
sponsored = 0
if sponsored and not may_sponsor:
reject("%s is not authorised to sponsor uploads" % (uid))
+ cursor = DBConn().cursor()
if not sponsored and not may_nmu:
source_ids = []
cursor.execute( "SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = %(source)s AND s.dm_upload_allowed = 'yes'", changes )
class Binary(object):
def __init__(self, filename, reject=None):
"""
- @ptype filename: string
+ @type filename: string
@param filename: path of a .deb
- @ptype reject: function
+ @type reject: function
@param reject: a function to log reject messages to
"""
self.filename = filename
the hopefully near future, it should also include gathering info from the
control file.
- @ptype bootstrap_id: int
+ @type bootstrap_id: int
@param bootstrap_id: the id of the binary these packages
should be associated or zero meaning we are not bootstrapping
so insert into a temporary table
- @return True if the deb is valid and contents were imported
+ @return: True if the deb is valid and contents were imported
"""
result = False
rejected = not self.valid_deb(relaxed)
the hopefully near future, it should also include gathering info from the
control file.
- @ptype bootstrap_id: int
- @param bootstrap_id: the id of the binary these packages
- should be associated or zero meaning we are not bootstrapping
- so insert into a temporary table
+ @type package: string
+ @param package: the name of the package to be checked
- @return True if the deb is valid and contents were imported
+ @rtype: boolean
+ @return: True if the deb is valid and contents were imported
"""
rejected = not self.valid_deb(True)
self.__unpack()
Returns database id for given override C{type}.
Results are kept in a cache during runtime to minimize database queries.
- @type type: string
- @param type: The name of the override type
+ @type override_type: string
+ @param override_type: The name of the override type
@rtype: int
@return: the database id for the given override type
@type bin_id: int
@param bin_id: the id of the binary
- @type fullpath: string
- @param fullpath: the path of the file being associated with the binary
+ @type fullpaths: list
+ @param fullpaths: the list of paths of the file being associated with the binary
- @return True upon success
+ @return: True upon success
"""
c = self.db_con.cursor()
@type fullpaths: list
@param fullpaths: the list of paths of the file being associated with the binary
- @return True upon success
+ @return: True upon success
"""
c = self.db_con.cursor()
file_handle = open_file(f)
except CantOpenError:
rejmsg.append("Could not open file %s for checksumming" % (f))
+ continue
files[f][hash_key(hashname)] = hashfunc(file_handle)
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-<mhy> oh no, Ganneff has just corrected my english
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
<mhy> I often wonder if we should use NSA bot or something instead and get dinstall to send emails telling us about its progress :-)
<mhy> dinstall: I'm processing openoffice
<mhy> dinstall: I'm choking, please help me
Various
-------
+
* Implement autosigning, see ftpmaster_autosigning on ftp-master host in text/.
* Check TODO.old and move still-valid/useful entries over here.
# keep 14 days, all each day
# keep 31 days, 1 each 7th day
# keep 365 days, 1 each 31th day
+ # keep 3650 days, 1 each 365th day
]
TODAY = datetime.today()