From: Mark Hymers
Date: Sat, 7 May 2011 12:35:26 +0000 (+0100)
Subject: Merge remote branch 'ftpmaster/master' into multiproc
X-Git-Url: https://git.decadent.org.uk/gitweb/?a=commitdiff_plain;h=99749f86a3ca945f6f9f322f5694255e47ee7809;hp=8d2c584e078a628f7c510b0b375bfb23fc027d5f;p=dak.git

Merge remote branch 'ftpmaster/master' into multiproc

Conflicts:
	dak/generate_packages_sources2.py

Signed-off-by: Mark Hymers
---

diff --git a/config/debian-security/cron.unchecked b/config/debian-security/cron.unchecked
index d519b209..100e5878 100755
--- a/config/debian-security/cron.unchecked
+++ b/config/debian-security/cron.unchecked
@@ -17,8 +17,8 @@ doanything=false
 dopolicy=false
 
 # So first we should go and see if any process-policy action is done
-dak process-policy embargo | mail -a "X-Debian: DAK" -e -s "Automatically accepted from embargoed" team@security.debian.org -- -F "Debian FTP Masters" -f ftonaster@ftp-master.debian.org
-dak process-policy disembargo | mail -a "X-Debian: DAK" -e -s "Automatically accepted from unembargoed" team@security.debian.org -- -F "Debian FTP Masters" -f ftonaster@ftp-master.debian.org
+dak process-policy embargo | mail -a "X-Debian: DAK" -e -s "Automatically accepted from embargoed" team@security.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
+dak process-policy disembargo | mail -a "X-Debian: DAK" -e -s "Automatically accepted from unembargoed" team@security.debian.org -- -F "Debian FTP Masters" -f ftpmaster@ftp-master.debian.org
 
 # Now, if this really did anything, we can then sync it over. Files
 # in newstage mean they are (late) accepts of security stuff, need
diff --git a/dak/generate_index_diffs.py b/dak/generate_index_diffs.py
index 5cac5ccd..83d51824 100755
--- a/dak/generate_index_diffs.py
+++ b/dak/generate_index_diffs.py
@@ -225,7 +225,7 @@ def genchanges(Options, outdir, oldfile, origfile, maxdiffs = 14):
         return
 
     if oldstat[1:3] == origstat[1:3]:
-        print "%s: hardlink unbroken, assuming unchanged" % (origfile)
+        #print "%s: hardlink unbroken, assuming unchanged" % (origfile)
         return
 
     oldf = smartopen(oldfile)
@@ -234,9 +234,9 @@ def genchanges(Options, outdir, oldfile, origfile, maxdiffs = 14):
 
     # should probably early exit if either of these checks fail
     # alternatively (optionally?) could just trim the patch history
-    if upd.filesizesha1:
-        if upd.filesizesha1 != oldsizesha1:
-            print "info: old file " + oldfile + " changed! %s %s => %s %s" % (upd.filesizesha1 + oldsizesha1)
+    #if upd.filesizesha1:
+    #    if upd.filesizesha1 != oldsizesha1:
+    #        print "info: old file " + oldfile + " changed! %s %s => %s %s" % (upd.filesizesha1 + oldsizesha1)
 
     if Options.has_key("CanonicalPath"): upd.can_path=Options["CanonicalPath"]
 
@@ -249,7 +249,7 @@ def genchanges(Options, outdir, oldfile, origfile, maxdiffs = 14):
     if newsizesha1 == oldsizesha1:
         os.unlink(newfile)
         oldf.close()
-        print "%s: unchanged" % (origfile)
+        #print "%s: unchanged" % (origfile)
     else:
         if not os.path.isdir(outdir):
             os.mkdir(outdir)
diff --git a/dak/generate_packages_sources2.py b/dak/generate_packages_sources2.py
index b157fcbe..8908e3cf 100755
--- a/dak/generate_packages_sources2.py
+++ b/dak/generate_packages_sources2.py
@@ -194,7 +194,7 @@ WHERE
   AND o.type = :type_id AND o.suite = :overridesuite AND o.component = :component
 
-ORDER BY tmp.package, tmp.version
+ORDER BY tmp.source, tmp.package, tmp.version
 """
 
 def generate_packages(suite_id, component_id, architecture_id, type_name):
@@ -282,6 +282,8 @@ def main():
         for c in component_ids:
             pool.apply_async(generate_sources, [s.suite_id, c], callback=parse_results)
             for a in s.architectures:
+                if a == 'source':
+                    continue
                 pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'deb'], callback=parse_results)
                 pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'udeb'], callback=parse_results)
 
diff --git a/daklib/dbconn.py b/daklib/dbconn.py
index 6cddd35f..91ae848e 100755
--- a/daklib/dbconn.py
+++ b/daklib/dbconn.py
@@ -33,6 +33,7 @@
 
 ################################################################################
 
+import apt_pkg
 import os
 from os.path import normpath
 import re
@@ -73,7 +74,7 @@ from sqlalchemy.orm.exc import NoResultFound
 # in the database
 from config import Config
 from textutils import fix_maintainer
-from dak_exceptions import DBUpdateError, NoSourceFieldError
+from dak_exceptions import DBUpdateError, NoSourceFieldError, FileExistsError
 
 # suppress some deprecation warnings in squeeze related to sqlalchemy
 import warnings
@@ -890,6 +891,9 @@ class BuildQueue(object):
             else:
                 os.symlink(targetpath, queuepath)
                 qf.fileid = poolfile.file_id
+        except FileExistsError:
+            if not poolfile.identical_to(queuepath):
+                raise
         except OSError:
             return None
 
@@ -948,6 +952,9 @@ class BuildQueue(object):
             # Always copy files from policy queues as they might move around.
             import utils
             utils.copy(source, target)
+        except FileExistsError:
+            if not policyqueuefile.identical_to(target):
+                raise
         except OSError:
             return None
 
@@ -1043,6 +1050,24 @@ class ChangePendingFile(object):
     def __repr__(self):
         return '<ChangePendingFile %s>' % self.change_pending_file_id
 
+    def identical_to(self, filename):
+        """
+        compare size and hash with the given file
+
+        @rtype: bool
+        @return: true if the given file has the same size and hash as this object; false otherwise
+        """
+        st = os.stat(filename)
+        if self.size != st.st_size:
+            return False
+
+        f = open(filename, "r")
+        sha256sum = apt_pkg.sha256sum(f)
+        if sha256sum != self.sha256sum:
+            return False
+
+        return True
+
 __all__.append('ChangePendingFile')
 
 ################################################################################
@@ -1392,6 +1417,24 @@ class PoolFile(ORMObject):
     def not_null_constraints(self):
         return ['filename', 'md5sum', 'location']
 
+    def identical_to(self, filename):
+        """
+        compare size and hash with the given file
+
+        @rtype: bool
+        @return: true if the given file has the same size and hash as this object; false otherwise
+        """
+        st = os.stat(filename)
+        if self.filesize != st.st_size:
+            return False
+
+        f = open(filename, "r")
+        sha256sum = apt_pkg.sha256sum(f)
+        if sha256sum != self.sha256sum:
+            return False
+
+        return True
+
 __all__.append('PoolFile')
 
 @session_wrapper
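
Note for readers of this patch: the new identical_to() methods guard the FileExistsError handlers above by confirming that an already-present file matches the size and SHA-256 digest recorded in the database before the error is treated as harmless. The sketch below illustrates the same check in isolation; it uses hashlib instead of the apt_pkg.sha256sum call used in the patch, and the name files_identical is hypothetical, not part of dak.

import hashlib
import os

def files_identical(path, expected_size, expected_sha256):
    # Reject quickly on a size mismatch before hashing the file contents.
    st = os.stat(path)
    if st.st_size != expected_size:
        return False

    # Hash in chunks so large pool files do not have to fit in memory.
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):
            sha256.update(chunk)
    return sha256.hexdigest() == expected_sha256

Called with the size and hex-encoded SHA-256 stored for a pool or pending file, this returns True only when the on-disk copy is byte-for-byte consistent with the database record.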