X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Fqueue.py;h=7e7f468fea248d5b402a8c492c7d1c8c4017bf1a;hb=2d222c33d1b6453bf117a0c8cc2f791fabe4e3a4;hp=d70e60b190d35195434ca031e50435f10c8c0ff7;hpb=28f3e6f0659504214d482d682f4b2b4791872cc6;p=dak.git

diff --git a/daklib/queue.py b/daklib/queue.py
index d70e60b1..7e7f468f 100755
--- a/daklib/queue.py
+++ b/daklib/queue.py
@@ -26,7 +26,6 @@ Queue utility functions for dak
 
 ###############################################################################
 
-import cPickle
 import errno
 import os
 import pg
@@ -39,6 +38,7 @@ import utils
 import commands
 import shutil
 import textwrap
+import tempfile
 from types import *
 
 import yaml
@@ -72,7 +72,7 @@ def get_type(f, session):
     """
     # Determine the type
     if f.has_key("dbtype"):
-        file_type = file["dbtype"]
+        file_type = f["dbtype"]
     elif re_source_ext.match(f["type"]):
         file_type = "dsc"
     else:
@@ -452,9 +452,8 @@ class Upload(object):
         # Check there isn't already a changes file of the same name in one
         # of the queue directories.
         base_filename = os.path.basename(filename)
-        for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
-            if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
-                self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
+        if get_knownchange(base_filename):
+            self.rejects.append("%s: a file with this name already exists." % (base_filename))
 
         # Check the .changes is non-empty
         if not self.pkg.files:
@@ -999,11 +998,6 @@ class Upload(object):
         for field_name in [ "build-depends", "build-depends-indep" ]:
             field = self.pkg.dsc.get(field_name)
             if field:
-                # Check for broken dpkg-dev lossage...
-                if field.startswith("ARRAY"):
-                    self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
-                        (dsc_filename, field_name.title()))
-
                 # Have apt try to parse them...
                 try:
                     apt_pkg.ParseSrcDepends(field)
@@ -1055,8 +1049,8 @@ class Upload(object):
                 if not os.path.exists(src):
                     return
                 ftype = m.group(3)
-                if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
-                   pkg.orig_files[f].has_key("path"):
+                if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
+                   self.pkg.orig_files[f].has_key("path"):
                     continue
                 dest = os.path.join(os.getcwd(), f)
                 os.symlink(src, dest)
@@ -1074,7 +1068,7 @@ class Upload(object):
         (result, output) = commands.getstatusoutput(cmd)
         if (result != 0):
             self.rejects.append("'dpkg-source -x' failed for %s [return code: %s]." % (dsc_filename, result))
-            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "), "")
+            self.rejects.append(utils.prefix_multi_line_string(output, " [dpkg-source output:] "))
             return
 
         if not cnf.Find("Dir::Queue::BTSVersionTrack"):
@@ -1200,6 +1194,194 @@ class Upload(object):
 
         self.ensure_hashes()
 
+    ###########################################################################
+
+    def ensure_orig(self, target_dir='.', session=None):
+        """
+        Ensures that all orig files mentioned in the changes file are present
+        in target_dir. If they do not exist, they are symlinked into place.
+
+        A list containing the symlinks that were created is returned (so they
+        can be removed).
+        """
+
+        symlinked = []
+        cnf = Config()
+
+        for filename, entry in self.pkg.dsc_files.iteritems():
+            if not re_is_orig_source.match(filename):
+                # File is not an orig; ignore
+                continue
+
+            if os.path.exists(filename):
+                # File exists, no need to continue
+                continue
+
+            def symlink_if_valid(path):
+                f = utils.open_file(path)
+                md5sum = apt_pkg.md5sum(f)
+                f.close()
+
+                fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
+                expected = (int(entry['size']), entry['md5sum'])
+
+                if fingerprint != expected:
+                    return False
+
+                dest = os.path.join(target_dir, filename)
+
+                os.symlink(path, dest)
+                symlinked.append(dest)
+
+                return True
+
+            session_ = session
+            if session is None:
+                session_ = DBConn().session()
+
+            found = False
+
+            # Look in the pool
+            for poolfile in get_poolfile_like_name('/%s' % filename, session_):
+                poolfile_path = os.path.join(
+                    poolfile.location.path, poolfile.filename
+                )
+
+                if symlink_if_valid(poolfile_path):
+                    found = True
+                    break
+
+            if session is None:
+                session_.close()
+
+            if found:
+                continue
+
+            # Look in some other queues for the file
+            queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
+                      'OldProposedUpdates', 'Embargoed', 'Unembargoed')
+
+            for queue in queues:
+                if 'Dir::Queue::%s' % queue not in cnf:
+                    continue
+
+                queuefile_path = os.path.join(
+                    cnf['Dir::Queue::%s' % queue], filename
+                )
+
+                if not os.path.exists(queuefile_path):
+                    # Does not exist in this queue
+                    continue
+
+                if symlink_if_valid(queuefile_path):
+                    break
+
+        return symlinked
+
+    ###########################################################################
+
+    def check_lintian(self):
+        cnf = Config()
+
+        # Only check some distributions
+        valid_dist = False
+        for dist in ('unstable', 'experimental'):
+            if dist in self.pkg.changes['distribution']:
+                valid_dist = True
+                break
+
+        if not valid_dist:
+            return
+
+        tagfile = cnf.get("Dinstall::LintianTags")
+        if tagfile is None:
+            # We don't have a tagfile, so just don't do anything.
+            return
+
+        # Parse the YAML file
+        sourcefile = file(tagfile, 'r')
+        sourcecontent = sourcefile.read()
+        sourcefile.close()
+        try:
+            lintiantags = yaml.load(sourcecontent)['lintian']
+        except yaml.YAMLError, msg:
+            utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
+            return
+
+        # Try to find all orig files mentioned in the .dsc
+        symlinked = self.ensure_orig()
+
+        # Now set up the input file for lintian. lintian wants "one tag per line", so
+        # put it together like that. We put all types of tags in one file and then sort
+        # through lintian's output later to see whether a tag we detected is fatal or not.
+        # That way we only run lintian once over all tags, even if we might reject on
+        # some, but not on others.
+        # Additionally, build up a set of the tags.
+        tags = set()
+        (fd, temp_filename) = utils.temp_filename()
+        temptagfile = os.fdopen(fd, 'w')
+        for tagtype in lintiantags:
+            for tag in lintiantags[tagtype]:
+                temptagfile.write("%s\n" % tag)
+                tags.add(tag)
+        temptagfile.close()
+
+        # Now run lintian against the .changes file, capturing the output
+        # so we can parse it afterwards.
+        command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
+        (result, output) = commands.getstatusoutput(command)
+
+        # We are done with lintian; remove our tempfile and any symlinks we created.
+        os.unlink(temp_filename)
+        for symlink in symlinked:
+            os.unlink(symlink)
+
+        if (result == 2):
+            utils.warn("lintian failed for %s [return code: %s]." \
+                % (self.pkg.changes_file, result))
+            utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
+
+        if len(output) == 0:
+            return
+
+        def log(*txt):
+            if self.logger:
+                self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
+
+        # We have lintian output, so this package isn't clean. Let's parse it and see
+        # whether any of it warrants a reject.
+        # W: tzdata: binary-without-manpage usr/sbin/tzconfig
+        for line in output.split('\n'):
+            m = re_parse_lintian.match(line)
+            if m is None:
+                continue
+
+            etype = m.group(1)
+            epackage = m.group(2)
+            etag = m.group(3)
+            etext = m.group(4)
+
+            # So let's check whether we know the tag at all.
+            if etag not in tags:
+                continue
+
+            if etype == 'O':
+                # We know it and it is overridden. Check whether the override is allowed.
+                if etag in lintiantags['warning']:
+                    # The tag is overridden, and it is allowed to be overridden.
+                    # Don't add a reject message.
+                    pass
+                elif etag in lintiantags['error']:
+                    # The tag is overridden, but it is not allowed to be.
+                    self.rejects.append("%s: Overridden tag %s found, but this tag may not be overridden." % (epackage, etag))
+                    log("overridden tag is not allowed to be overridden", etag)
+            else:
+                # Tag is known and not overridden; reject directly.
+                self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
+                log("auto rejecting", etag)
+                # Now tell them if they *might* override it.
+                if etag in lintiantags['warning']:
+                    self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
+
     ###########################################################################
     def check_urgency(self):
         cnf = Config()
@@ -1653,7 +1835,7 @@ distribution."""
 
         # yes
         # This routine returns None on success or an error on failure
-        res = get_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
+        res = get_or_set_queue('accepted').autobuild_upload(self.pkg, cnf["Dir::Queue::Accepted"])
         if res:
             utils.fubar(res)
 
@@ -1835,7 +2017,7 @@ distribution."""
         if not manual:
             self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
             self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
-            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)\nX-Katie-Rejection: automatic (moo)"
+            self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
             os.write(reason_fd, reject_message)
             reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
         else:
@@ -2253,7 +2435,7 @@ distribution."""
         self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
         self.Subst["__REJECT_MESSAGE__"] = self.package_info()
         self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
-        self.Subst["__BCC__"] = "X-DAK: dak process-accepted\nX-Katie: $Revision: 1.18 $"
+        self.Subst["__BCC__"] = "X-DAK: dak process-accepted"
         if cnf.has_key("Dinstall::Bcc"):
             self.Subst["__BCC__"] += "\nBcc: %s" % (cnf["Dinstall::Bcc"])