import errno
import os
-import pg
import stat
import sys
import time
import commands
import shutil
import textwrap
-import tempfile
from types import *
import yaml
self.pkg.reset()
def package_info(self):
    """
    Format various messages from this Upload to send to the maintainer.

    Concatenates the reject reasons, warnings and notes collected on this
    upload into a single human-readable string, one titled section per
    non-empty list.

    @return: formatted message string ('' when there is nothing to report)
    """
    # (title, list-of-lines) pairs, emitted in this order.
    msgs = (
        ('Reject Reasons', self.rejects),
        ('Warnings', self.warnings),
        ('Notes', self.notes),
    )

    msg = ''
    for title, messages in msgs:
        if messages:
            # Leading blank lines separate this section from whatever
            # precedes it in the final mail.
            msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))

    return msg
# If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
if not self.pkg.changes.has_key("architecture") or not \
- isinstance(self.pkg.changes["architecture"], DictType):
+ isinstance(self.pkg.changes["architecture"], dict):
self.pkg.changes["architecture"] = { "Unknown" : "" }
# and maintainer2047 may not exist.
fix_maintainer (self.pkg.changes["maintainer"])
except ParseMaintError, msg:
self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
- % (filename, changes["maintainer"], msg))
+ % (filename, self.pkg.changes["maintainer"], msg))
# ...likewise for the Changed-By: field if it exists.
try:
# Check there isn't already a changes file of the same name in one
# of the queue directories.
base_filename = os.path.basename(filename)
- for d in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
- if os.path.exists(os.path.join(Cnf["Dir::Queue::%s" % (d) ], base_filename)):
- self.rejects.append("%s: a file with this name already exists in the %s directory." % (base_filename, d))
+ if get_knownchange(base_filename):
+ self.rejects.append("%s: a file with this name already exists." % (base_filename))
# Check the .changes is non-empty
if not self.pkg.files:
# Validate the component
if not get_component(entry["component"], session):
- self.rejects.append("file '%s' has unknown component '%s'." % (f, component))
+ self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
return
# See if the package is NEW
location = cnf["Dir::Pool"]
l = get_location(location, entry["component"], archive, session)
if l is None:
- self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (component, archive))
+ self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s, Archive: %s)" % (entry["component"], archive))
entry["location id"] = -1
else:
entry["location id"] = l.location_id
if not os.path.exists(src):
return
ftype = m.group(3)
- if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
- pkg.orig_files[f].has_key("path"):
+ if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
+ self.pkg.orig_files[f].has_key("path"):
continue
dest = os.path.join(os.getcwd(), f)
os.symlink(src, dest)
self.ensure_hashes()
###########################################################################
def ensure_orig(self, target_dir='.', session=None):
    """
    Ensure that all orig tarballs mentioned in the .dsc are present in
    target_dir. If they do not exist there, they are symlinked into place
    from the pool or, failing that, from one of the queue directories.

    @type target_dir: string
    @param target_dir: directory the orig files must end up in

    @type session: SQLAlchemy session or None
    @param session: database session to use; when None a private session
                    is opened and closed by this method

    @return: a list of the symlinks that were created (so the caller can
             remove them again later)
    """

    symlinked = []
    cnf = Config()

    # Open our own session once, up front, only if the caller did not
    # supply one.  (Previously a fresh session was opened and closed for
    # every orig file, which was wasteful.)
    session_ = session
    if session is None:
        session_ = DBConn().session()

    for filename, entry in self.pkg.dsc_files.iteritems():
        if not re_is_orig_source.match(filename):
            # File is not an orig; ignore
            continue

        # Check in target_dir, not the current directory, so the
        # docstring contract holds for target_dir != '.'.
        if os.path.exists(os.path.join(target_dir, filename)):
            # File already present; no need to symlink anything
            continue

        def symlink_if_valid(path):
            # Symlink 'path' into target_dir, but only if its size and
            # md5sum match what the .dsc declares for this orig file.
            f = utils.open_file(path)
            md5sum = apt_pkg.md5sum(f)
            f.close()

            fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
            expected = (int(entry['size']), entry['md5sum'])

            if fingerprint != expected:
                return False

            dest = os.path.join(target_dir, filename)

            os.symlink(path, dest)
            symlinked.append(dest)

            return True

        found = False

        # Look in the pool first.
        for poolfile in get_poolfile_like_name('/%s' % filename, session_):
            poolfile_path = os.path.join(
                poolfile.location.path, poolfile.filename
            )

            if symlink_if_valid(poolfile_path):
                found = True
                break

        if found:
            continue

        # Not in the pool; look in some other queues for the file.
        queues = ('Accepted', 'New', 'Byhand', 'ProposedUpdates',
            'OldProposedUpdates', 'Embargoed', 'Unembargoed')

        for queue in queues:
            if not cnf.get('Dir::Queue::%s' % queue):
                continue

            queuefile_path = os.path.join(
                cnf['Dir::Queue::%s' % queue], filename
            )

            if not os.path.exists(queuefile_path):
                # Does not exist in this queue
                continue

            if symlink_if_valid(queuefile_path):
                break

    # Only close the session if we opened it ourselves.
    if session is None:
        session_.close()

    return symlinked
+
def check_lintian(self):
+ cnf = Config()
+
+ # Don't reject binary uploads
+ if not self.pkg.changes['architecture'].has_key('source'):
+ return
+
# Only check some distributions
valid_dist = False
for dist in ('unstable', 'experimental'):
if not valid_dist:
return
- cnf = Config()
tagfile = cnf.get("Dinstall::LintianTags")
if tagfile is None:
# We don't have a tagfile, so just don't do anything.
return
+
# Parse the yaml file
sourcefile = file(tagfile, 'r')
sourcecontent = sourcefile.read()
utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
return
+ # Try and find all orig mentioned in the .dsc
+ symlinked = self.ensure_orig()
+
# Now setup the input file for lintian. lintian wants "one tag per line" only,
# so put it together like it. We put all types of tags in one file and then sort
# through lintians output later to see if its a fatal tag we detected, or not.
# to then parse it.
command = "lintian --show-overrides --tags-from-file %s %s" % (temp_filename, self.pkg.changes_file)
(result, output) = commands.getstatusoutput(command)
- # We are done with lintian, remove our tempfile
+
+ # We are done with lintian, remove our tempfile and any symlinks we created
os.unlink(temp_filename)
+ for symlink in symlinked:
+ os.unlink(symlink)
+
if (result == 2):
utils.warn("lintian failed for %s [return code: %s]." % (self.pkg.changes_file, result))
utils.warn(utils.prefix_multi_line_string(output, " [possible output:] "))
if len(output) == 0:
return
+ def log(*txt):
+ if self.logger:
+ self.logger.log([self.pkg.changes_file, "check_lintian"] + list(txt))
+
# We have output of lintian, this package isn't clean. Lets parse it and see if we
# are having a victim for a reject.
# W: tzdata: binary-without-manpage usr/sbin/tzconfig
elif etag in lintiantags['error']:
# The tag is overriden - but is not allowed to be
self.rejects.append("%s: Overriden tag %s found, but this tag may not be overwritten." % (epackage, etag))
+ log("ftpmaster does not allow tag to be overridable", etag)
else:
# Tag is known, it is not overriden, direct reject.
self.rejects.append("%s: Found lintian output: '%s %s', automatically rejected package." % (epackage, etag, etext))
# Now tell if they *might* override it.
if etag in lintiantags['warning']:
+ log("auto rejecting", "overridable", etag)
self.rejects.append("%s: If you have a good reason, you may override this lintian tag." % (epackage))
+ else:
+ log("auto rejecting", "not overridable", etag)
###########################################################################
def check_urgency(self):
rej = False
for f in self.pkg.files.keys():
if self.pkg.files[f].has_key("byhand"):
- self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
+ self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
rej = True
if self.pkg.files[f].has_key("new"):
- self.rejects.append("%s may not upload NEW file %s" % (uid, f))
+ self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
rej = True
if rej:
###########################################################################
def remove(self, from_dir=None):
    """
    Used (for instance) in p-u to remove the package from unchecked.

    Changes the current working directory to from_dir (or to the
    package's own directory when from_dir is None) and unlinks every
    file belonging to this upload.

    @type from_dir: string or None
    @param from_dir: directory to remove the files from; defaults to
                     the package's directory

    @return: 0 on completion
    """
    # NOTE: 'from_dir' rather than 'dir' so we do not shadow the builtin.
    if from_dir is None:
        os.chdir(self.pkg.directory)
    else:
        os.chdir(from_dir)

    for f in self.pkg.files.keys():
        os.unlink(f)

    return 0
################################################################################
- def in_override_p(self, package, component, suite, binary_type, file, session):
+ def in_override_p(self, package, component, suite, binary_type, filename, session):
"""
Check if a package already has override entries in the DB
@type binary_type: string
@param binary_type: type of the package
- @type file: string
- @param file: filename we check
+ @type filename: string
+ @param filename: filename we check
@return: the database result. But noone cares anyway.
# Remember the section and priority so we can check them later if appropriate
if len(result) > 0:
result = result[0]
- self.pkg.files[file]["override section"] = result.section.section
- self.pkg.files[file]["override priority"] = result.priority.priority
+ self.pkg.files[filename]["override section"] = result.section.section
+ self.pkg.files[filename]["override priority"] = result.priority.priority
return result
return None
self.reject.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (file, existent_version, suite, new_version, target_suite))
################################################################################
def check_binary_against_db(self, filename, session):
    """
    Version-check this binary against the archive database.

    Runs the cross-suite version check for the binary's package name on
    its architecture (and 'all'), then rejects the upload if an identical
    package/version/architecture is already present in the archive.

    @type filename: string
    @param filename: key into self.pkg.files for the binary being checked

    @type session: SQLAlchemy session
    @param session: database session to query with
    """
    # Hoist the repeated per-file dict lookup.
    entry = self.pkg.files[filename]

    # Ensure version is sane
    q = session.query(BinAssociation)
    q = q.join(DBBinary).filter(DBBinary.package == entry["package"])
    q = q.join(Architecture).filter(Architecture.arch_string.in_([entry["architecture"], 'all']))

    self.cross_suite_version_check([(x.suite.suite_name, x.binary.version) for x in q.all()],
                                   filename, entry["version"], sourceful=False)

    # Check for any existing copies of the file
    q = session.query(DBBinary).filter_by(package=entry["package"])
    q = q.filter_by(version=entry["version"])
    q = q.join(Architecture).filter_by(arch_string=entry["architecture"])

    if q.count() > 0:
        self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
################################################################################
- def check_source_against_db(self, file, session):
+ def check_source_against_db(self, filename, session):
"""
"""
source = self.pkg.dsc.get("source")
q = q.join(DBSource).filter(DBSource.source==source)
self.cross_suite_version_check([ (x.suite.suite_name, x.source.version) for x in q.all() ],
- file, version, sourceful=True)
+ filename, version, sourceful=True)
################################################################################
- def check_dsc_against_db(self, file, session):
+ def check_dsc_against_db(self, filename, session):
"""
@warning: NB: this function can remove entries from the 'files' index [if
orig_files[dsc_name]["path"] = in_otherdir
if not found:
- self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
+ self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
continue
else:
- self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
+ self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
continue
if actual_md5 != dsc_entry["md5sum"]:
- self.rejects.append("md5sum for %s doesn't match %s." % (found, file))
+ self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
if actual_size != int(dsc_entry["size"]):
- self.rejects.append("size for %s doesn't match %s." % (found, file))
+ self.rejects.append("size for %s doesn't match %s." % (found, filename))
################################################################################
# This is used by process-new and process-holding to recheck a changes file