###############################################################################
-import cPickle
import errno
import os
import pg
from holding import Holding
from dbconn import *
from summarystats import SummaryStats
-from utils import parse_changes
+from utils import parse_changes, check_dsc_files
from textutils import fix_maintainer
from binary import Binary
"""
# Determine the type
if f.has_key("dbtype"):
- file_type = file["dbtype"]
- elif f["type"] in [ "orig.tar.gz", "orig.tar.bz2", "tar.gz", "tar.bz2", "diff.gz", "diff.bz2", "dsc" ]:
+ file_type = f["dbtype"]
+ elif re_source_ext.match(f["type"]):
file_type = "dsc"
else:
utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
###########################################################################
def load_changes(self, filename):
"""
- @rtype boolean
+ @rtype: boolean
@rvalue: whether the changes file was valid or not. We may want to
reject even if this is True (see what gets put in self.rejects).
This is simply to prevent us even trying things later which will
# Check the version and for file overwrites
self.check_binary_against_db(f, session)
- b = Binary(f)
- b.scan_package()
- if len(b.rejects) > 0:
- for j in b.rejects:
- self.rejects.append(j)
+ # Temporarily disable contents generation until we change the table storage layout
+ #b = Binary(f)
+ #b.scan_package()
+ #if len(b.rejects) > 0:
+ # for j in b.rejects:
+ # self.rejects.append(j)
def source_file_checks(self, f, session):
entry = self.pkg.files[f]
self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
# Ensure the source version matches the version in the .changes file
- if entry["type"] == "orig.tar.gz":
+ if re_is_orig_source.match(f):
changes_version = self.pkg.changes["chopversion2"]
else:
changes_version = self.pkg.changes["chopversion"]
self.rejects.append("source only uploads are not supported.")
###########################################################################
- def check_dsc(self, action=True):
+ def check_dsc(self, action=True, session=None):
"""Returns bool indicating whether or not the source changes are valid"""
# Ensure there is source to check
if not self.pkg.changes["architecture"].has_key("source"):
if not re_valid_version.match(self.pkg.dsc["version"]):
self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
- # Bumping the version number of the .dsc breaks extraction by stable's
- # dpkg-source. So let's not do that...
- if self.pkg.dsc["format"] != "1.0":
- self.rejects.append("%s: incompatible 'Format' version produced by a broken version of dpkg-dev 1.9.1{3,4}." % (dsc_filename))
+ # Only a limited list of source formats is allowed in each suite
+ for dist in self.pkg.changes["distribution"].keys():
+ allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
+ if self.pkg.dsc["format"] not in allowed:
+ self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
# Validate the Maintainer field
try:
for field_name in [ "build-depends", "build-depends-indep" ]:
field = self.pkg.dsc.get(field_name)
if field:
- # Check for broken dpkg-dev lossage...
- if field.startswith("ARRAY"):
- self.rejects.append("%s: invalid %s field produced by a broken version of dpkg-dev (1.10.11)" % \
- (dsc_filename, field_name.title()))
-
# Have apt try to parse them...
try:
apt_pkg.ParseSrcDepends(field)
if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
- # Ensure there is a .tar.gz in the .dsc file
- has_tar = False
- for f in self.pkg.dsc_files.keys():
- m = re_issource.match(f)
- if not m:
- self.rejects.append("%s: %s in Files field not recognised as source." % (dsc_filename, f))
- continue
- ftype = m.group(3)
- if ftype == "orig.tar.gz" or ftype == "tar.gz":
- has_tar = True
-
- if not has_tar:
- self.rejects.append("%s: no .tar.gz or .orig.tar.gz in 'Files' field." % (dsc_filename))
+ # Ensure the Files field contains only what's expected
+ self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
# Ensure source is newer than existing source in target suites
session = DBConn().session()
if not os.path.exists(src):
return
ftype = m.group(3)
- if ftype == "orig.tar.gz" and self.pkg.orig_tar_gz:
+ if re_is_orig_source.match(f) and pkg.orig_files.has_key(f) and \
+ pkg.orig_files[f].has_key("path"):
continue
dest = os.path.join(os.getcwd(), f)
os.symlink(src, dest)
- # If the orig.tar.gz is not a part of the upload, create a symlink to the
- # existing copy.
- if self.pkg.orig_tar_gz:
- dest = os.path.join(os.getcwd(), os.path.basename(self.pkg.orig_tar_gz))
- os.symlink(self.pkg.orig_tar_gz, dest)
+ # If the orig files are not a part of the upload, create symlinks to the
+ # existing copies.
+ for orig_file in self.pkg.orig_files.keys():
+ if not self.pkg.orig_files[orig_file].has_key("path"):
+ continue
+ dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
+ os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
# Extract the source
cmd = "dpkg-source -sn -x %s" % (dsc_filename)
# We should probably scrap or rethink the whole reprocess thing
# Bail out if:
# a) there's no source
- # or b) reprocess is 2 - we will do this check next time when orig.tar.gz is in 'files'
- # or c) the orig.tar.gz is MIA
+ # or b) reprocess is 2 - we will do this check next time when orig
+ # tarball is in 'files'
+ # or c) the orig files are MIA
if not self.pkg.changes["architecture"].has_key("source") or self.reprocess == 2 \
- or self.pkg.orig_tar_gz == -1:
+ or len(self.pkg.orig_files) == 0:
return
tmpdir = utils.temp_dirname()
sourcepkg, trans)
if current is not None:
- currentlymsg = "at version %s" % (current)
+ currentlymsg = "at version %s" % (current.version)
else:
currentlymsg = "not present in testing"
rejectmsg += "Transition description: %s\n\n" % (t["reason"])
rejectmsg += "\n".join(textwrap.wrap("""Your package
- is part of a testing transition designed to get %s migrated (it is
- currently %s, we need version %s). This transition is managed by the
- Release Team, and %s is the Release-Team member responsible for it.
- Please mail debian-release@lists.debian.org or contact %s directly if you
- need further assistance. You might want to upload to experimental until this
- transition is done."""
+is part of a testing transition designed to get %s migrated (it is
+currently %s, we need version %s). This transition is managed by the
+Release Team, and %s is the Release-Team member responsible for it.
+Please mail debian-release@lists.debian.org or contact %s directly if you
+need further assistance. You might want to upload to experimental until this
+transition is done."""
% (source, currentlymsg, expected,t["rm"], t["rm"])))
self.rejects.append(rejectmsg)
for bug in bugs:
summary += "%s " % (bug)
if action:
+ self.update_subst()
self.Subst["__BUG_NUMBER__"] = bug
if self.pkg.changes["distribution"].has_key("stable"):
self.Subst["__STABLE_WARNING__"] = """
distribution."""
else:
self.Subst["__STABLE_WARNING__"] = ""
- mail_message = utils.TemplateSubst(self.Subst, template)
- utils.send_mail(mail_message)
+ mail_message = utils.TemplateSubst(self.Subst, template)
+ utils.send_mail(mail_message)
# Clear up after ourselves
del self.Subst["__BUG_NUMBER__"]
summary += "Announcing to %s\n" % (announce_list)
if action:
+ self.update_subst()
self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
if cnf.get("Dinstall::TrackingServer") and \
self.pkg.changes["architecture"].has_key("source"):
# Send accept mail, announce to lists, close bugs and check for
# override disparities
if not cnf["Dinstall::Options::No-Mail"]:
+ self.update_subst()
self.Subst["__SUITE__"] = ""
self.Subst["__SUMMARY__"] = summary
mail_message = utils.TemplateSubst(self.Subst, accepttemplate)
overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
+ self.update_subst()
self.Subst["__SUMMARY__"] = summary
mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
utils.send_mail(mail_message)
rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
+ self.update_subst()
if not manual:
self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
- self.Subst["__CC__"] = "Cc: " + Cnf["Dinstall::MyEmailAddress"]
+ self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
# Write the rejection email out as the <foo>.reason file
os.write(reason_fd, reject_mail_message)
"""
@warning: NB: this function can remove entries from the 'files' index [if
- the .orig.tar.gz is a duplicate of the one in the archive]; if
+ the orig tarball is a duplicate of the one in the archive]; if
you're iterating over 'files' and call this function as part of
the loop, be sure to add a check to the top of the loop to
ensure you haven't just tried to dereference the deleted entry.
"""
Cnf = Config()
- self.pkg.orig_tar_gz = None
+ self.pkg.orig_files = {} # XXX: do we need to clear it?
+ orig_files = self.pkg.orig_files
# Try and find all files mentioned in the .dsc. This has
# to work harder to cope with the multiple possible
if len(ql) > 0:
# Ignore exact matches for .orig.tar.gz
match = 0
- if dsc_name.endswith(".orig.tar.gz"):
+ if re_is_orig_source.match(dsc_name):
for i in ql:
if self.pkg.files.has_key(dsc_name) and \
int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
# This would fix the stupidity of changing something we often iterate over
# whilst we're doing it
del self.pkg.files[dsc_name]
- self.pkg.orig_tar_gz = os.path.join(i.location.path, i.filename)
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
match = 1
if not match:
self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
- elif dsc_name.endswith(".orig.tar.gz"):
+ elif re_is_orig_source.match(dsc_name):
# Check in the pool
ql = get_poolfile_like_name(dsc_name, session)
# need this for updating dsc_files in install()
dsc_entry["files id"] = x.file_id
# See install() in process-accepted...
- self.pkg.orig_tar_id = x.file_id
- self.pkg.orig_tar_gz = old_file
- self.pkg.orig_tar_location = x.location.location_id
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["id"] = x.file_id
+ orig_files[dsc_name]["path"] = old_file
+ orig_files[dsc_name]["location"] = x.location.location_id
else:
# TODO: Record the queues and info in the DB so we don't hardcode all this crap
# Not there? Check the queue directories...
in_otherdir_fh.close()
actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
found = in_otherdir
- self.pkg.orig_tar_gz = in_otherdir
+ if not orig_files.has_key(dsc_name):
+ orig_files[dsc_name] = {}
+ orig_files[dsc_name]["path"] = in_otherdir
if not found:
self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (file, dsc_name))
- self.pkg.orig_tar_gz = -1
continue
else:
self.rejects.append("%s refers to %s, but I can't find it in the queue." % (file, dsc_name))
def do_unaccept(self):
cnf = Config()
+ self.update_subst()
self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
self.Subst["__REJECT_MESSAGE__"] = self.package_info()
self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]