import commands, errno, fcntl, os, re, shutil, stat, sys, time, tempfile, traceback
import apt_inst, apt_pkg
-import daklib.database as database
-import daklib.logging as logging
-import daklib.queue as queue
-import daklib.utils as utils
+from daklib import database
+from daklib import logging
+from daklib import queue
+from daklib import utils
+from daklib.dak_exceptions import *
from types import *
# Parse the .changes field into a dictionary
try:
changes.update(utils.parse_changes(filename))
- except utils.cant_open_exc:
+ except CantOpenError:
reject("%s: can't read file." % (filename))
return 0
- except utils.changes_parse_error_exc, line:
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
return 0
# Parse the Files field from the .changes into another dictionary
try:
files.update(utils.build_file_list(changes))
- except utils.changes_parse_error_exc, line:
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (filename, line))
- except utils.nk_format_exc, format:
+ except UnknownFormatError, format:
reject("%s: unknown format '%s'." % (filename, format))
return 0
(changes["maintainer822"], changes["maintainer2047"],
changes["maintainername"], changes["maintaineremail"]) = \
utils.fix_maintainer (changes["maintainer"])
- except utils.ParseMaintError, msg:
+ except ParseMaintError, msg:
reject("%s: Maintainer field ('%s') failed to parse: %s" \
% (filename, changes["maintainer"], msg))
(changes["changedby822"], changes["changedby2047"],
changes["changedbyname"], changes["changedbyemail"]) = \
utils.fix_maintainer (changes.get("changed-by", ""))
- except utils.ParseMaintError, msg:
+ except ParseMaintError, msg:
(changes["changedby822"], changes["changedby2047"],
changes["changedbyname"], changes["changedbyemail"]) = \
("", "", "", "")
# Ensure the architecture of the .deb is one we know about.
default_suite = Cnf.get("Dinstall::DefaultSuite", "Unstable")
architecture = control.Find("Architecture")
- if architecture not in Cnf.ValueList("Suite::%s::Architectures" % (default_suite)):
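+ # Also allow architectures that are valid for the suite this upload actually targets
+ # (the first entry of the Distribution field), not just the default suite.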
+ upload_suite = changes["distribution"].keys()[0]
+ if architecture not in Cnf.ValueList("Suite::%s::Architectures" % (default_suite)) and architecture not in Cnf.ValueList("Suite::%s::Architectures" % (upload_suite)):
reject("Unknown architecture '%s'." % (architecture))
# Ensure the architecture of the .deb is one of the ones
# Parse the .dsc file
try:
dsc.update(utils.parse_changes(dsc_filename, signing_rules=1))
- except utils.cant_open_exc:
+ except CantOpenError:
# if not -n copy_to_holding() will have done this for us...
if Options["No-Action"]:
reject("%s: can't read file." % (dsc_filename))
- except utils.changes_parse_error_exc, line:
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
- except utils.invalid_dsc_format_exc, line:
+ except InvalidDscError, line:
reject("%s: syntax error on line %s." % (dsc_filename, line))
# Build up the file list of files mentioned by the .dsc
try:
dsc_files.update(utils.build_file_list(dsc, is_a_dsc=1))
- except utils.no_files_exc:
+ except NoFilesFieldError:
reject("%s: no Files: field." % (dsc_filename))
return 0
- except utils.changes_parse_error_exc, line:
+ except UnknownFormatError, format:
+ reject("%s: unknown format '%s'." % (dsc_filename, format))
+ return 0
+ except ParseChangesError, line:
reject("%s: parse error, can't grok: %s." % (dsc_filename, line))
return 0
# Validate the Maintainer field
try:
utils.fix_maintainer (dsc["maintainer"])
- except utils.ParseMaintError, msg:
+ except ParseMaintError, msg:
reject("%s: Maintainer field ('%s') failed to parse: %s" \
% (dsc_filename, dsc["maintainer"], msg))
files[orig_tar_gz] = {}
files[orig_tar_gz]["size"] = os.stat(orig_tar_gz)[stat.ST_SIZE]
files[orig_tar_gz]["md5sum"] = dsc_files[orig_tar_gz]["md5sum"]
+ files[orig_tar_gz]["sha1sum"] = dsc_files[orig_tar_gz]["sha1sum"]
+ files[orig_tar_gz]["sha256sum"] = dsc_files[orig_tar_gz]["sha256sum"]
files[orig_tar_gz]["section"] = files[dsc_filename]["section"]
files[orig_tar_gz]["priority"] = files[dsc_filename]["priority"]
files[orig_tar_gz]["component"] = files[dsc_filename]["component"]
if changes["architecture"].has_key("source"):
if not changes.has_key("urgency"):
changes["urgency"] = Cnf["Urgency::Default"]
+ changes["urgency"] = changes["urgency"].lower()
if changes["urgency"] not in Cnf.ValueList("Urgency::Valid"):
reject("%s is not a valid urgency; it will be treated as %s by testing." % (changes["urgency"], Cnf["Urgency::Default"]), "Warning: ")
changes["urgency"] = Cnf["Urgency::Default"]
- changes["urgency"] = changes["urgency"].lower()
################################################################################
def check_hashes ():
- # Make sure we recognise the format of the Files: field
- format = changes.get("format", "0.0").split(".",1)
- if len(format) == 2:
- format = int(format[0]), int(format[1])
- else:
- format = int(float(format[0])), 0
-
- check_hash(".changes", files, "md5sum", apt_pkg.md5sum)
- check_hash(".dsc", dsc_files, "md5sum", apt_pkg.md5sum)
-
- if format >= (1,8):
- hashes = [("sha1", apt_pkg.sha1sum),
- ("sha256", apt_pkg.sha256sum)]
- else:
- hashes = []
-
- for x in changes:
- if x.startswith("checksum-"):
- h = x.split("-",1)[1]
- if h not in dict(hashes):
- reject("Unsupported checksum field in .changes" % (h))
-
- for x in dsc:
- if x.startswith("checksum-"):
- h = x.split("-",1)[1]
- if h not in dict(hashes):
- reject("Unsupported checksum field in .dsc" % (h))
-
- for h,f in hashes:
- try:
- fs = utils.build_file_list(changes, 0, "checksums-%s" % h, h)
- check_hash(".changes %s" % (h), fs, h, f, files)
- except utils.no_files_exc:
- reject("No Checksums-%s: field in .changes" % (h))
- except utils.changes_parse_error_exc, line:
- reject("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
-
- if "source" not in changes["architecture"]: continue
-
- try:
- fs = utils.build_file_list(dsc, 1, "checksums-%s" % h, h)
- check_hash(".dsc %s" % (h), fs, h, f, dsc_files)
- except utils.no_files_exc:
- reject("No Checksums-%s: field in .dsc" % (h))
- except utils.changes_parse_error_exc, line:
- reject("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
-
-################################################################################
-
-def check_hash (where, lfiles, key, testfn, basedict = None):
- if basedict:
- for f in basedict.keys():
- if f not in lfiles:
- reject("%s: no %s checksum" % (f, key))
-
- for f in lfiles.keys():
- if basedict and f not in basedict:
- reject("%s: extraneous entry in %s checksums" % (f, key))
-
- try:
- file_handle = utils.open_file(f)
- except utils.cant_open_exc:
- continue
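+ # md5 and size checking is now done by the shared helpers in daklib.utils.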
+ utils.check_hash(".changes", files, "md5", apt_pkg.md5sum)
+ utils.check_size(".changes", files)
+ utils.check_hash(".dsc", dsc_files, "md5", apt_pkg.md5sum)
+ utils.check_size(".dsc", dsc_files)
- # Check hash
- if testfn(file_handle) != lfiles[f][key]:
- reject("%s: %s check failed." % (f, key))
- file_handle.close()
- # Check size
- actual_size = os.stat(f)[stat.ST_SIZE]
- size = int(lfiles[f]["size"])
- if size != actual_size:
- reject("%s: actual file size (%s) does not match size (%s) in %s"
- % (f, actual_size, size, where))
+ # This is stupid API, but it'll have to do for now until
+ # we actually have proper abstraction
+ for m in utils.ensure_hashes(changes, dsc, files, dsc_files):
+ reject(m)
################################################################################
################################################################################
def lookup_uid_from_fingerprint(fpr):
- q = Upload.projectB.query("SELECT u.uid, u.name FROM fingerprint f, uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
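+ # Also fetch whether the key lives in a keyring flagged debian_maintainer, so the
+ # caller can apply the DM upload restrictions.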
+ q = Upload.projectB.query("SELECT u.uid, u.name, k.debian_maintainer FROM fingerprint f JOIN keyrings k ON (f.keyring=k.id), uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr))
qs = q.getresult()
if len(qs) == 0:
- return (None, None)
+ return (None, None, None)
def check_signed_by_key():
"""Ensure the .changes is signed by an authorized uploader."""
- (uid, uid_name) = lookup_uid_from_fingerprint(changes["fingerprint"])
+ (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(changes["fingerprint"])
if uid_name == None:
uid_name = ""
may_nmu, may_sponsor = 1, 1
# XXX by default new dds don't have a fingerprint/uid in the db atm,
# and can't get one in there if we don't allow nmu/sponsorship
- elif uid[:3] == "dm:":
- uid_email = uid[3:]
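+ # The debian_maintainer flag comes back from the query as 't'/'f'; DMs may neither
+ # NMU nor sponsor uploads.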
+ elif is_dm == "t":
+ uid_email = uid
may_nmu, may_sponsor = 0, 0
else:
uid_email = "%s@debian.org" % (uid)
if not sponsored and not may_nmu:
source_ids = []
- check_suites = changes["distribution"].keys()
- if "unstable" not in check_suites: check_suites.append("unstable")
- for suite in check_suites:
- suite_id = database.get_suite_id(suite)
- q = Upload.projectB.query("SELECT s.id FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND sa.suite = %d" % (changes["source"], suite_id))
- for si in q.getresult():
- if si[0] not in source_ids: source_ids.append(si[0])
-
- print "source_ids: %s" % (",".join([str(x) for x in source_ids]))
-
- is_nmu = 1
- for si in source_ids:
- is_nmu = 1
- q = Upload.projectB.query("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT maintainer FROM src_uploaders WHERE src_uploaders.source = %s)" % (si))
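+ # Find the highest version of this source package that has DM-Upload-Allowed: yes,
+ # then check the uploader against that version's Maintainer/Uploaders.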
+ q = Upload.projectB.query("SELECT s.id, s.version FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND s.dm_upload_allowed = 'yes'" % (changes["source"]))
+
+ highest_sid, highest_version = None, None
+
+ should_reject = True
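+ # Track the id and version of the newest source upload that allows DM uploads.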
+ for si in q.getresult():
+ if highest_version is None or apt_pkg.VersionCompare(si[1], highest_version) > 0:
+ highest_sid = si[0]
+ highest_version = si[1]
+
+ if highest_sid == None:
+ reject("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % changes["source"])
+ else:
+ q = Upload.projectB.query("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT su.maintainer FROM src_uploaders su JOIN source s ON (s.id = su.source) WHERE su.source = %s)" % (highest_sid))
for m in q.getresult():
(rfc822, rfc2047, name, email) = utils.fix_maintainer(m[0])
if email == uid_email or name == uid_name:
- is_nmu=0
+ should_reject=False
break
- if is_nmu:
- reject("%s may not upload/NMU source package %s" % (uid, changes["source"]))
+
+ if should_reject:
+ reject("%s is not in Maintainer or Uploaders of source package %s" % (uid, changes["source"]))
for b in changes["binary"].keys():
for suite in changes["distribution"].keys():
if files[f].has_key("new"):
reject("%s may not upload NEW file %s" % (uid, f))
- # The remaining checks only apply to binary-only uploads right now
- if changes["architecture"].has_key("source"):
- return
-
- if not Cnf.Exists("Binary-Upload-Restrictions"):
- return
-
- restrictions = Cnf.SubTree("Binary-Upload-Restrictions")
-
- # If the restrictions only apply to certain components make sure
- # that the upload is actual targeted there.
- if restrictions.Exists("Components"):
- restricted_components = restrictions.SubTree("Components").ValueList()
- is_restricted = False
- for f in files:
- if files[f]["component"] in restricted_components:
- is_restricted = True
- break
- if not is_restricted:
- return
-
- # Assuming binary only upload restrictions are in place we then
- # iterate over suite and architecture checking the key is in the
- # allowed list. If no allowed list exists for a given suite or
- # architecture it's assumed to be open to anyone.
- for suite in changes["distribution"].keys():
- if not restrictions.Exists(suite):
- continue
- for arch in changes["architecture"].keys():
- if not restrictions.SubTree(suite).Exists(arch):
- continue
- allowed_keys = restrictions.SubTree("%s::%s" % (suite, arch)).ValueList()
- if changes["fingerprint"] not in allowed_keys:
- base_filename = os.path.basename(pkg.changes_file)
- reject("%s: not signed by authorised uploader for %s/%s"
- % (base_filename, suite, arch))
################################################################################
################################################################################
Upload.Subst["__SUMMARY__"] = summary
Upload.check_override()
+ # Send accept mail, announce to lists, close bugs and check for
+ # override disparities
+ if not Cnf["Dinstall::Options::No-Mail"]:
+ Upload.Subst["__SUITE__"] = ""
+ mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
+ utils.send_mail(mail_message)
+ Upload.announce(short_summary, 1)
+
################################################################################
def is_embargo ():
Upload.Subst["__SUMMARY__"] = summary
Upload.check_override()
+ # Send accept mail, announce to lists, close bugs and check for
+ # override disparities
+ if not Cnf["Dinstall::Options::No-Mail"]:
+ Upload.Subst["__SUITE__"] = ""
+ mail_message = utils.TemplateSubst(Upload.Subst,Cnf["Dir::Templates"]+"/process-unchecked.accepted")
+ utils.send_mail(mail_message)
+ Upload.announce(short_summary, 1)
+
################################################################################
def is_stableupdate ():
Logger.log(["Moving to proposed-updates", pkg.changes_file]);
Upload.dump_vars(Cnf["Dir::Queue::ProposedUpdates"]);
- move_to_dir(Cnf["Dir::Queue::ProposedUpdates"])
+ move_to_dir(Cnf["Dir::Queue::ProposedUpdates"], perms=0664)
# Check for override disparities
Upload.Subst["__SUMMARY__"] = summary;
Logger.log(["Moving to oldstable-proposed-updates", pkg.changes_file]);
Upload.dump_vars(Cnf["Dir::Queue::OldProposedUpdates"]);
- move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"])
+ move_to_dir(Cnf["Dir::Queue::OldProposedUpdates"], perms=0664)
# Check for override disparities
Upload.Subst["__SUMMARY__"] = summary;