X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=dak%2Fprocess_unchecked.py;h=dc92f74fc5a2065d4d7259b788002f8966c16860;hb=a29878d1b68545cac92669617f94c81330ee4fc7;hp=412ad64aa9088f5f30be8a64401d7a82d526651f;hpb=6b9ada122cf00986a2f44d97bfc57b34e4e9ea98;p=dak.git

diff --git a/dak/process_unchecked.py b/dak/process_unchecked.py
index 412ad64a..dc92f74f 100755
--- a/dak/process_unchecked.py
+++ b/dak/process_unchecked.py
@@ -36,6 +36,8 @@
 import daklib.queue
 import daklib.utils
 from types import *

+from syck import *
+
 ################################################################################

@@ -329,9 +331,7 @@ def check_deb_ar(filename, control):
  o control.tar.gz
  o data.tar.gz or data.tar.bz2

-in that order, and nothing else. If the third member is a
-data.tar.bz2, an additional check is performed for the required
-Pre-Depends on dpkg (>= 1.10.24)."""
+in that order, and nothing else."""
     cmd = "ar t %s" % (filename)
     (result, output) = commands.getstatusoutput(cmd)
     if result != 0:
@@ -344,22 +344,7 @@ Pre-Depends on dpkg (>= 1.10.24)."""
         reject("%s: first chunk is '%s', expected 'debian-binary'." % (filename, chunks[0]))
     if chunks[1] != "control.tar.gz":
         reject("%s: second chunk is '%s', expected 'control.tar.gz'." % (filename, chunks[1]))
-    if chunks[2] == "data.tar.bz2":
-        # Packages using bzip2 compression must have a Pre-Depends on dpkg >= 1.10.24.
-        found_needed_predep = 0
-        for parsed_dep in apt_pkg.ParseDepends(control.Find("Pre-Depends", "")):
-            for atom in parsed_dep:
-                (dep, version, constraint) = atom
-                if dep != "dpkg" or (constraint != ">=" and constraint != ">>") or \
-                       len(parsed_dep) > 1: # or'ed deps don't count
-                    continue
-                if (constraint == ">=" and apt_pkg.VersionCompare(version, "1.10.24") < 0) or \
-                       (constraint == ">>" and apt_pkg.VersionCompare(version, "1.10.23") < 0):
-                    continue
-                found_needed_predep = 1
-        if not found_needed_predep:
-            reject("%s: uses bzip2 compression, but doesn't Pre-Depend on dpkg (>= 1.10.24)" % (filename))
-    elif chunks[2] != "data.tar.gz":
+    if chunks[2] not in [ "data.tar.bz2", "data.tar.gz" ]:
         reject("%s: third chunk is '%s', expected 'data.tar.gz' or 'data.tar.bz2'." % (filename, chunks[2]))

 ################################################################################
@@ -404,7 +389,8 @@ def check_files():

     for file in file_keys:
         # Ensure the file does not already exist in one of the accepted directories
-        for dir in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates" ]:
+        for dir in [ "Accepted", "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
+            if not Cnf.has_key("Dir::Queue::%s" % (dir)): continue
             if os.path.exists(Cnf["Dir::Queue::%s" % (dir) ]+'/'+file):
                 reject("%s file already exists in the %s directory." % (file, dir))
         if not daklib.utils.re_taint_free.match(file):
@@ -421,7 +407,7 @@ def check_files():
             files[file]["type"] = "unreadable"
             continue
         # If it's byhand skip remaining checks
-        if files[file]["section"] == "byhand" or files[file]["section"][4:] == "raw-":
+        if files[file]["section"] == "byhand" or files[file]["section"][:4] == "raw-":
            files[file]["byhand"] = 1
            files[file]["type"] = "byhand"
         # Checks for a binary package...
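
With the dpkg Pre-Depends special case dropped, check_deb_ar() reduces to a plain whitelist of ar members. For readers who want to poke at the check outside dak, here is a minimal standalone sketch in the file's own Python 2 idiom; list_deb_members() and deb_member_problems() are hypothetical helpers, and the real code reports problems through reject() instead of returning them:

    # Standalone sketch of the simplified member check; illustration only.
    import commands

    def list_deb_members(filename):
        # 'ar t' prints one member name per line, in archive order.
        (result, output) = commands.getstatusoutput("ar t %s" % (filename))
        if result != 0:
            raise ValueError("'ar t' failed on %s" % (filename))
        return output.split('\n')

    def deb_member_problems(filename):
        problems = []
        members = list_deb_members(filename)
        if len(members) != 3:
            problems.append("expected 3 members, found %d" % (len(members)))
            return problems
        if members[0] != "debian-binary":
            problems.append("first member is '%s', expected 'debian-binary'" % (members[0]))
        if members[1] != "control.tar.gz":
            problems.append("second member is '%s', expected 'control.tar.gz'" % (members[1]))
        if members[2] not in ["data.tar.gz", "data.tar.bz2"]:
            problems.append("third member is '%s', expected 'data.tar.gz' or 'data.tar.bz2'" % (members[2]))
        return problems
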
@@ -1015,11 +1001,139 @@ def check_timestamps():
         reject("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))

 ################################################################################
+################################################################################
+
+# We reject packages if the release team defined a transition for them
+def check_transition(sourcepkg):
+
+    # Only check if there is a file defined (and existent) with checks. It's a bit
+    # specific to Debian and of little use for others, so return early there.
+    if not Cnf.has_key("Dinstall::Reject::ReleaseTransitions") or not os.path.exists("%s" % (Cnf["Dinstall::Reject::ReleaseTransitions"])):
+        return
+
+    # Parse the YAML file
+    sourcefile = file(Cnf["Dinstall::Reject::ReleaseTransitions"], 'r')
+    sourcecontent = sourcefile.read()
+    try:
+        transitions = load(sourcecontent)
+    except error, msg:
+        # This shouldn't happen, the release team has a wrapper to check the file, but better
+        # safe than sorry
+        daklib.utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
+        return
+
+    # Now look through all defined transitions
+    for trans in transitions:
+        t = transitions[trans]
+        source = t["source"]
+        new_vers = t["new"]
+
+        # Will be None if nothing is in testing.
+        curvers = daklib.database.get_testing_version(source)
+
+        if curvers and apt_pkg.VersionCompare(new_vers, curvers) == 1:
+            # This is still valid, the current version in the database is older than
+            # the new version we are waiting for
+
+            # Check if the source we look at is affected by this.
+            if sourcepkg in t['packages']:
+                # The source is affected, let's reject it.
+                reject("""%s: part of the %s transition.

Your package is part of a testing transition to get %s migrated.

Transition description: %s

This transition will finish when %s, version %s, reaches testing (it currently
has version %s).
This transition is managed by the Release Team and %s
is the Release-Team member responsible for it.
Please contact them or debian-release@lists.debian.org if you
need further assistance.
"""
+                       % (sourcepkg, trans, source, t["reason"], source, new_vers, curvers, t["rm"]))
+                return 0
+
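
The transitions file itself is not part of this diff; check_transition() only fixes its shape: a YAML mapping from a transition name to an entry with "source", "new", "rm", "reason" and "packages" keys. A small sketch of the gating logic with a made-up transition (Python 2; would_be_rejected() is a hypothetical helper, and the real code takes the current testing version from daklib.database.get_testing_version() and reports through reject()):

    # Illustration only: sample_transitions mimics what load() returns for the
    # Dinstall::Reject::ReleaseTransitions file; names and versions are invented.
    import apt_pkg
    apt_pkg.init()

    sample_transitions = {
        "libfoo2": {                                # transition name
            "source": "libfoo",                     # source the release team waits on
            "new": "2.0-1",                         # version that must reach testing
            "rm": "Some Releaser",                  # responsible release team member
            "reason": "libfoo changed its soname",
            "packages": ["libfoo", "bar", "baz"],   # sources frozen meanwhile
        },
    }

    def would_be_rejected(sourcepkg, testing_version):
        """Return the name of the transition blocking sourcepkg, or None."""
        for trans, t in sample_transitions.items():
            # The transition is still open while testing carries something
            # older than the version we are waiting for.
            if testing_version and apt_pkg.VersionCompare(t["new"], testing_version) > 0:
                if sourcepkg in t["packages"]:
                    return trans
        return None

    print would_be_rejected("bar", "1.9-3")    # -> 'libfoo2': upload rejected
    print would_be_rejected("bar", "2.0-1")    # -> None: transition finished
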
+ """ + % (sourcepkg, trans, source, t["reason"], source, new_vers, curvers, t["rm"])) + return 0 + +################################################################################ + +def lookup_uid_from_fingerprint(fpr): + q = Upload.projectB.query("SELECT u.uid, u.name FROM fingerprint f, uid u WHERE f.uid = u.id AND f.fingerprint = '%s'" % (fpr)) + qs = q.getresult() + if len(qs) == 0: + return (None, None) + else: + return qs[0] def check_signed_by_key(): """Ensure the .changes is signed by an authorized uploader.""" - # We only check binary-only uploads right now + (uid, uid_name) = lookup_uid_from_fingerprint(changes["fingerprint"]) + if uid_name == None: + uid_name = "" + + # match claimed name with actual name: + if uid == None: + uid, uid_email = changes["fingerprint"], uid + may_nmu, may_sponsor = 1, 1 + # XXX by default new dds don't have a fingerprint/uid in the db atm, + # and can't get one in there if we don't allow nmu/sponsorship + elif uid[:3] == "dm:": + uid_email = uid[3:] + may_nmu, may_sponsor = 0, 0 + else: + uid_email = "%s@debian.org" % (uid) + may_nmu, may_sponsor = 1, 1 + + if uid_email in [changes["maintaineremail"], changes["changedbyemail"]]: + sponsored = 0 + elif uid_name in [changes["maintainername"], changes["changedbyname"]]: + sponsored = 0 + if uid_name == "": sponsored = 1 + else: + sponsored = 1 + + if sponsored and not may_sponsor: + reject("%s is not authorised to sponsor uploads" % (uid)) + + if not sponsored and not may_nmu: + source_ids = [] + check_suites = changes["distribution"].keys() + if "unstable" not in check_suites: check_suites.append("unstable") + for suite in check_suites: + suite_id = daklib.database.get_suite_id(suite) + q = Upload.projectB.query("SELECT s.id FROM source s JOIN src_associations sa ON (s.id = sa.source) WHERE s.source = '%s' AND sa.suite = %d" % (changes["source"], suite_id)) + for si in q.getresult(): + if si[0] not in source_ids: source_ids.append(si[0]) + + print "source_ids: %s" % (",".join([str(x) for x in source_ids])) + + is_nmu = 1 + for si in source_ids: + is_nmu = 1 + q = Upload.projectB.query("SELECT m.name FROM maintainer m WHERE m.id IN (SELECT maintainer FROM src_uploaders WHERE src_uploaders.source = %s)" % (si)) + for m in q.getresult(): + (rfc822, rfc2047, name, email) = daklib.utils.fix_maintainer(m[0]) + if email == uid_email or name == uid_name: + is_nmu=0 + break + if is_nmu: + reject("%s may not upload/NMU source package %s" % (uid, changes["source"])) + + for b in changes["binary"].keys(): + for suite in changes["distribution"].keys(): + suite_id = daklib.database.get_suite_id(suite) + q = Upload.projectB.query("SELECT DISTINCT s.source FROM source s JOIN binaries b ON (s.id = b.source) JOIN bin_associations ba On (b.id = ba.bin) WHERE b.package = '%s' AND ba.suite = %s" % (b, suite_id)) + for s in q.getresult(): + if s[0] != changes["source"]: + reject("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite)) + + for file in files.keys(): + if files[file].has_key("byhand"): + reject("%s may not upload BYHAND file %s" % (uid, file)) + if files[file].has_key("new"): + reject("%s may not upload NEW file %s" % (uid, file)) + + # The remaining checks only apply to binary-only uploads right now if changes["architecture"].has_key("source"): return @@ -1227,7 +1341,8 @@ def queue_unembargo (summary, short_summary): ################################################################################ def is_embargo (): - return 0 + # if embargoed queues are enabled always embargo + 
@@ -1227,7 +1341,8 @@ def queue_unembargo (summary, short_summary):
 ################################################################################

 def is_embargo ():
-    return 0
+    # if embargoed queues are enabled, always embargo
+    return 1

 def queue_embargo (summary, short_summary):
     print "Moving to EMBARGOED holding area."
@@ -1463,6 +1578,8 @@ def process_it (changes_file):
                check_urgency()
                check_timestamps()
                check_signed_by_key()
+                if changes["architecture"].has_key("source"):
+                    check_transition(changes["source"])
         Upload.update_subst(reject_message)
         action()
     except SystemExit:
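
Everything this patch adds is switched on through configuration rather than at the call sites: the transition check needs Dinstall::Reject::ReleaseTransitions to point at an existing file, and the embargo handling needs the two new queue directories. A hypothetical illustration of the relevant keys (a plain Python dict stands in for dak's Cnf object; the paths are invented, only the key names come from the code above):

    # Illustration only: the configuration keys read by the new code paths.
    example_cnf = {
        # Enables check_transition(); the file must also exist on disk.
        "Dinstall::Reject::ReleaseTransitions": "/srv/dak/transitions.yaml",
        # With these present, check_files() also scans the embargo queues for
        # duplicates and is_embargo()/queue_embargo() have somewhere to route
        # uploads; queue directories whose key is not configured are skipped.
        "Dir::Queue::Embargoed": "/srv/dak/queue/embargoed/",
        "Dir::Queue::Unembargoed": "/srv/dak/queue/unembargoed/",
    }

    print "Embargo queues enabled:", example_cnf.has_key("Dir::Queue::Embargoed")
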