################################################################################
def recheck(upload, session):
- files = upload.pkg.files
-
- cnf = Config()
- for f in files.keys():
- # The .orig.tar.gz can disappear out from under us is it's a
- # duplicate of one in the archive.
- if not files.has_key(f):
- continue
- # Check that the source still exists
- if files[f]["type"] == "deb":
- source_version = files[f]["source version"]
- source_package = files[f]["source package"]
- if not upload.pkg.changes["architecture"].has_key("source") \
- and not upload.source_exists(source_package, source_version, upload.pkg.changes["distribution"].keys()):
- source_epochless_version = re_no_epoch.sub('', source_version)
- dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
- found = 0
- for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
- if cnf.has_key("Dir::Queue::%s" % (q)):
- if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
- found = 1
- if not found:
- upload.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
-
- # Version and file overwrite checks
- if files[f]["type"] == "deb":
- upload.check_binary_against_db(f, session)
- elif files[f]["type"] == "dsc":
- upload.check_source_against_db(f, session)
- upload.check_dsc_against_db(f, session)
-
+ upload.recheck()
if len(upload.rejects) > 0:
answer = "XXX"
if Options["No-Action"] or Options["Automatic"] or Options["Trainee"]:
answer = 'S'
- print "REJECT\n" + upload.rejects.join("\n"),
+ print "REJECT\n%s" % '\n'.join(upload.rejects)
prompt = "[R]eject, Skip, Quit ?"
while prompt.find(answer) == -1:
answer = answer[:1].upper()
if answer == 'R':
- upload.do_reject(manual=0, reject_message=upload.rejects.join("\n"))
+ upload.do_reject(manual=0, reject_message='\n'.join(upload.rejects))
os.unlink(upload.pkg.changes_file[:-8]+".dak")
return 0
elif answer == 'S':
print """Usage: dak process-new [OPTION]... [CHANGES]...
-a, --automatic automatic run
-h, --help show this help and exit.
- -C, --comments-dir=DIR use DIR as comments-dir, for [o-]p-u-new
-m, --manual-reject=MSG manual reject with `msg'
-n, --no-action don't do anything
-t, --trainee FTP Trainee mode
finally:
os.unlink(path)
-# def move_to_dir (upload, dest, perms=0660, changesperms=0664):
-# utils.move (upload.pkg.changes_file, dest, perms=changesperms)
-# file_keys = upload.pkg.files.keys()
-# for f in file_keys:
-# utils.move (f, dest, perms=perms)
-
-# def is_source_in_queue_dir(qdir):
-# entries = [ x for x in os.listdir(qdir) if x.startswith(Upload.pkg.changes["source"])
-# and x.endswith(".changes") ]
-# for entry in entries:
-# # read the .dak
-# u = queue.Upload(Cnf)
-# u.pkg.changes_file = os.path.join(qdir, entry)
-# u.update_vars()
-# if not u.pkg.changes["architecture"].has_key("source"):
-# # another binary upload, ignore
-# continue
-# if Upload.pkg.changes["version"] != u.pkg.changes["version"]:
-# # another version, ignore
-# continue
-# # found it!
-# return True
-# return False
-
-# def move_to_holding(suite, queue_dir):
-# print "Moving to %s holding area." % (suite.upper(),)
-# if Options["No-Action"]:
-# return
-# Logger.log(["Moving to %s" % (suite,), Upload.pkg.changes_file])
-# Upload.dump_vars(queue_dir)
-# move_to_dir(queue_dir, perms=0664)
-# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
def _accept(upload):
if Options["No-Action"]:
return
upload.accept(summary, short_summary, targetdir=Config()["Dir::Queue::Newstage"])
os.unlink(upload.pkg.changes_file[:-8]+".dak")
-# def do_accept_stableupdate(upload,suite, q):
-# cnf = Config()
-# queue_dir = cnf["Dir::Queue::%s" % (q,)]
-# if not upload.pkg.changes["architecture"].has_key("source"):
-# # It is not a sourceful upload. So its source may be either in p-u
-# # holding, in new, in accepted or already installed.
-# if is_source_in_queue_dir(queue_dir):
-# # It's in p-u holding, so move it there.
-# print "Binary-only upload, source in %s." % (q,)
-# move_to_holding(suite, queue_dir)
-# elif Upload.source_exists(Upload.pkg.changes["source"],
-# Upload.pkg.changes["version"]):
-# # dak tells us that there is source available. At time of
-# # writing this means that it is installed, so put it into
-# # accepted.
-# print "Binary-only upload, source installed."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::Accepted"]):
-# # The source is in accepted, the binary cleared NEW: accept it.
-# print "Binary-only upload, source in accepted."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::New"]):
-# # It's in NEW. We expect the source to land in p-u holding
-# # pretty soon.
-# print "Binary-only upload, source in new."
-# move_to_holding(suite, queue_dir)
-# elif is_source_in_queue_dir(Cnf["Dir::Queue::Newstage"]):
-# # It's in newstage. Accept into the holding area
-# print "Binary-only upload, source in newstage."
-# Logger.log([utils.getusername(), "PUNEW ACCEPT: %s" % (Upload.pkg.changes_file)])
-# _accept()
-# else:
-# # No case applicable. Bail out. Return will cause the upload
-# # to be skipped.
-# print "ERROR"
-# print "Stable update failed. Source not found."
-# return
-# else:
-# # We are handling a sourceful upload. Move to accepted if currently
-# # in p-u holding and to p-u holding otherwise.
-# if is_source_in_queue_dir(queue_dir):
-# print "Sourceful upload in %s, accepting." % (q,)
-# _accept()
-# else:
-# move_to_holding(suite, queue_dir)
-
def do_accept(upload):
print "ACCEPT"
cnf = Config()
if not Options["No-Action"]:
(summary, short_summary) = upload.build_summaries()
-# if cnf.FindB("Dinstall::SecurityQueueHandling"):
-# upload.dump_vars(cnf["Dir::Queue::Embargoed"])
-# move_to_dir(cnf["Dir::Queue::Embargoed"])
-# upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
-# # Check for override disparities
-# upload.Subst["__SUMMARY__"] = summary
-# else:
- # Stable updates need to be copied to proposed-updates holding
- # area instead of accepted. Sourceful uploads need to go
- # to it directly, binaries only if the source has not yet been
- # accepted into p-u.
- for suite, q in [("proposed-updates", "ProposedUpdates"),
- ("oldstable-proposed-updates", "OldProposedUpdates")]:
- if not upload.pkg.changes["distribution"].has_key(suite):
- continue
- utils.fubar("stable accept not supported yet")
-# return do_accept_stableupdate(suite, q)
- # Just a normal upload, accept it...
- _accept(upload)
-
-def check_status(files):
- new = byhand = 0
- for f in files.keys():
- if files[f]["type"] == "byhand":
- byhand = 1
- elif files[f].has_key("new"):
- new = 1
- return (new, byhand)
+
+ if cnf.FindB("Dinstall::SecurityQueueHandling"):
+ upload.dump_vars(cnf["Dir::Queue::Embargoed"])
+ upload.move_to_dir(cnf["Dir::Queue::Embargoed"])
+ upload.queue_build("embargoed", cnf["Dir::Queue::Embargoed"])
+ # Check for override disparities
+ upload.Subst["__SUMMARY__"] = summary
+ else:
+ # Just a normal upload, accept it...
+ _accept(upload)
def do_pkg(changes_file, session):
u = Upload()
################################################################################
-# def do_comments(dir, opref, npref, line, fn):
-# for comm in [ x for x in os.listdir(dir) if x.startswith(opref) ]:
-# lines = open("%s/%s" % (dir, comm)).readlines()
-# if len(lines) == 0 or lines[0] != line + "\n": continue
-# changes_files = [ x for x in os.listdir(".") if x.startswith(comm[7:]+"_")
-# and x.endswith(".changes") ]
-# changes_files = sort_changes(changes_files)
-# for f in changes_files:
-# f = utils.validate_changes_file_arg(f, 0)
-# if not f: continue
-# print "\n" + f
-# fn(f, "".join(lines[1:]))
-
-# if opref != npref and not Options["No-Action"]:
-# newcomm = npref + comm[len(opref):]
-# os.rename("%s/%s" % (dir, comm), "%s/%s" % (dir, newcomm))
-
-# ################################################################################
-
-# def comment_accept(changes_file, comments):
-# Upload.pkg.changes_file = changes_file
-# Upload.init_vars()
-# Upload.update_vars()
-# Upload.update_subst()
-# files = Upload.pkg.files
-
-# if not recheck():
-# return # dak wants to REJECT, crap
-
-# (new, byhand) = check_status(files)
-# if not new and not byhand:
-# do_accept()
-
-# ################################################################################
-
-# def comment_reject(changes_file, comments):
-# Upload.pkg.changes_file = changes_file
-# Upload.init_vars()
-# Upload.update_vars()
-# Upload.update_subst()
-
-# if not recheck():
-# pass # dak has its own reasons to reject as well, which is fine
-
-# reject(comments)
-# print "REJECT\n" + reject_message,
-# if not Options["No-Action"]:
-# Upload.do_reject(0, reject_message)
-# os.unlink(Upload.pkg.changes_file[:-8]+".dak")
-
-################################################################################
-
def main():
global Options, Logger, Sections, Priorities
Arguments = [('a',"automatic","Process-New::Options::Automatic"),
('h',"help","Process-New::Options::Help"),
- ('C',"comments-dir","Process-New::Options::Comments-Dir", "HasArg"),
('m',"manual-reject","Process-New::Options::Manual-Reject", "HasArg"),
('t',"trainee","Process-New::Options::Trainee"),
('n',"no-action","Process-New::Options::No-Action")]
- for i in ["automatic", "help", "manual-reject", "no-action", "version", "comments-dir", "trainee"]:
+ for i in ["automatic", "help", "manual-reject", "no-action", "version", "trainee"]:
if not cnf.has_key("Process-New::Options::%s" % (i)):
cnf["Process-New::Options::%s" % (i)] = ""
changes_files = apt_pkg.ParseCommandLine(cnf.Cnf,Arguments,sys.argv)
- if len(changes_files) == 0 and not cnf.get("Process-New::Options::Comments-Dir",""):
+ if len(changes_files) == 0:
changes_files = utils.get_changes_files(cnf["Dir::Queue::New"])
Options = cnf.SubTree("Process-New::Options")
# Kill me now? **FIXME**
cnf["Dinstall::Options::No-Mail"] = ""
-# commentsdir = cnf.get("Process-New::Options::Comments-Dir","")
-# if commentsdir:
-# if changes_files != []:
-# sys.stderr.write("Can't specify any changes files if working with comments-dir")
-# sys.exit(1)
-# do_comments(commentsdir, "ACCEPT.", "ACCEPTED.", "OK", comment_accept)
-# do_comments(commentsdir, "REJECT.", "REJECTED.", "NOTOK", comment_reject)
-# else:
- if True:
- for changes_file in changes_files:
- changes_file = utils.validate_changes_file_arg(changes_file, 0)
- if not changes_file:
- continue
- print "\n" + changes_file
-
- do_pkg (changes_file, session)
+ for changes_file in changes_files:
+ changes_file = utils.validate_changes_file_arg(changes_file, 0)
+ if not changes_file:
+ continue
+ print "\n" + changes_file
+
+ do_pkg (changes_file, session)
end()
###############################################################################
-def lookup_uid_from_fingerprint(fpr, session):
- uid = None
- uid_name = ""
- # This is a stupid default, but see the comments below
- is_dm = False
-
- user = get_uid_from_fingerprint(fpr, session)
-
- if user is not None:
- uid = user.uid
- if user.name is None:
- uid_name = ''
- else:
- uid_name = user.name
-
- # Check the relevant fingerprint (which we have to have)
- for f in user.fingerprint:
- if f.fingerprint == fpr:
- is_dm = f.keyring.debian_maintainer
- break
-
- return (uid, uid_name, is_dm)
+def check_status(files):
+ new = byhand = 0
+ for f in files.keys():
+ if files[f]["type"] == "byhand":
+ byhand = 1
+ elif files[f].has_key("new"):
+ new = 1
+ return (new, byhand)
###############################################################################
self.ensure_hashes()
###########################################################################
- def check_lintian(self):
- cnf = Config()
-
- # Only check some distributions
- valid_dist = False
- for dist in ('unstable', 'experimental'):
- if dist in self.pkg.changes['distribution']:
- valid_dist = True
- break
-
- if not valid_dist:
- return
- tagfile = cnf.get("Dinstall::LintianTags")
- if tagfile is None:
- # We don't have a tagfile, so just don't do anything.
- return
+ def ensure_orig(self, target_dir='.', session=None):
+ """
+ Ensures that all orig files mentioned in the changes file are present
+ in target_dir. If they do not exist, they are symlinked into place.
- # Parse the yaml file
- sourcefile = file(tagfile, 'r')
- sourcecontent = sourcefile.read()
- sourcefile.close()
- try:
- lintiantags = yaml.load(sourcecontent)['lintian']
- except yaml.YAMLError, msg:
- utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
- return
+ A list containing the symlinks that were created is returned (so they
+ can be removed).
+ """
- # Try and find all orig mentioned in the .dsc
- target_dir = '.'
symlinked = []
+ cnf = Config()
+
for filename, entry in self.pkg.dsc_files.iteritems():
if not re_is_orig_source.match(filename):
# File is not an orig; ignore
return True
- session = DBConn().session()
+ session_ = session
+ if session is None:
+ session_ = DBConn().session()
+
found = False
# Look in the pool
- for poolfile in get_poolfile_like_name('/%s' % filename, session):
+ for poolfile in get_poolfile_like_name('/%s' % filename, session_):
poolfile_path = os.path.join(
poolfile.location.path, poolfile.filename
)
found = True
break
- session.close()
+ if session is None:
+ session_.close()
if found:
continue
'OldProposedUpdates', 'Embargoed', 'Unembargoed')
for queue in queues:
- if 'Dir::Queue::%s' % directory not in cnf:
+ if not cnf.get('Dir::Queue::%s' % queue):
continue
queuefile_path = os.path.join(
- cnf['Dir::Queue::%s' % directory], filename
+ cnf['Dir::Queue::%s' % queue], filename
)
if not os.path.exists(queuefile_path):
if symlink_if_valid(queuefile_path):
break
+ return symlinked
+
+ ###########################################################################
+
+ def check_lintian(self):
+ cnf = Config()
+
+ # Only check some distributions
+ valid_dist = False
+ for dist in ('unstable', 'experimental'):
+ if dist in self.pkg.changes['distribution']:
+ valid_dist = True
+ break
+
+ if not valid_dist:
+ return
+
+ tagfile = cnf.get("Dinstall::LintianTags")
+ if tagfile is None:
+ # We don't have a tagfile, so just don't do anything.
+ return
+
+ # Parse the yaml file
+ sourcefile = file(tagfile, 'r')
+ sourcecontent = sourcefile.read()
+ sourcefile.close()
+ try:
+ lintiantags = yaml.load(sourcecontent)['lintian']
+ except yaml.YAMLError, msg:
+ utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
+ return
+
+ # Try and find all orig mentioned in the .dsc
+ symlinked = self.ensure_orig()
+
# Now setup the input file for lintian. lintian wants "one tag per line" only,
# so put it together like it. We put all types of tags in one file and then sort
# through lintians output later to see if its a fatal tag we detected, or not.
except:
self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
+ def check_if_upload_is_sponsored(self, uid_email, uid_name):
+ if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
+ sponsored = False
+ elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
+ sponsored = False
+ if uid_name == "":
+ sponsored = True
+ else:
+ sponsored = True
+ if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
+ sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
+ if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
+ self.pkg.changes["changedbyemail"] not in sponsor_addresses):
+ self.pkg.changes["sponsoremail"] = uid_email
+
+ return sponsored
+
+
+ ###########################################################################
+ # check_signed_by_key checks
###########################################################################
+
+ def check_signed_by_key(self):
+ """Ensure the .changes is signed by an authorized uploader."""
+ session = DBConn().session()
+
+ # First of all we check that the person has proper upload permissions
+ # and that this upload isn't blocked
+ fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
+
+ if fpr is None:
+ self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
+ return
+
+ # TODO: Check that import-keyring adds UIDs properly
+ if not fpr.uid:
+ self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
+ return
+
+ # Check that the fingerprint which uploaded has permission to do so
+ self.check_upload_permissions(fpr, session)
+
+ # Check that this package is not in a transition
+ self.check_transition(session)
+
+ session.close()
+
+
+ def check_upload_permissions(self, fpr, session):
+ # Check any one-off upload blocks
+ self.check_upload_blocks(fpr, session)
+
+ # Start with DM as a special case
+ # DM is a special case unfortunately, so we check it first
+ # (keys with no source access get more access than DMs in one
+ # way; DMs can only upload for their packages whether source
+ # or binary, whereas keys with no access might be able to
+ # upload some binaries)
+ if fpr.source_acl.access_level == 'dm':
+ self.check_dm_source_upload(fpr, session)
+ else:
+ # Check source-based permissions for other types
+ if self.pkg.changes["architecture"].has_key("source"):
+ if fpr.source_acl.access_level is None:
+ rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
+ rej += '\nPlease contact ftpmaster if you think this is incorrect'
+ self.rejects.append(rej)
+ return
+ else:
+ # If not a DM, we allow full upload rights
+ uid_email = "%s@debian.org" % (fpr.uid.uid)
+ self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
+
+
+ # Check binary upload permissions
+ # By this point we know that DMs can't have got here unless they
+ # are allowed to deal with the package concerned so just apply
+ # normal checks
+ if fpr.binary_acl.access_level == 'full':
+ return
+
+ # Otherwise we're in the map case
+ tmparches = self.pkg.changes["architecture"].copy()
+ tmparches.pop('source', None)
+
+ for bam in fpr.binary_acl_map:
+ tmparches.pop(bam.architecture.arch_string, None)
+
+ if len(tmparches.keys()) > 0:
+ if fpr.binary_reject:
+ rej = ".changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
+ rej += "\narchitectures involved are: ", ",".join(tmparches.keys())
+ self.rejects.append(rej)
+ else:
+ # TODO: This is where we'll implement reject vs throw away binaries later
+ rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
+ rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
+ rej += "\nFingerprint: %s", (fpr.fingerprint)
+ self.rejects.append(rej)
+
+
+ def check_upload_blocks(self, fpr, session):
+ """Check whether any upload blocks apply to this source, source
+ version, uid / fpr combination"""
+
+ def block_rej_template(fb):
+ rej = 'Manual upload block in place for package %s' % fb.source
+ if fb.version is not None:
+ rej += ', version %s' % fb.version
+ return rej
+
+ for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
+ # version is None if the block applies to all versions
+ if fb.version is None or fb.version == self.pkg.changes['version']:
+ # Check both fpr and uid - either is enough to cause a reject
+ if fb.fpr is not None:
+ if fb.fpr.fingerprint == fpr.fingerprint:
+ self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
+ if fb.uid is not None:
+ if fb.uid == fpr.uid:
+ self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
+
+
+ def check_dm_upload(self, fpr, session):
+ # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
+ ## none of the uploaded packages are NEW
+ rej = False
+ for f in self.pkg.files.keys():
+ if self.pkg.files[f].has_key("byhand"):
+ self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
+ rej = True
+ if self.pkg.files[f].has_key("new"):
+ self.rejects.append("%s may not upload NEW file %s" % (uid, f))
+ rej = True
+
+ if rej:
+ return
+
+ ## the most recent version of the package uploaded to unstable or
+ ## experimental includes the field "DM-Upload-Allowed: yes" in the source
+ ## section of its control file
+ q = session.query(DBSource).filter_by(source=self.pkg.changes["source"])
+ q = q.join(SrcAssociation)
+ q = q.join(Suite).filter(Suite.suite_name.in_(['unstable', 'experimental']))
+ q = q.order_by(desc('source.version')).limit(1)
+
+ r = q.all()
+
+ if len(r) != 1:
+ rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
+ self.rejects.append(rej)
+ return
+
+ r = r[0]
+ if not r.dm_upload_allowed:
+ rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
+ self.rejects.append(rej)
+ return
+
+ ## the Maintainer: field of the uploaded .changes file corresponds with
+ ## the owner of the key used (ie, non-developer maintainers may not sponsor
+ ## uploads)
+ if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
+ self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
+
+ ## the most recent version of the package uploaded to unstable or
+ ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
+ ## non-developer maintainers cannot NMU or hijack packages)
+
+ # srcuploaders includes the maintainer
+ accept = False
+ for sup in r.srcuploaders:
+ (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
+ # Eww - I hope we never have two people with the same name in Debian
+ if email == fpr.uid.uid or name == fpr.uid.name:
+ accept = True
+ break
+
+ if not accept:
+ self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
+ return
+
+ ## none of the packages are being taken over from other source packages
+ for b in self.pkg.changes["binary"].keys():
+ for suite in self.pkg.changes["distribution"].keys():
+ q = session.query(DBSource)
+ q = q.join(DBBinary).filter_by(package=b)
+ q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
+
+ for s in q.all():
+ if s.source != self.pkg.changes["source"]:
+ self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
+
+
+
def check_transition(self, session):
cnf = Config()
return
###########################################################################
- def check_signed_by_key(self):
- """Ensure the .changes is signed by an authorized uploader."""
- session = DBConn().session()
-
- self.check_transition(session)
-
- (uid, uid_name, is_dm) = lookup_uid_from_fingerprint(self.pkg.changes["fingerprint"], session=session)
-
- # match claimed name with actual name:
- if uid is None:
- # This is fundamentally broken but need us to refactor how we get
- # the UIDs/Fingerprints in order for us to fix it properly
- uid, uid_email = self.pkg.changes["fingerprint"], uid
- may_nmu, may_sponsor = 1, 1
- # XXX by default new dds don't have a fingerprint/uid in the db atm,
- # and can't get one in there if we don't allow nmu/sponsorship
- elif is_dm is False:
- # If is_dm is False, we allow full upload rights
- uid_email = "%s@debian.org" % (uid)
- may_nmu, may_sponsor = 1, 1
- else:
- # Assume limited upload rights unless we've discovered otherwise
- uid_email = uid
- may_nmu, may_sponsor = 0, 0
-
- if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
- sponsored = 0
- elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
- sponsored = 0
- if uid_name == "": sponsored = 1
- else:
- sponsored = 1
- if ("source" in self.pkg.changes["architecture"] and
- uid_email and utils.is_email_alias(uid_email)):
- sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
- if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
- self.pkg.changes["changedbyemail"] not in sponsor_addresses):
- self.pkg.changes["sponsoremail"] = uid_email
-
- if sponsored and not may_sponsor:
- self.rejects.append("%s is not authorised to sponsor uploads" % (uid))
-
- if not sponsored and not may_nmu:
- should_reject = True
- highest_sid, highest_version = None, None
-
- # XXX: This reimplements in SQLA what existed before but it's fundamentally fucked
- # It ignores higher versions with the dm_upload_allowed flag set to false
- # I'm keeping the existing behaviour for now until I've gone back and
- # checked exactly what the GR says - mhy
- for si in get_sources_from_name(source=self.pkg.changes['source'], dm_upload_allowed=True, session=session):
- if highest_version is None or apt_pkg.VersionCompare(si.version, highest_version) == 1:
- highest_sid = si.source_id
- highest_version = si.version
-
- if highest_sid is None:
- self.rejects.append("Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version" % self.pkg.changes["source"])
- else:
- for sup in session.query(SrcUploader).join(DBSource).filter_by(source_id=highest_sid):
- (rfc822, rfc2047, name, email) = sup.maintainer.get_split_maintainer()
- if email == uid_email or name == uid_name:
- should_reject = False
- break
-
- if should_reject is True:
- self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (uid, self.pkg.changes["source"]))
-
- for b in self.pkg.changes["binary"].keys():
- for suite in self.pkg.changes["distribution"].keys():
- q = session.query(DBSource)
- q = q.join(DBBinary).filter_by(package=b)
- q = q.join(BinAssociation).join(Suite).filter_by(suite_name=suite)
-
- for s in q.all():
- if s.source != self.pkg.changes["source"]:
- self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (uid, b, s, suite))
-
- for f in self.pkg.files.keys():
- if self.pkg.files[f].has_key("byhand"):
- self.rejects.append("%s may not upload BYHAND file %s" % (uid, f))
- if self.pkg.files[f].has_key("new"):
- self.rejects.append("%s may not upload NEW file %s" % (uid, f))
-
- session.close()
-
+ # End check_signed_by_key checks
###########################################################################
+
def build_summaries(self):
""" Build a summary of changes the upload introduces. """
if actual_size != int(dsc_entry["size"]):
self.rejects.append("size for %s doesn't match %s." % (found, file))
+ ################################################################################
+ # This is used by process-new and process-holding to recheck a changes file
+ # at the time we're running. It mainly wraps various other internal functions
+ # and is similar to accepted_checks - these should probably be tidied up
+ # and combined
+ def recheck(self, session):
+ cnf = Config()
+ for f in self.pkg.files.keys():
+ # The .orig.tar.gz can disappear out from under us if it's a
+ # duplicate of one in the archive.
+ if not self.pkg.files.has_key(f):
+ continue
+
+ entry = self.pkg.files[f]
+
+ # Check that the source still exists
+ if entry["type"] == "deb":
+ source_version = entry["source version"]
+ source_package = entry["source package"]
+ if not self.pkg.changes["architecture"].has_key("source") \
+ and not source_exists(source_package, source_version, self.pkg.changes["distribution"].keys(), session):
+ source_epochless_version = re_no_epoch.sub('', source_version)
+ dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
+ found = False
+ for q in ["Accepted", "Embargoed", "Unembargoed", "Newstage"]:
+ if cnf.has_key("Dir::Queue::%s" % (q)):
+ if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
+ found = True
+ if not found:
+ self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
+
+ # Version and file overwrite checks
+ if entry["type"] == "deb":
+ self.check_binary_against_db(f, session)
+ elif entry["type"] == "dsc":
+ self.check_source_against_db(f, session)
+ self.check_dsc_against_db(f, session)
+
################################################################################
def accepted_checks(self, overwrite_checks, session):
# Recheck anything that relies on the database; since that's not