5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQL Alchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
80 elif f['architecture'] == 'source' and f["type"] == 'unreadable':
81 utils.warn('unreadable source file (will continue and hope for the best)')
85 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
87 # Validate the override type
88 type_id = get_override_type(file_type, session)
90 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
94 ################################################################################
96 # Determine what parts in a .changes are NEW
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
100 Determine what parts in a C{changes} file are NEW.
103 @param filename: changes filename
105 @type changes: Upload.Pkg.changes dict
106 @param changes: Changes dictionary
108 @type files: Upload.Pkg.files dict
109 @param files: Files dictionary
112 @param warn: Warn if overrides are added for (old)stable
114 @type dsc: Upload.Pkg.dsc dict
115 @param dsc: (optional); Dsc dictionary
118 @param new: new packages as returned by a previous call to this function, but override information may have changed
121 @return: dictionary of NEW components.
124 # TODO: This should all use the database instead of parsing the changes
130 dbchg = get_dbchange(filename, session)
132 print "Warning: cannot find changes file in database; won't check byhand"
134 # Try to get the Package-Set field from an included .dsc file (if possible).
136 for package, entry in build_package_set(dsc, session).items():
137 if not new.has_key(package):
140 # Build up a list of potentially new things
141 for name, f in files.items():
142 # Keep a record of byhand elements
143 if f["section"] == "byhand":
148 priority = f["priority"]
149 section = f["section"]
150 file_type = get_type(f, session)
151 component = f["component"]
153 if file_type == "dsc":
156 if not new.has_key(pkg):
158 new[pkg]["priority"] = priority
159 new[pkg]["section"] = section
160 new[pkg]["type"] = file_type
161 new[pkg]["component"] = component
162 new[pkg]["files"] = []
164 old_type = new[pkg]["type"]
165 if old_type != file_type:
166 # source gets trumped by deb or udeb
167 if old_type == "dsc":
168 new[pkg]["priority"] = priority
169 new[pkg]["section"] = section
170 new[pkg]["type"] = file_type
171 new[pkg]["component"] = component
173 new[pkg]["files"].append(name)
175 if f.has_key("othercomponents"):
176 new[pkg]["othercomponents"] = f["othercomponents"]
178 # Fix up the list of target suites
180 for suite in changes["suite"].keys():
181 oldsuite = get_suite(suite, session)
183 print "WARNING: Invalid suite %s found" % suite
186 if oldsuite.overridesuite:
187 newsuite = get_suite(oldsuite.overridesuite, session)
190 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191 oldsuite.overridesuite, suite)
192 del changes["suite"][suite]
193 changes["suite"][oldsuite.overridesuite] = 1
195 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
196 oldsuite.overridesuite, suite)
198 # Check for unprocessed byhand files
199 if dbchg is not None:
200 for b in byhand.keys():
201 # Find the file entry in the database
203 for f in dbchg.files:
206 # If it's processed, we can ignore it
212 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed"
214 # Check for new stuff
215 for suite in changes["suite"].keys():
216 for pkg in new.keys():
217 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
219 for file_entry in new[pkg]["files"]:
220 if files[file_entry].has_key("new"):
221 del files[file_entry]["new"]
225 for s in ['stable', 'oldstable']:
226 if changes["suite"].has_key(s):
227 print "WARNING: overrides will be added for %s!" % s
228 for pkg in new.keys():
229 if new[pkg].has_key("othercomponents"):
230 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
234 ################################################################################
236 def check_valid(new, session = None):
238 Check if section and priority for NEW packages exist in database.
239 Additionally does sanity checks:
240 - debian-installer packages have to be udeb (or source)
241 - non debian-installer packages can not be udeb
242 - source priority can only be assigned to dsc file types
245 @param new: Dict of new packages with their section, priority and type.
248 for pkg in new.keys():
249 section_name = new[pkg]["section"]
250 priority_name = new[pkg]["priority"]
251 file_type = new[pkg]["type"]
253 section = get_section(section_name, session)
255 new[pkg]["section id"] = -1
257 new[pkg]["section id"] = section.section_id
259 priority = get_priority(priority_name, session)
261 new[pkg]["priority id"] = -1
263 new[pkg]["priority id"] = priority.priority_id
266 di = section_name.find("debian-installer") != -1
268 # If d-i, we must be udeb and vice-versa
269 if (di and file_type not in ("udeb", "dsc")) or \
270 (not di and file_type == "udeb"):
271 new[pkg]["section id"] = -1
273 # If dsc we need to be source and vice-versa
274 if (priority == "source" and file_type != "dsc") or \
275 (priority != "source" and file_type == "dsc"):
276 new[pkg]["priority id"] = -1
278 ###############################################################################
280 # Used by Upload.check_timestamps
281 class TarTime(object):
282 def __init__(self, future_cutoff, past_cutoff):
284 self.future_cutoff = future_cutoff
285 self.past_cutoff = past_cutoff
288 self.future_files = {}
289 self.ancient_files = {}
291 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
292 if MTime > self.future_cutoff:
293 self.future_files[Name] = MTime
294 if MTime < self.past_cutoff:
295 self.ancient_files[Name] = MTime
297 ###############################################################################
299 def prod_maintainer(notes, upload):
302 # Here we prepare an editor and get them ready to prod...
303 (fd, temp_filename) = utils.temp_filename()
304 temp_file = os.fdopen(fd, 'w')
306 temp_file.write(note.comment)
308 editor = os.environ.get("EDITOR","vi")
311 os.system("%s %s" % (editor, temp_filename))
312 temp_fh = utils.open_file(temp_filename)
313 prod_message = "".join(temp_fh.readlines())
315 print "Prod message:"
316 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
317 prompt = "[P]rod, Edit, Abandon, Quit ?"
319 while prompt.find(answer) == -1:
320 answer = utils.our_raw_input(prompt)
321 m = re_default_answer.search(prompt)
324 answer = answer[:1].upper()
325 os.unlink(temp_filename)
331 # Otherwise, do the proding...
332 user_email_address = utils.whoami() + " <%s>" % (
333 cnf["Dinstall::MyAdminAddress"])
337 Subst["__FROM_ADDRESS__"] = user_email_address
338 Subst["__PROD_MESSAGE__"] = prod_message
339 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
341 prod_mail_message = utils.TemplateSubst(
342 Subst,cnf["Dir::Templates"]+"/process-new.prod")
345 utils.send_mail(prod_mail_message)
347 print "Sent prodding message"
349 ################################################################################
351 def edit_note(note, upload, session, trainee=False):
352 # Write the current data to a temporary file
353 (fd, temp_filename) = utils.temp_filename()
354 editor = os.environ.get("EDITOR","vi")
357 os.system("%s %s" % (editor, temp_filename))
358 temp_file = utils.open_file(temp_filename)
359 newnote = temp_file.read().rstrip()
362 print utils.prefix_multi_line_string(newnote," ")
363 prompt = "[D]one, Edit, Abandon, Quit ?"
365 while prompt.find(answer) == -1:
366 answer = utils.our_raw_input(prompt)
367 m = re_default_answer.search(prompt)
370 answer = answer[:1].upper()
371 os.unlink(temp_filename)
378 comment = NewComment()
379 comment.package = upload.pkg.changes["source"]
380 comment.version = upload.pkg.changes["version"]
381 comment.comment = newnote
382 comment.author = utils.whoami()
383 comment.trainee = trainee
387 ###############################################################################
389 # suite names DMs can upload to
390 dm_suites = ['unstable', 'experimental']
392 def get_newest_source(source, session):
393 'returns the newest DBSource object in dm_suites'
394 ## the most recent version of the package uploaded to unstable or
395 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
396 ## section of its control file
397 q = session.query(DBSource).filter_by(source = source). \
398 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
399 order_by(desc('source.version'))
402 def get_suite_version_by_source(source, session):
403 'returns a list of tuples (suite_name, version) for source package'
404 q = session.query(Suite.suite_name, DBSource.version). \
405 join(Suite.sources).filter_by(source = source)
408 def get_source_by_package_and_suite(package, suite_name, session):
410 returns a DBSource query filtered by DBBinary.package and this package's
413 return session.query(DBSource). \
414 join(DBSource.binaries).filter_by(package = package). \
415 join(DBBinary.suites).filter_by(suite_name = suite_name)
417 def get_suite_version_by_package(package, arch_string, session):
419 returns a list of tuples (suite_name, version) for binary package and
422 return session.query(Suite.suite_name, DBBinary.version). \
423 join(Suite.binaries).filter_by(package = package). \
424 join(DBBinary.architecture). \
425 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
427 class Upload(object):
429 Everything that has to do with an upload processed.
437 ###########################################################################
440 """ Reset a number of internal variables."""
442 # Initialize the substitution template map
445 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
446 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
447 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
448 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
454 self.later_check_files = []
458 def package_info(self):
460 Format various messages from this Upload to send to the maintainer.
464 ('Reject Reasons', self.rejects),
465 ('Warnings', self.warnings),
466 ('Notes', self.notes),
470 for title, messages in msgs:
472 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
477 ###########################################################################
478 def update_subst(self):
479 """ Set up the per-package template substitution mappings """
483 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
484 if not self.pkg.changes.has_key("architecture") or not \
485 isinstance(self.pkg.changes["architecture"], dict):
486 self.pkg.changes["architecture"] = { "Unknown" : "" }
488 # and maintainer2047 may not exist.
489 if not self.pkg.changes.has_key("maintainer2047"):
490 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
492 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
493 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
494 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
496 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
497 if self.pkg.changes["architecture"].has_key("source") and \
498 self.pkg.changes["changedby822"] != "" and \
499 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
501 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
502 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
503 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
505 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
506 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
507 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
509 # Process policy doesn't set the fingerprint field and I don't want to make it
510 # do it for now as I don't want to have to deal with the case where we accepted
511 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
512 # the meantime so the package will be remarked as rejectable. Urgh.
513 # TODO: Fix this properly
514 if self.pkg.changes.has_key('fingerprint'):
515 session = DBConn().session()
516 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
517 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
518 if self.pkg.changes.has_key("sponsoremail"):
519 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
522 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
523 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
525 # Apply any global override of the Maintainer field
526 if cnf.get("Dinstall::OverrideMaintainer"):
527 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
528 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
530 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
531 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
532 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
533 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
535 ###########################################################################
536 def load_changes(self, filename):
538 Load a changes file and setup a dictionary around it. Also checks for mandantory
541 @type filename: string
542 @param filename: Changes filename, full path.
545 @return: whether the changes file was valid or not. We may want to
546 reject even if this is True (see what gets put in self.rejects).
547 This is simply to prevent us even trying things later which will
548 fail because we couldn't properly parse the file.
551 self.pkg.changes_file = filename
553 # Parse the .changes field into a dictionary
555 self.pkg.changes.update(parse_changes(filename))
556 except CantOpenError:
557 self.rejects.append("%s: can't read file." % (filename))
559 except ParseChangesError, line:
560 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
562 except ChangesUnicodeError:
563 self.rejects.append("%s: changes file not proper utf-8" % (filename))
566 # Parse the Files field from the .changes into another dictionary
568 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
569 except ParseChangesError, line:
570 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
572 except UnknownFormatError, format:
573 self.rejects.append("%s: unknown format '%s'." % (filename, format))
576 # Check for mandatory fields
577 for i in ("distribution", "source", "binary", "architecture",
578 "version", "maintainer", "files", "changes", "description"):
579 if not self.pkg.changes.has_key(i):
580 # Avoid undefined errors later
581 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
584 # Strip a source version in brackets from the source field
585 if re_strip_srcver.search(self.pkg.changes["source"]):
586 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
588 # Ensure the source field is a valid package name.
589 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
590 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
592 # Split multi-value fields into a lower-level dictionary
593 for i in ("architecture", "distribution", "binary", "closes"):
594 o = self.pkg.changes.get(i, "")
596 del self.pkg.changes[i]
598 self.pkg.changes[i] = {}
601 self.pkg.changes[i][j] = 1
603 # Fix the Maintainer: field to be RFC822/2047 compatible
605 (self.pkg.changes["maintainer822"],
606 self.pkg.changes["maintainer2047"],
607 self.pkg.changes["maintainername"],
608 self.pkg.changes["maintaineremail"]) = \
609 fix_maintainer (self.pkg.changes["maintainer"])
610 except ParseMaintError, msg:
611 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
612 % (filename, self.pkg.changes["maintainer"], msg))
614 # ...likewise for the Changed-By: field if it exists.
616 (self.pkg.changes["changedby822"],
617 self.pkg.changes["changedby2047"],
618 self.pkg.changes["changedbyname"],
619 self.pkg.changes["changedbyemail"]) = \
620 fix_maintainer (self.pkg.changes.get("changed-by", ""))
621 except ParseMaintError, msg:
622 self.pkg.changes["changedby822"] = ""
623 self.pkg.changes["changedby2047"] = ""
624 self.pkg.changes["changedbyname"] = ""
625 self.pkg.changes["changedbyemail"] = ""
627 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
628 % (filename, self.pkg.changes["changed-by"], msg))
630 # Ensure all the values in Closes: are numbers
631 if self.pkg.changes.has_key("closes"):
632 for i in self.pkg.changes["closes"].keys():
633 if re_isanum.match (i) == None:
634 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
636 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
637 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
638 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
640 # Check the .changes is non-empty
641 if not self.pkg.files:
642 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
645 # Changes was syntactically valid even if we'll reject
648 ###########################################################################
650 def check_distributions(self):
651 "Check and map the Distribution field"
655 # Handle suite mappings
656 for m in Cnf.ValueList("SuiteMappings"):
659 if mtype == "map" or mtype == "silent-map":
660 (source, dest) = args[1:3]
661 if self.pkg.changes["distribution"].has_key(source):
662 del self.pkg.changes["distribution"][source]
663 self.pkg.changes["distribution"][dest] = 1
664 if mtype != "silent-map":
665 self.notes.append("Mapping %s to %s." % (source, dest))
666 if self.pkg.changes.has_key("distribution-version"):
667 if self.pkg.changes["distribution-version"].has_key(source):
668 self.pkg.changes["distribution-version"][source]=dest
669 elif mtype == "map-unreleased":
670 (source, dest) = args[1:3]
671 if self.pkg.changes["distribution"].has_key(source):
672 for arch in self.pkg.changes["architecture"].keys():
673 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
674 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
675 del self.pkg.changes["distribution"][source]
676 self.pkg.changes["distribution"][dest] = 1
678 elif mtype == "ignore":
680 if self.pkg.changes["distribution"].has_key(suite):
681 del self.pkg.changes["distribution"][suite]
682 self.warnings.append("Ignoring %s as a target suite." % (suite))
683 elif mtype == "reject":
685 if self.pkg.changes["distribution"].has_key(suite):
686 self.rejects.append("Uploads to %s are not accepted." % (suite))
687 elif mtype == "propup-version":
688 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
690 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
691 if self.pkg.changes["distribution"].has_key(args[1]):
692 self.pkg.changes.setdefault("distribution-version", {})
693 for suite in args[2:]:
694 self.pkg.changes["distribution-version"][suite] = suite
696 # Ensure there is (still) a target distribution
697 if len(self.pkg.changes["distribution"].keys()) < 1:
698 self.rejects.append("No valid distribution remaining.")
700 # Ensure target distributions exist
701 for suite in self.pkg.changes["distribution"].keys():
702 if not Cnf.has_key("Suite::%s" % (suite)):
703 self.rejects.append("Unknown distribution `%s'." % (suite))
705 ###########################################################################
707 def binary_file_checks(self, f, session):
709 entry = self.pkg.files[f]
711 # Extract package control information
712 deb_file = utils.open_file(f)
714 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
716 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
718 # Can't continue, none of the checks on control would work.
721 # Check for mandantory "Description:"
724 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
726 self.rejects.append("%s: Missing Description in binary package" % (f))
731 # Check for mandatory fields
732 for field in [ "Package", "Architecture", "Version" ]:
733 if control.Find(field) == None:
735 self.rejects.append("%s: No %s field in control." % (f, field))
738 # Ensure the package name matches the one give in the .changes
739 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
740 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
742 # Validate the package field
743 package = control.Find("Package")
744 if not re_valid_pkg_name.match(package):
745 self.rejects.append("%s: invalid package name '%s'." % (f, package))
747 # Validate the version field
748 version = control.Find("Version")
749 if not re_valid_version.match(version):
750 self.rejects.append("%s: invalid version number '%s'." % (f, version))
752 # Ensure the architecture of the .deb is one we know about.
753 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
754 architecture = control.Find("Architecture")
755 upload_suite = self.pkg.changes["distribution"].keys()[0]
757 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
758 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
759 self.rejects.append("Unknown architecture '%s'." % (architecture))
761 # Ensure the architecture of the .deb is one of the ones
762 # listed in the .changes.
763 if not self.pkg.changes["architecture"].has_key(architecture):
764 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
766 # Sanity-check the Depends field
767 depends = control.Find("Depends")
769 self.rejects.append("%s: Depends field is empty." % (f))
771 # Sanity-check the Provides field
772 provides = control.Find("Provides")
774 provide = re_spacestrip.sub('', provides)
776 self.rejects.append("%s: Provides field is empty." % (f))
777 prov_list = provide.split(",")
778 for prov in prov_list:
779 if not re_valid_pkg_name.match(prov):
780 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
782 # If there is a Built-Using field, we need to check we can find the
783 # exact source version
784 built_using = control.Find("Built-Using")
787 entry["built-using"] = []
788 for dep in apt_pkg.parse_depends(built_using):
789 bu_s, bu_v, bu_e = dep[0]
790 # Check that it's an exact match dependency and we have
791 # some form of version
792 if bu_e != "=" or len(bu_v) < 1:
793 self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
795 # Find the source id for this version
796 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
798 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
800 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
802 except ValueError, e:
803 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
806 # Check the section & priority match those given in the .changes (non-fatal)
807 if control.Find("Section") and entry["section"] != "" \
808 and entry["section"] != control.Find("Section"):
809 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
810 (f, control.Find("Section", ""), entry["section"]))
811 if control.Find("Priority") and entry["priority"] != "" \
812 and entry["priority"] != control.Find("Priority"):
813 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
814 (f, control.Find("Priority", ""), entry["priority"]))
816 entry["package"] = package
817 entry["architecture"] = architecture
818 entry["version"] = version
819 entry["maintainer"] = control.Find("Maintainer", "")
821 if f.endswith(".udeb"):
822 self.pkg.files[f]["dbtype"] = "udeb"
823 elif f.endswith(".deb"):
824 self.pkg.files[f]["dbtype"] = "deb"
826 self.rejects.append("%s is neither a .deb or a .udeb." % (f))
828 entry["source"] = control.Find("Source", entry["package"])
830 # Get the source version
831 source = entry["source"]
834 if source.find("(") != -1:
835 m = re_extract_src_version.match(source)
837 source_version = m.group(2)
839 if not source_version:
840 source_version = self.pkg.files[f]["version"]
842 entry["source package"] = source
843 entry["source version"] = source_version
845 # Ensure the filename matches the contents of the .deb
846 m = re_isadeb.match(f)
849 file_package = m.group(1)
850 if entry["package"] != file_package:
851 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
852 (f, file_package, entry["dbtype"], entry["package"]))
853 epochless_version = re_no_epoch.sub('', control.Find("Version"))
856 file_version = m.group(2)
857 if epochless_version != file_version:
858 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
859 (f, file_version, entry["dbtype"], epochless_version))
862 file_architecture = m.group(3)
863 if entry["architecture"] != file_architecture:
864 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
865 (f, file_architecture, entry["dbtype"], entry["architecture"]))
867 # Check for existent source
868 source_version = entry["source version"]
869 source_package = entry["source package"]
870 if self.pkg.changes["architecture"].has_key("source"):
871 if source_version != self.pkg.changes["version"]:
872 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
873 (source_version, f, self.pkg.changes["version"]))
875 # Check in the SQL database
876 if not source_exists(source_package, source_version, suites = \
877 self.pkg.changes["distribution"].keys(), session = session):
878 # Check in one of the other directories
879 source_epochless_version = re_no_epoch.sub('', source_version)
880 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
881 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
883 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
886 dsc_file_exists = False
887 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
888 if cnf.has_key("Dir::Queue::%s" % (myq)):
889 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
890 dsc_file_exists = True
893 if not dsc_file_exists:
894 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
896 # Check the version and for file overwrites
897 self.check_binary_against_db(f, session)
899 def source_file_checks(self, f, session):
900 entry = self.pkg.files[f]
902 m = re_issource.match(f)
906 entry["package"] = m.group(1)
907 entry["version"] = m.group(2)
908 entry["type"] = m.group(3)
910 # Ensure the source package name matches the Source filed in the .changes
911 if self.pkg.changes["source"] != entry["package"]:
912 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
914 # Ensure the source version matches the version in the .changes file
915 if re_is_orig_source.match(f):
916 changes_version = self.pkg.changes["chopversion2"]
918 changes_version = self.pkg.changes["chopversion"]
920 if changes_version != entry["version"]:
921 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
923 # Ensure the .changes lists source in the Architecture field
924 if not self.pkg.changes["architecture"].has_key("source"):
925 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
927 # Check the signature of a .dsc file
928 if entry["type"] == "dsc":
929 # check_signature returns either:
930 # (None, [list, of, rejects]) or (signature, [])
931 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
933 self.rejects.append(j)
935 entry["architecture"] = "source"
937 def per_suite_file_checks(self, f, suite, session):
939 entry = self.pkg.files[f]
942 if entry.has_key("byhand"):
945 # Check we have fields we need to do these checks
947 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
948 if not entry.has_key(m):
949 self.rejects.append("file '%s' does not have field %s set" % (f, m))
955 # Handle component mappings
956 for m in cnf.ValueList("ComponentMappings"):
957 (source, dest) = m.split()
958 if entry["component"] == source:
959 entry["original component"] = source
960 entry["component"] = dest
962 # Ensure the component is valid for the target suite
963 if cnf.has_key("Suite:%s::Components" % (suite)) and \
964 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
965 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
968 # Validate the component
969 if not get_component(entry["component"], session):
970 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
973 # See if the package is NEW
974 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
977 # Validate the priority
978 if entry["priority"].find('/') != -1:
979 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
981 # Determine the location
982 location = cnf["Dir::Pool"]
983 l = get_location(location, entry["component"], session=session)
985 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
986 entry["location id"] = -1
988 entry["location id"] = l.location_id
990 # Check the md5sum & size against existing files (if any)
991 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
993 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
994 entry["size"], entry["md5sum"], entry["location id"])
997 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
998 elif found is False and poolfile is not None:
999 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1001 if poolfile is None:
1002 entry["files id"] = None
1004 entry["files id"] = poolfile.file_id
1006 # Check for packages that have moved from one component to another
1007 entry['suite'] = suite
1008 arch_list = [entry["architecture"], 'all']
1009 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1010 [suite], arch_list = arch_list, session = session)
1011 if component is not None:
1012 entry["othercomponents"] = component
1014 def check_files(self, action=True):
1015 file_keys = self.pkg.files.keys()
1021 os.chdir(self.pkg.directory)
1023 ret = holding.copy_to_holding(f)
1025 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1029 # check we already know the changes file
1030 # [NB: this check must be done post-suite mapping]
1031 base_filename = os.path.basename(self.pkg.changes_file)
1033 session = DBConn().session()
1036 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1037 # if in the pool or in a queue other than unchecked, reject
1038 if (dbc.in_queue is None) \
1039 or (dbc.in_queue is not None
1040 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1041 self.rejects.append("%s file already known to dak" % base_filename)
1042 except NoResultFound, e:
1046 has_binaries = False
1049 for f, entry in self.pkg.files.items():
1050 # Ensure the file does not already exist in one of the accepted directories
1051 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1052 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1053 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1054 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1056 if not re_taint_free.match(f):
1057 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1059 # Check the file is readable
1060 if os.access(f, os.R_OK) == 0:
1061 # When running in -n, copy_to_holding() won't have
1062 # generated the reject_message, so we need to.
1064 if os.path.exists(f):
1065 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1067 # Don't directly reject, mark to check later to deal with orig's
1068 # we can find in the pool
1069 self.later_check_files.append(f)
1070 entry["type"] = "unreadable"
1073 # If it's byhand skip remaining checks
1074 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1076 entry["type"] = "byhand"
1078 # Checks for a binary package...
1079 elif re_isadeb.match(f):
1081 entry["type"] = "deb"
1083 # This routine appends to self.rejects/warnings as appropriate
1084 self.binary_file_checks(f, session)
1086 # Checks for a source package...
1087 elif re_issource.match(f):
1090 # This routine appends to self.rejects/warnings as appropriate
1091 self.source_file_checks(f, session)
1093 # Not a binary or source package? Assume byhand...
1096 entry["type"] = "byhand"
1098 # Per-suite file checks
1099 entry["oldfiles"] = {}
1100 for suite in self.pkg.changes["distribution"].keys():
1101 self.per_suite_file_checks(f, suite, session)
1105 # If the .changes file says it has source, it must have source.
1106 if self.pkg.changes["architecture"].has_key("source"):
1108 self.rejects.append("no source found and Architecture line in changes mention source.")
1110 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1111 self.rejects.append("source only uploads are not supported.")
1113 ###########################################################################
1115 def __dsc_filename(self):
1117 Returns: (Status, Dsc_Filename)
1119 Status: Boolean; True when there was no error, False otherwise
1120 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1125 for name, entry in self.pkg.files.items():
1126 if entry.has_key("type") and entry["type"] == "dsc":
1128 return False, "cannot process a .changes file with multiple .dsc's."
1132 if not dsc_filename:
1133 return False, "source uploads must contain a dsc file"
1135 return True, dsc_filename
1137 def load_dsc(self, action=True, signing_rules=1):
1139 Find and load the dsc from self.pkg.files into self.dsc
1141 Returns: (Status, Reason)
1143 Status: Boolean; True when there was no error, False otherwise
1144 Reason: String; When Status is False this describes the error
1148 (status, dsc_filename) = self.__dsc_filename()
1150 # If status is false, dsc_filename has the reason
1151 return False, dsc_filename
1154 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1155 except CantOpenError:
1157 return False, "%s: can't read file." % (dsc_filename)
1158 except ParseChangesError, line:
1159 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1160 except InvalidDscError, line:
1161 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1162 except ChangesUnicodeError:
1163 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1167 ###########################################################################
1169 def check_dsc(self, action=True, session=None):
1170 """Returns bool indicating whether or not the source changes are valid"""
1171 # Ensure there is source to check
1172 if not self.pkg.changes["architecture"].has_key("source"):
1175 (status, reason) = self.load_dsc(action=action)
1177 self.rejects.append(reason)
1179 (status, dsc_filename) = self.__dsc_filename()
1181 # If status is false, dsc_filename has the reason
1182 self.rejects.append(dsc_filename)
1185 # Build up the file list of files mentioned by the .dsc
1187 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1188 except NoFilesFieldError:
1189 self.rejects.append("%s: no Files: field." % (dsc_filename))
1191 except UnknownFormatError, format:
1192 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1194 except ParseChangesError, line:
1195 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1198 # Enforce mandatory fields
1199 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1200 if not self.pkg.dsc.has_key(i):
1201 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1204 # Validate the source and version fields
1205 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1206 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1207 if not re_valid_version.match(self.pkg.dsc["version"]):
1208 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1210 # Only a limited list of source formats are allowed in each suite
1211 for dist in self.pkg.changes["distribution"].keys():
1212 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1213 if self.pkg.dsc["format"] not in allowed:
1214 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1216 # Validate the Maintainer field
1218 # We ignore the return value
1219 fix_maintainer(self.pkg.dsc["maintainer"])
1220 except ParseMaintError, msg:
1221 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1222 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1224 # Validate the build-depends field(s)
1225 for field_name in [ "build-depends", "build-depends-indep" ]:
1226 field = self.pkg.dsc.get(field_name)
1228 # Have apt try to parse them...
1230 apt_pkg.ParseSrcDepends(field)
1232 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1234 # Ensure the version number in the .dsc matches the version number in the .changes
1235 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1236 changes_version = self.pkg.files[dsc_filename]["version"]
1238 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1239 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1241 # Ensure the Files field contain only what's expected
1242 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1244 # Ensure source is newer than existing source in target suites
1245 session = DBConn().session()
1246 self.check_source_against_db(dsc_filename, session)
1247 self.check_dsc_against_db(dsc_filename, session)
1249 dbchg = get_dbchange(self.pkg.changes_file, session)
1251 # Finally, check if we're missing any files
1252 for f in self.later_check_files:
1254 # Check if we've already processed this file if we have a dbchg object
1257 for pf in dbchg.files:
1258 if pf.filename == f and pf.processed:
1259 self.notes.append('%s was already processed so we can go ahead' % f)
1261 del self.pkg.files[f]
1263 self.rejects.append("Could not find file %s references in changes" % f)
1267 return (len(self.rejects) == 0)
1269 ###########################################################################
1271 def get_changelog_versions(self, source_dir):
1272 """Extracts a the source package and (optionally) grabs the
1273 version history out of debian/changelog for the BTS."""
1277 # Find the .dsc (again)
1279 for f in self.pkg.files.keys():
1280 if self.pkg.files[f]["type"] == "dsc":
1283 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1284 if not dsc_filename:
1287 # Create a symlink mirror of the source files in our temporary directory
1288 for f in self.pkg.files.keys():
1289 m = re_issource.match(f)
1291 src = os.path.join(source_dir, f)
1292 # If a file is missing for whatever reason, give up.
1293 if not os.path.exists(src):
1296 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1297 self.pkg.orig_files[f].has_key("path"):
1299 dest = os.path.join(os.getcwd(), f)
1300 os.symlink(src, dest)
1302 # If the orig files are not a part of the upload, create symlinks to the
1304 for orig_file in self.pkg.orig_files.keys():
1305 if not self.pkg.orig_files[orig_file].has_key("path"):
1307 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1308 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1310 # Extract the source
1312 unpacked = UnpackedSource(dsc_filename)
1314 self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1317 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1320 # Get the upstream version
1321 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1322 if re_strip_revision.search(upstr_version):
1323 upstr_version = re_strip_revision.sub('', upstr_version)
1325 # Ensure the changelog file exists
1326 changelog_file = unpacked.get_changelog_file()
1327 if changelog_file is None:
1328 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1331 # Parse the changelog
1332 self.pkg.dsc["bts changelog"] = ""
1333 for line in changelog_file.readlines():
1334 m = re_changelog_versions.match(line)
1336 self.pkg.dsc["bts changelog"] += line
1337 changelog_file.close()
1340 # Check we found at least one revision in the changelog
1341 if not self.pkg.dsc["bts changelog"]:
1342 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1344 def check_source(self):
1346 # a) there's no source
1347 if not self.pkg.changes["architecture"].has_key("source"):
1350 tmpdir = utils.temp_dirname()
1352 # Move into the temporary directory
1356 # Get the changelog version history
1357 self.get_changelog_versions(cwd)
1359 # Move back and cleanup the temporary tree
1363 shutil.rmtree(tmpdir)
1365 if e.errno != errno.EACCES:
1367 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1369 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1370 # We probably have u-r or u-w directories so chmod everything
1372 cmd = "chmod -R u+rwx %s" % (tmpdir)
1373 result = os.system(cmd)
1375 utils.fubar("'%s' failed with result %s." % (cmd, result))
1376 shutil.rmtree(tmpdir)
1377 except Exception, e:
1378 print "foobar2 (%s)" % e
1379 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1381 ###########################################################################
1382 def ensure_hashes(self):
1383 # Make sure we recognise the format of the Files: field in the .changes
1384 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1385 if len(format) == 2:
1386 format = int(format[0]), int(format[1])
1388 format = int(float(format[0])), 0
1390 # We need to deal with the original changes blob, as the fields we need
1391 # might not be in the changes dict serialised into the .dak anymore.
1392 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1394 # Copy the checksums over to the current changes dict. This will keep
1395 # the existing modifications to it intact.
1396 for field in orig_changes:
1397 if field.startswith('checksums-'):
1398 self.pkg.changes[field] = orig_changes[field]
1400 # Check for unsupported hashes
1401 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1402 self.rejects.append(j)
1404 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1405 self.rejects.append(j)
1407 # We have to calculate the hash if we have an earlier changes version than
1408 # the hash appears in rather than require it exist in the changes file
1409 for hashname, hashfunc, version in utils.known_hashes:
1410 # TODO: Move _ensure_changes_hash into this class
1411 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1412 self.rejects.append(j)
1413 if "source" in self.pkg.changes["architecture"]:
1414 # TODO: Move _ensure_dsc_hash into this class
1415 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1416 self.rejects.append(j)
1418 def check_hashes(self):
1419 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1420 self.rejects.append(m)
1422 for m in utils.check_size(".changes", self.pkg.files):
1423 self.rejects.append(m)
1425 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1426 self.rejects.append(m)
1428 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1429 self.rejects.append(m)
1431 self.ensure_hashes()
1433 ###########################################################################
1435 def ensure_orig(self, target_dir='.', session=None):
1437 Ensures that all orig files mentioned in the changes file are present
1438 in target_dir. If they do not exist, they are symlinked into place.
1440 An list containing the symlinks that were created are returned (so they
1447 for filename, entry in self.pkg.dsc_files.iteritems():
1448 if not re_is_orig_source.match(filename):
1449 # File is not an orig; ignore
1452 if os.path.exists(filename):
1453 # File exists, no need to continue
1456 def symlink_if_valid(path):
1457 f = utils.open_file(path)
1458 md5sum = apt_pkg.md5sum(f)
1461 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1462 expected = (int(entry['size']), entry['md5sum'])
1464 if fingerprint != expected:
1467 dest = os.path.join(target_dir, filename)
1469 os.symlink(path, dest)
1470 symlinked.append(dest)
1476 session_ = DBConn().session()
1481 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1482 poolfile_path = os.path.join(
1483 poolfile.location.path, poolfile.filename
1486 if symlink_if_valid(poolfile_path):
1496 # Look in some other queues for the file
1497 queues = ('New', 'Byhand', 'ProposedUpdates',
1498 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1500 for queue in queues:
1501 if not cnf.get('Dir::Queue::%s' % queue):
1504 queuefile_path = os.path.join(
1505 cnf['Dir::Queue::%s' % queue], filename
1508 if not os.path.exists(queuefile_path):
1509 # Does not exist in this queue
1512 if symlink_if_valid(queuefile_path):
1517 ###########################################################################
1519 def check_lintian(self):
1521 Extends self.rejects by checking the output of lintian against tags
1522 specified in Dinstall::LintianTags.
1527 # Don't reject binary uploads
1528 if not self.pkg.changes['architecture'].has_key('source'):
1531 # Only check some distributions
1532 for dist in ('unstable', 'experimental'):
1533 if dist in self.pkg.changes['distribution']:
1538 # If we do not have a tagfile, don't do anything
1539 tagfile = cnf.get("Dinstall::LintianTags")
1543 # Parse the yaml file
1544 sourcefile = file(tagfile, 'r')
1545 sourcecontent = sourcefile.read()
1549 lintiantags = yaml.load(sourcecontent)['lintian']
1550 except yaml.YAMLError, msg:
1551 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1554 # Try and find all orig mentioned in the .dsc
1555 symlinked = self.ensure_orig()
1557 # Setup the input file for lintian
1558 fd, temp_filename = utils.temp_filename()
1559 temptagfile = os.fdopen(fd, 'w')
1560 for tags in lintiantags.values():
1561 temptagfile.writelines(['%s\n' % x for x in tags])
1565 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1566 (temp_filename, self.pkg.changes_file)
1568 result, output = commands.getstatusoutput(cmd)
1570 # Remove our tempfile and any symlinks we created
1571 os.unlink(temp_filename)
1573 for symlink in symlinked:
1577 utils.warn("lintian failed for %s [return code: %s]." % \
1578 (self.pkg.changes_file, result))
1579 utils.warn(utils.prefix_multi_line_string(output, \
1580 " [possible output:] "))
1585 [self.pkg.changes_file, "check_lintian"] + list(txt)
1589 parsed_tags = parse_lintian_output(output)
1590 self.rejects.extend(
1591 generate_reject_messages(parsed_tags, lintiantags, log=log)
1594 ###########################################################################
1595 def check_urgency(self):
1597 if self.pkg.changes["architecture"].has_key("source"):
1598 if not self.pkg.changes.has_key("urgency"):
1599 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1600 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1601 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1602 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1603 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1604 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1606 ###########################################################################
1608 # Sanity check the time stamps of files inside debs.
1609 # [Files in the near future cause ugly warnings and extreme time
1610 # travel can cause errors on extraction]
1612 def check_timestamps(self):
1615 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1616 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1617 tar = TarTime(future_cutoff, past_cutoff)
1619 for filename, entry in self.pkg.files.items():
1620 if entry["type"] == "deb":
1623 deb_file = utils.open_file(filename)
1624 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1627 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1628 except SystemError, e:
1629 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1630 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1633 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1637 future_files = tar.future_files.keys()
1639 num_future_files = len(future_files)
1640 future_file = future_files[0]
1641 future_date = tar.future_files[future_file]
1642 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1643 % (filename, num_future_files, future_file, time.ctime(future_date)))
1645 ancient_files = tar.ancient_files.keys()
1647 num_ancient_files = len(ancient_files)
1648 ancient_file = ancient_files[0]
1649 ancient_date = tar.ancient_files[ancient_file]
1650 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1651 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1653 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1655 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1656 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1658 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1664 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1665 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1666 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1667 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1668 self.pkg.changes["sponsoremail"] = uid_email
1673 ###########################################################################
1674 # check_signed_by_key checks
1675 ###########################################################################
1677 def check_signed_by_key(self):
1678 """Ensure the .changes is signed by an authorized uploader."""
1679 session = DBConn().session()
1681 # First of all we check that the person has proper upload permissions
1682 # and that this upload isn't blocked
1683 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1686 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1689 # TODO: Check that import-keyring adds UIDs properly
1691 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1694 # Check that the fingerprint which uploaded has permission to do so
1695 self.check_upload_permissions(fpr, session)
1697 # Check that this package is not in a transition
1698 self.check_transition(session)
1703 def check_upload_permissions(self, fpr, session):
1704 # Check any one-off upload blocks
1705 self.check_upload_blocks(fpr, session)
1707 # If the source_acl is None, source is never allowed
1708 if fpr.source_acl is None:
1709 if self.pkg.changes["architecture"].has_key("source"):
1710 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1711 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1712 self.rejects.append(rej)
1714 # Do DM as a special case
1715 # DM is a special case unfortunately, so we check it first
1716 # (keys with no source access get more access than DMs in one
1717 # way; DMs can only upload for their packages whether source
1718 # or binary, whereas keys with no access might be able to
1719 # upload some binaries)
1720 elif fpr.source_acl.access_level == 'dm':
1721 self.check_dm_upload(fpr, session)
1723 # If not a DM, we allow full upload rights
1724 uid_email = "%s@debian.org" % (fpr.uid.uid)
1725 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1728 # Check binary upload permissions
1729 # By this point we know that DMs can't have got here unless they
1730 # are allowed to deal with the package concerned so just apply
1732 if fpr.binary_acl.access_level == 'full':
1735 # Otherwise we're in the map case
1736 tmparches = self.pkg.changes["architecture"].copy()
1737 tmparches.pop('source', None)
1739 for bam in fpr.binary_acl_map:
1740 tmparches.pop(bam.architecture.arch_string, None)
1742 if len(tmparches.keys()) > 0:
1743 if fpr.binary_reject:
1744 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1745 if len(tmparches.keys()) == 1:
1746 rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1748 rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1749 self.rejects.append(rej)
1751 # TODO: This is where we'll implement reject vs throw away binaries later
1752 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1753 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1754 rej += "\nFingerprint: %s", (fpr.fingerprint)
1755 self.rejects.append(rej)
1758 def check_upload_blocks(self, fpr, session):
1759 """Check whether any upload blocks apply to this source, source
1760 version, uid / fpr combination"""
1762 def block_rej_template(fb):
1763 rej = 'Manual upload block in place for package %s' % fb.source
1764 if fb.version is not None:
1765 rej += ', version %s' % fb.version
1768 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1769 # version is None if the block applies to all versions
1770 if fb.version is None or fb.version == self.pkg.changes['version']:
1771 # Check both fpr and uid - either is enough to cause a reject
1772 if fb.fpr is not None:
1773 if fb.fpr.fingerprint == fpr.fingerprint:
1774 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1775 if fb.uid is not None:
1776 if fb.uid == fpr.uid:
1777 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1780 def check_dm_upload(self, fpr, session):
1781 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1782 ## none of the uploaded packages are NEW
1784 for f in self.pkg.files.keys():
1785 if self.pkg.files[f].has_key("byhand"):
1786 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1788 if self.pkg.files[f].has_key("new"):
1789 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1795 r = get_newest_source(self.pkg.changes["source"], session)
1798 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1799 self.rejects.append(rej)
1802 if not r.dm_upload_allowed:
1803 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1804 self.rejects.append(rej)
1807 ## the Maintainer: field of the uploaded .changes file corresponds with
1808 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1810 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1811 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1813 ## the most recent version of the package uploaded to unstable or
1814 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1815 ## non-developer maintainers cannot NMU or hijack packages)
1817 # uploader includes the maintainer
1819 for uploader in r.uploaders:
1820 (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1821 # Eww - I hope we never have two people with the same name in Debian
1822 if email == fpr.uid.uid or name == fpr.uid.name:
1827 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1830 ## none of the packages are being taken over from other source packages
1831 for b in self.pkg.changes["binary"].keys():
1832 for suite in self.pkg.changes["distribution"].keys():
1833 for s in get_source_by_package_and_suite(b, suite, session):
1834 if s.source != self.pkg.changes["source"]:
1835 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1839 def check_transition(self, session):
1842 sourcepkg = self.pkg.changes["source"]
1844 # No sourceful upload -> no need to do anything else, direct return
1845 # We also work with unstable uploads, not experimental or those going to some
1846 # proposed-updates queue
1847 if "source" not in self.pkg.changes["architecture"] or \
1848 "unstable" not in self.pkg.changes["distribution"]:
1851 # Also only check if there is a file defined (and existant) with
1853 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1854 if transpath == "" or not os.path.exists(transpath):
1857 # Parse the yaml file
1858 sourcefile = file(transpath, 'r')
1859 sourcecontent = sourcefile.read()
1861 transitions = yaml.load(sourcecontent)
1862 except yaml.YAMLError, msg:
1863 # This shouldn't happen, there is a wrapper to edit the file which
1864 # checks it, but we prefer to be safe than ending up rejecting
1866 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1869 # Now look through all defined transitions
1870 for trans in transitions:
1871 t = transitions[trans]
1872 source = t["source"]
1875 # Will be None if nothing is in testing.
1876 current = get_source_in_suite(source, "testing", session)
1877 if current is not None:
1878 compare = apt_pkg.VersionCompare(current.version, expected)
1880 if current is None or compare < 0:
1881 # This is still valid, the current version in testing is older than
1882 # the new version we wait for, or there is none in testing yet
1884 # Check if the source we look at is affected by this.
1885 if sourcepkg in t['packages']:
1886 # The source is affected, lets reject it.
1888 rejectmsg = "%s: part of the %s transition.\n\n" % (
1891 if current is not None:
1892 currentlymsg = "at version %s" % (current.version)
1894 currentlymsg = "not present in testing"
1896 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1898 rejectmsg += "\n".join(textwrap.wrap("""Your package
1899 is part of a testing transition designed to get %s migrated (it is
1900 currently %s, we need version %s). This transition is managed by the
1901 Release Team, and %s is the Release-Team member responsible for it.
1902 Please mail debian-release@lists.debian.org or contact %s directly if you
1903 need further assistance. You might want to upload to experimental until this
1904 transition is done."""
1905 % (source, currentlymsg, expected,t["rm"], t["rm"])))
1907 self.rejects.append(rejectmsg)
1910 ###########################################################################
1911 # End check_signed_by_key checks
1912 ###########################################################################
1914 def build_summaries(self):
1915 """ Build a summary of changes the upload introduces. """
1917 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1919 short_summary = summary
1921 # This is for direport's benefit...
1922 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1925 summary += "Changes: " + f
1927 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1929 summary += self.announce(short_summary, 0)
1931 return (summary, short_summary)
1933 ###########################################################################
1935 def close_bugs(self, summary, action):
1937 Send mail to close bugs as instructed by the closes field in the changes file.
1938 Also add a line to summary if any work was done.
1940 @type summary: string
1941 @param summary: summary text, as given by L{build_summaries}
1944 @param action: Set to false no real action will be done.
1947 @return: summary. If action was taken, extended by the list of closed bugs.
1951 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1953 bugs = self.pkg.changes["closes"].keys()
1959 summary += "Closing bugs: "
1961 summary += "%s " % (bug)
1964 self.Subst["__BUG_NUMBER__"] = bug
1965 if self.pkg.changes["distribution"].has_key("stable"):
1966 self.Subst["__STABLE_WARNING__"] = """
1967 Note that this package is not part of the released stable Debian
1968 distribution. It may have dependencies on other unreleased software,
1969 or other instabilities. Please take care if you wish to install it.
1970 The update will eventually make its way into the next released Debian
1973 self.Subst["__STABLE_WARNING__"] = ""
1974 mail_message = utils.TemplateSubst(self.Subst, template)
1975 utils.send_mail(mail_message)
1977 # Clear up after ourselves
1978 del self.Subst["__BUG_NUMBER__"]
1979 del self.Subst["__STABLE_WARNING__"]
1981 if action and self.logger:
1982 self.logger.log(["closing bugs"] + bugs)
1988 ###########################################################################
1990 def announce(self, short_summary, action):
1992 Send an announce mail about a new upload.
1994 @type short_summary: string
1995 @param short_summary: Short summary text to include in the mail
1998 @param action: Set to false no real action will be done.
2001 @return: Textstring about action taken.
2006 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2008 # Only do announcements for source uploads with a recent dpkg-dev installed
2009 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2010 self.pkg.changes["architecture"].has_key("source"):
2016 self.Subst["__SHORT_SUMMARY__"] = short_summary
2018 for dist in self.pkg.changes["distribution"].keys():
2019 suite = get_suite(dist)
2020 if suite is None: continue
2021 announce_list = suite.announce
2022 if announce_list == "" or lists_done.has_key(announce_list):
2025 lists_done[announce_list] = 1
2026 summary += "Announcing to %s\n" % (announce_list)
2030 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2031 if cnf.get("Dinstall::TrackingServer") and \
2032 self.pkg.changes["architecture"].has_key("source"):
2033 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2034 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2036 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2037 utils.send_mail(mail_message)
2039 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2041 if cnf.FindB("Dinstall::CloseBugs"):
2042 summary = self.close_bugs(summary, action)
2044 del self.Subst["__SHORT_SUMMARY__"]
2048 ###########################################################################
2050 def accept (self, summary, short_summary, session=None):
2054 This moves all files referenced from the .changes into the pool,
2055 sends the accepted mail, announces to lists, closes bugs and
2056 also checks for override disparities. If enabled it will write out
2057 the version history for the BTS Version Tracking and will finally call
2060 @type summary: string
2061 @param summary: Summary text
2063 @type short_summary: string
2064 @param short_summary: Short summary
2068 stats = SummaryStats()
2071 self.logger.log(["installing changes", self.pkg.changes_file])
2076 # Add the .dsc file to the DB first
2077 for newfile, entry in self.pkg.files.items():
2078 if entry["type"] == "dsc":
2079 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2083 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2084 for newfile, entry in self.pkg.files.items():
2085 if entry["type"] == "deb":
2086 b, pf = add_deb_to_db(self, newfile, session)
2088 poolfiles.append(pf)
2090 # If this is a sourceful diff only upload that is moving
2091 # cross-component we need to copy the .orig files into the new
2092 # component too for the same reasons as above.
2093 # XXX: mhy: I think this should be in add_dsc_to_db
2094 if self.pkg.changes["architecture"].has_key("source"):
2095 for orig_file in self.pkg.orig_files.keys():
2096 if not self.pkg.orig_files[orig_file].has_key("id"):
2097 continue # Skip if it's not in the pool
2098 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2099 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2100 continue # Skip if the location didn't change
2103 oldf = get_poolfile_by_id(orig_file_id, session)
2104 old_filename = os.path.join(oldf.location.path, oldf.filename)
2105 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2106 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2108 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2110 # TODO: Care about size/md5sum collisions etc
2111 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2113 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2115 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2116 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2120 # Don't reference the old file from this changes
2122 if p.file_id == oldf.file_id:
2125 poolfiles.append(newf)
2127 # Fix up the DSC references
2130 for df in source.srcfiles:
2131 if df.poolfile.file_id == oldf.file_id:
2132 # Add a new DSC entry and mark the old one for deletion
2133 # Don't do it in the loop so we don't change the thing we're iterating over
2135 newdscf.source_id = source.source_id
2136 newdscf.poolfile_id = newf.file_id
2137 session.add(newdscf)
2147 # Make sure that our source object is up-to-date
2148 session.expire(source)
2150 # Add changelog information to the database
2151 self.store_changelog()
2153 # Install the files into the pool
2154 for newfile, entry in self.pkg.files.items():
2155 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2156 utils.move(newfile, destination)
2157 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2158 stats.accept_bytes += float(entry["size"])
2160 # Copy the .changes file across for suite which need it.
2161 copy_changes = dict([(x.copychanges, '')
2162 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2163 if x.copychanges is not None])
2165 for dest in copy_changes.keys():
2166 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2168 # We're done - commit the database changes
2170 # Our SQL session will automatically start a new transaction after
2173 # Now ensure that the metadata has been added
2174 # This has to be done after we copy the files into the pool
2175 # For source if we have it:
2176 if self.pkg.changes["architecture"].has_key("source"):
2177 import_metadata_into_db(source, session)
2179 # Now for any of our binaries
2181 import_metadata_into_db(b, session)
2185 # Move the .changes into the 'done' directory
2186 ye, mo, da = time.gmtime()[0:3]
2187 donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2188 if not os.path.isdir(donedir):
2189 os.makedirs(donedir)
2191 utils.move(self.pkg.changes_file,
2192 os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2194 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2195 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2198 self.Subst["__SUMMARY__"] = summary
2199 mail_message = utils.TemplateSubst(self.Subst,
2200 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2201 utils.send_mail(mail_message)
2202 self.announce(short_summary, 1)
2204 ## Helper stuff for DebBugs Version Tracking
2205 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2206 if self.pkg.changes["architecture"].has_key("source"):
2207 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2208 version_history = os.fdopen(fd, 'w')
2209 version_history.write(self.pkg.dsc["bts changelog"])
2210 version_history.close()
2211 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2212 self.pkg.changes_file[:-8]+".versions")
2213 os.rename(temp_filename, filename)
2214 os.chmod(filename, 0644)
2216 # Write out the binary -> source mapping.
2217 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2218 debinfo = os.fdopen(fd, 'w')
2219 for name, entry in sorted(self.pkg.files.items()):
2220 if entry["type"] == "deb":
2221 line = " ".join([entry["package"], entry["version"],
2222 entry["architecture"], entry["source package"],
2223 entry["source version"]])
2224 debinfo.write(line+"\n")
2226 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2227 self.pkg.changes_file[:-8]+".debinfo")
2228 os.rename(temp_filename, filename)
2229 os.chmod(filename, 0644)
2233 # Set up our copy queues (e.g. buildd queues)
2234 for suite_name in self.pkg.changes["distribution"].keys():
2235 suite = get_suite(suite_name, session)
2236 for q in suite.copy_queues:
2238 q.add_file_from_pool(f)
2243 stats.accept_count += 1
2245 def check_override(self):
2247 Checks override entries for validity. Mails "Override disparity" warnings,
2248 if that feature is enabled.
2250 Abandons the check if
2251 - override disparity checks are disabled
2252 - mail sending is disabled
2257 # Abandon the check if override disparity checks have been disabled
2258 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2261 summary = self.pkg.check_override()
2266 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2269 self.Subst["__SUMMARY__"] = summary
2270 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2271 utils.send_mail(mail_message)
2272 del self.Subst["__SUMMARY__"]
2274 ###########################################################################
2276 def remove(self, from_dir=None):
2278 Used (for instance) in p-u to remove the package from unchecked
2280 Also removes the package from holding area.
2282 if from_dir is None:
2283 from_dir = self.pkg.directory
2286 for f in self.pkg.files.keys():
2287 os.unlink(os.path.join(from_dir, f))
2288 if os.path.exists(os.path.join(h.holding_dir, f)):
2289 os.unlink(os.path.join(h.holding_dir, f))
2291 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2292 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2293 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2295 ###########################################################################
2297 def move_to_queue (self, queue):
2299 Move files to a destination queue using the permissions in the table
2302 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2303 queue.path, perms=int(queue.change_perms, 8))
2304 for f in self.pkg.files.keys():
2305 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2307 ###########################################################################
2309 def force_reject(self, reject_files):
2311 Forcefully move files from the current directory to the
2312 reject directory. If any file already exists in the reject
2313 directory it will be moved to the morgue to make way for
2316 @type reject_files: dict
2317 @param reject_files: file dictionary
2323 for file_entry in reject_files:
2324 # Skip any files which don't exist or which we don't have permission to copy.
2325 if os.access(file_entry, os.R_OK) == 0:
2328 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
2331 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2333 # File exists? Let's find a new name by adding a number
2334 if e.errno == errno.EEXIST:
2336 dest_file = utils.find_next_free(dest_file, 255)
2337 except NoFreeFilenameError:
2338 # Something's either gone badly Pete Tong, or
2339 # someone is trying to exploit us.
2340 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2343 # Make sure we really got it
2345 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2348 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2352 # If we got here, we own the destination file, so we can
2353 # safely overwrite it.
2354 utils.move(file_entry, dest_file, 1, perms=0660)
2357 ###########################################################################
2358 def do_reject (self, manual=0, reject_message="", notes=""):
2360 Reject an upload. If called without a reject message or C{manual} is
2361 true, spawn an editor so the user can write one.
2364 @param manual: manual or automated rejection
2366 @type reject_message: string
2367 @param reject_message: A reject message
2372 # If we weren't given a manual rejection message, spawn an
2373 # editor so the user can add one in...
2374 if manual and not reject_message:
2375 (fd, temp_filename) = utils.temp_filename()
2376 temp_file = os.fdopen(fd, 'w')
2379 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2380 % (note.author, note.version, note.notedate, note.comment))
2382 editor = os.environ.get("EDITOR","vi")
2384 while answer == 'E':
2385 os.system("%s %s" % (editor, temp_filename))
2386 temp_fh = utils.open_file(temp_filename)
2387 reject_message = "".join(temp_fh.readlines())
2389 print "Reject message:"
2390 print utils.prefix_multi_line_string(reject_message," ",include_blank_lines=1)
2391 prompt = "[R]eject, Edit, Abandon, Quit ?"
2393 while prompt.find(answer) == -1:
2394 answer = utils.our_raw_input(prompt)
2395 m = re_default_answer.search(prompt)
2398 answer = answer[:1].upper()
2399 os.unlink(temp_filename)
2405 print "Rejecting.\n"
2409 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2410 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2412 # Move all the files into the reject directory
2413 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2414 self.force_reject(reject_files)
2416 # If we fail here someone is probably trying to exploit the race
2417 # so let's just raise an exception ...
2418 if os.path.exists(reason_filename):
2419 os.unlink(reason_filename)
2420 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2422 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2426 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2427 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2428 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2429 os.write(reason_fd, reject_message)
2430 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2432 # Build up the rejection email
2433 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2434 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2435 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2436 self.Subst["__REJECT_MESSAGE__"] = ""
2437 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2438 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2439 # Write the rejection email out as the <foo>.reason file
2440 os.write(reason_fd, reject_mail_message)
2442 del self.Subst["__REJECTOR_ADDRESS__"]
2443 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2444 del self.Subst["__CC__"]
2448 # Send the rejection mail
2449 utils.send_mail(reject_mail_message)
2452 self.logger.log(["rejected", self.pkg.changes_file])
2454 stats = SummaryStats()
2455 stats.reject_count += 1
2458 ################################################################################
2459 def in_override_p(self, package, component, suite, binary_type, filename, session):
2461 Check if a package already has override entries in the DB
2463 @type package: string
2464 @param package: package name
2466 @type component: string
2467 @param component: database id of the component
2470 @param suite: database id of the suite
2472 @type binary_type: string
2473 @param binary_type: type of the package
2475 @type filename: string
2476 @param filename: filename we check
2478 @return: the database result. But noone cares anyway.
2484 if binary_type == "": # must be source
2487 file_type = binary_type
2489 # Override suite name; used for example with proposed-updates
2490 oldsuite = get_suite(suite, session)
2491 if (not oldsuite is None) and oldsuite.overridesuite:
2492 suite = oldsuite.overridesuite
2494 result = get_override(package, suite, component, file_type, session)
2496 # If checking for a source package fall back on the binary override type
2497 if file_type == "dsc" and len(result) < 1:
2498 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2500 # Remember the section and priority so we can check them later if appropriate
2503 self.pkg.files[filename]["override section"] = result.section.section
2504 self.pkg.files[filename]["override priority"] = result.priority.priority
2509 ################################################################################
2510 def get_anyversion(self, sv_list, suite):
2513 @param sv_list: list of (suite, version) tuples to check
2516 @param suite: suite name
2522 anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2523 for (s, v) in sv_list:
2524 if s in [ x.lower() for x in anysuite ]:
2525 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2530 ################################################################################
2532 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2535 @param sv_list: list of (suite, version) tuples to check
2537 @type filename: string
2538 @param filename: XXX
2540 @type new_version: string
2541 @param new_version: XXX
2543 Ensure versions are newer than existing packages in target
2544 suites and that cross-suite version checking rules as
2545 set out in the conf file are satisfied.
2550 # Check versions for each target suite
2551 for target_suite in self.pkg.changes["distribution"].keys():
2552 # Check we can find the target suite
2553 ts = get_suite(target_suite)
2555 self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
2558 must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2559 must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2561 # Enforce "must be newer than target suite" even if conffile omits it
2562 if target_suite not in must_be_newer_than:
2563 must_be_newer_than.append(target_suite)
2565 for (suite, existent_version) in sv_list:
2566 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2568 if suite in must_be_newer_than and sourceful and vercmp < 1:
2569 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2571 if suite in must_be_older_than and vercmp > -1:
2574 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2575 # we really use the other suite, ignoring the conflicting one ...
2576 addsuite = self.pkg.changes["distribution-version"][suite]
2578 add_version = self.get_anyversion(sv_list, addsuite)
2579 target_version = self.get_anyversion(sv_list, target_suite)
2582 # not add_version can only happen if we map to a suite
2583 # that doesn't enhance the suite we're propup'ing from.
2584 # so "propup-ver x a b c; map a d" is a problem only if
2585 # d doesn't enhance a.
2587 # i think we could always propagate in this case, rather
2588 # than complaining. either way, this isn't a REJECT issue
2590 # And - we really should complain to the dorks who configured dak
2591 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2592 self.pkg.changes.setdefault("propdistribution", {})
2593 self.pkg.changes["propdistribution"][addsuite] = 1
2595 elif not target_version:
2596 # not targets_version is true when the package is NEW
2597 # we could just stick with the "...old version..." REJECT
2598 # for this, I think.
2599 self.rejects.append("Won't propogate NEW packages.")
2600 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2601 # propogation would be redundant. no need to reject though.
2602 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2604 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2605 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2607 self.warnings.append("Propogating upload to %s" % (addsuite))
2608 self.pkg.changes.setdefault("propdistribution", {})
2609 self.pkg.changes["propdistribution"][addsuite] = 1
2613 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2615 ################################################################################
2616 def check_binary_against_db(self, filename, session):
2617 # Ensure version is sane
2618 self.cross_suite_version_check( \
2619 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2620 self.pkg.files[filename]["architecture"], session),
2621 filename, self.pkg.files[filename]["version"], sourceful=False)
2623 # Check for any existing copies of the file
2624 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2625 q = q.filter_by(version=self.pkg.files[filename]["version"])
2626 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2629 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2631 ################################################################################
2633 def check_source_against_db(self, filename, session):
2634 source = self.pkg.dsc.get("source")
2635 version = self.pkg.dsc.get("version")
2637 # Ensure version is sane
2638 self.cross_suite_version_check( \
2639 get_suite_version_by_source(source, session), filename, version,
2642 ################################################################################
2643 def check_dsc_against_db(self, filename, session):
2646 @warning: NB: this function can remove entries from the 'files' index [if
2647 the orig tarball is a duplicate of the one in the archive]; if
2648 you're iterating over 'files' and call this function as part of
2649 the loop, be sure to add a check to the top of the loop to
2650 ensure you haven't just tried to dereference the deleted entry.
2655 self.pkg.orig_files = {} # XXX: do we need to clear it?
2656 orig_files = self.pkg.orig_files
2658 # Try and find all files mentioned in the .dsc. This has
2659 # to work harder to cope with the multiple possible
2660 # locations of an .orig.tar.gz.
2661 # The ordering on the select is needed to pick the newest orig
2662 # when it exists in multiple places.
2663 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2665 if self.pkg.files.has_key(dsc_name):
2666 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2667 actual_size = int(self.pkg.files[dsc_name]["size"])
2668 found = "%s in incoming" % (dsc_name)
2670 # Check the file does not already exist in the archive
2671 ql = get_poolfile_like_name(dsc_name, session)
2673 # Strip out anything that isn't '%s' or '/%s$'
2675 if not i.filename.endswith(dsc_name):
2678 # "[dak] has not broken them. [dak] has fixed a
2679 # brokenness. Your crappy hack exploited a bug in
2682 # "(Come on! I thought it was always obvious that
2683 # one just doesn't release different files with
2684 # the same name and version.)"
2685 # -- ajk@ on d-devel@l.d.o
2688 # Ignore exact matches for .orig.tar.gz
2690 if re_is_orig_source.match(dsc_name):
2692 if self.pkg.files.has_key(dsc_name) and \
2693 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2694 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2695 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2696 # TODO: Don't delete the entry, just mark it as not needed
2697 # This would fix the stupidity of changing something we often iterate over
2698 # whilst we're doing it
2699 del self.pkg.files[dsc_name]
2700 dsc_entry["files id"] = i.file_id
2701 if not orig_files.has_key(dsc_name):
2702 orig_files[dsc_name] = {}
2703 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2706 # Don't bitch that we couldn't find this file later
2708 self.later_check_files.remove(dsc_name)
2714 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2716 elif re_is_orig_source.match(dsc_name):
2718 ql = get_poolfile_like_name(dsc_name, session)
2720 # Strip out anything that isn't '%s' or '/%s$'
2721 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2723 if not i.filename.endswith(dsc_name):
2727 # Unfortunately, we may get more than one match here if,
2728 # for example, the package was in potato but had an -sa
2729 # upload in woody. So we need to choose the right one.
2731 # default to something sane in case we don't match any or have only one
2736 old_file = os.path.join(i.location.path, i.filename)
2737 old_file_fh = utils.open_file(old_file)
2738 actual_md5 = apt_pkg.md5sum(old_file_fh)
2740 actual_size = os.stat(old_file)[stat.ST_SIZE]
2741 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2744 old_file = os.path.join(i.location.path, i.filename)
2745 old_file_fh = utils.open_file(old_file)
2746 actual_md5 = apt_pkg.md5sum(old_file_fh)
2748 actual_size = os.stat(old_file)[stat.ST_SIZE]
2750 suite_type = x.location.archive_type
2751 # need this for updating dsc_files in install()
2752 dsc_entry["files id"] = x.file_id
2753 # See install() in process-accepted...
2754 if not orig_files.has_key(dsc_name):
2755 orig_files[dsc_name] = {}
2756 orig_files[dsc_name]["id"] = x.file_id
2757 orig_files[dsc_name]["path"] = old_file
2758 orig_files[dsc_name]["location"] = x.location.location_id
2760 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2761 # Not there? Check the queue directories...
2762 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2763 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2765 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2766 if os.path.exists(in_otherdir):
2767 in_otherdir_fh = utils.open_file(in_otherdir)
2768 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2769 in_otherdir_fh.close()
2770 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2772 if not orig_files.has_key(dsc_name):
2773 orig_files[dsc_name] = {}
2774 orig_files[dsc_name]["path"] = in_otherdir
2777 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2780 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2782 if actual_md5 != dsc_entry["md5sum"]:
2783 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2784 if actual_size != int(dsc_entry["size"]):
2785 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2787 ################################################################################
2788 # This is used by process-new and process-holding to recheck a changes file
2789 # at the time we're running. It mainly wraps various other internal functions
2790 # and is similar to accepted_checks - these should probably be tidied up
2792 def recheck(self, session):
2794 for f in self.pkg.files.keys():
2795 # The .orig.tar.gz can disappear out from under us is it's a
2796 # duplicate of one in the archive.
2797 if not self.pkg.files.has_key(f):
2800 entry = self.pkg.files[f]
2802 # Check that the source still exists
2803 if entry["type"] == "deb":
2804 source_version = entry["source version"]
2805 source_package = entry["source package"]
2806 if not self.pkg.changes["architecture"].has_key("source") \
2807 and not source_exists(source_package, source_version, \
2808 suites = self.pkg.changes["distribution"].keys(), session = session):
2809 source_epochless_version = re_no_epoch.sub('', source_version)
2810 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2812 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2813 if cnf.has_key("Dir::Queue::%s" % (q)):
2814 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2817 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2819 # Version and file overwrite checks
2820 if entry["type"] == "deb":
2821 self.check_binary_against_db(f, session)
2822 elif entry["type"] == "dsc":
2823 self.check_source_against_db(f, session)
2824 self.check_dsc_against_db(f, session)
2826 ################################################################################
2827 def accepted_checks(self, overwrite_checks, session):
2828 # Recheck anything that relies on the database; since that's not
2829 # frozen between accept and our run time when called from p-a.
2831 # overwrite_checks is set to False when installing to stable/oldstable
2836 # Find the .dsc (again)
2838 for f in self.pkg.files.keys():
2839 if self.pkg.files[f]["type"] == "dsc":
2842 for checkfile in self.pkg.files.keys():
2843 # The .orig.tar.gz can disappear out from under us is it's a
2844 # duplicate of one in the archive.
2845 if not self.pkg.files.has_key(checkfile):
2848 entry = self.pkg.files[checkfile]
2850 # Check that the source still exists
2851 if entry["type"] == "deb":
2852 source_version = entry["source version"]
2853 source_package = entry["source package"]
2854 if not self.pkg.changes["architecture"].has_key("source") \
2855 and not source_exists(source_package, source_version, \
2856 suites = self.pkg.changes["distribution"].keys(), \
2858 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2860 # Version and file overwrite checks
2861 if overwrite_checks:
2862 if entry["type"] == "deb":
2863 self.check_binary_against_db(checkfile, session)
2864 elif entry["type"] == "dsc":
2865 self.check_source_against_db(checkfile, session)
2866 self.check_dsc_against_db(dsc_filename, session)
2868 # propogate in the case it is in the override tables:
2869 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2870 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2871 propogate[suite] = 1
2873 nopropogate[suite] = 1
2875 for suite in propogate.keys():
2876 if suite in nopropogate:
2878 self.pkg.changes["distribution"][suite] = 1
2880 for checkfile in self.pkg.files.keys():
2881 # Check the package is still in the override tables
2882 for suite in self.pkg.changes["distribution"].keys():
2883 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2884 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2886 ################################################################################
2887 # If any file of an upload has a recent mtime then chances are good
2888 # the file is still being uploaded.
2890 def upload_too_new(self):
2893 # Move back to the original directory to get accurate time stamps
2895 os.chdir(self.pkg.directory)
2896 file_list = self.pkg.files.keys()
2897 file_list.extend(self.pkg.dsc_files.keys())
2898 file_list.append(self.pkg.changes_file)
2901 last_modified = time.time()-os.path.getmtime(f)
2902 if last_modified < int(cnf["Dinstall::SkipTime"]):
2911 def store_changelog(self):
2913 # Skip binary-only upload if it is not a bin-NMU
2914 if not self.pkg.changes['architecture'].has_key('source'):
2915 from daklib.regexes import re_bin_only_nmu
2916 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2919 session = DBConn().session()
2921 # Check if upload already has a changelog entry
2922 query = """SELECT changelog_id FROM changes WHERE source = :source
2923 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2924 if session.execute(query, {'source': self.pkg.changes['source'], \
2925 'version': self.pkg.changes['version'], \
2926 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2930 # Add current changelog text into changelogs_text table, return created ID
2931 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2932 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2934 # Link ID to the upload available in changes table
2935 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2936 AND version = :version AND architecture = :architecture"""
2937 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2938 'version': self.pkg.changes['version'], \
2939 'architecture': " ".join(self.pkg.changes['architecture'].keys())})