5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_list
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 ###############################################################################
61 def get_type(f, session):
63 Get the file type of C{f}
66 @param f: file entry from Changes object
68 @type session: SQLA Session
69 @param session: SQLAlchemy session object
76 if f.has_key("dbtype"):
77 file_type = f["dbtype"]
78 elif re_source_ext.match(f["type"]):
80 elif f['architecture'] == 'source' and f["type"] == 'unreadable':
81 utils.warn('unreadable source file (will continue and hope for the best)')
85 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
87 # Validate the override type
88 type_id = get_override_type(file_type, session)
90 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
94 ################################################################################
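# Illustrative sketch (not part of the original module): get_type() is normally
# driven from the files dict of a parsed .changes upload; the 'files' dict below
# is assumed to come from the caller and DBConn from daklib.dbconn.
#
#   session = DBConn().session()
#   for name, f in files.items():
#       file_type = get_type(f, session)    # e.g. "deb", "udeb" or "dsc"
#       print "%s -> %s" % (name, file_type)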
96 # Determine what parts in a .changes are NEW
98 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
100 Determine what parts in a C{changes} file are NEW.
103 @param filename: changes filename
105 @type changes: Upload.Pkg.changes dict
106 @param changes: Changes dictionary
108 @type files: Upload.Pkg.files dict
109 @param files: Files dictionary
112 @param warn: Warn if overrides are added for (old)stable
114 @type dsc: Upload.Pkg.dsc dict
115 @param dsc: (optional); Dsc dictionary
118 @param new: new packages as returned by a previous call to this function, but override information may have changed
121 @return: dictionary of NEW components.
124 # TODO: This should all use the database instead of parsing the changes
130 dbchg = get_dbchange(filename, session)
132 print "Warning: cannot find changes file in database; won't check byhand"
134 # Try to get the Package-Set field from an included .dsc file (if possible).
136 for package, entry in build_package_list(dsc, session).items():
137 if package not in new:
140 # Build up a list of potentially new things
141 for name, f in files.items():
142 # Keep a record of byhand elements
143 if f["section"] == "byhand":
148 priority = f["priority"]
149 section = f["section"]
150 file_type = get_type(f, session)
151 component = f["component"]
153 if file_type == "dsc":
156 if not new.has_key(pkg):
158 new[pkg]["priority"] = priority
159 new[pkg]["section"] = section
160 new[pkg]["type"] = file_type
161 new[pkg]["component"] = component
162 new[pkg]["files"] = []
164 old_type = new[pkg]["type"]
165 if old_type != file_type:
166 # source gets trumped by deb or udeb
167 if old_type == "dsc":
168 new[pkg]["priority"] = priority
169 new[pkg]["section"] = section
170 new[pkg]["type"] = file_type
171 new[pkg]["component"] = component
173 new[pkg]["files"].append(name)
175 if f.has_key("othercomponents"):
176 new[pkg]["othercomponents"] = f["othercomponents"]
178 # Fix up the list of target suites
180 for suite in changes["suite"].keys():
181 oldsuite = get_suite(suite, session)
183 print "WARNING: Invalid suite %s found" % suite
186 if oldsuite.overridesuite:
187 newsuite = get_suite(oldsuite.overridesuite, session)
190 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
191 oldsuite.overridesuite, suite)
192 del changes["suite"][suite]
193 changes["suite"][oldsuite.overridesuite] = 1
195 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
196 oldsuite.overridesuite, suite)
198 # Check for unprocessed byhand files
199 if dbchg is not None:
200 for b in byhand.keys():
201 # Find the file entry in the database
203 for f in dbchg.files:
206 # If it's processed, we can ignore it
212 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % (b)
214 # Check for new stuff
215 for suite in changes["suite"].keys():
216 for pkg in new.keys():
217 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
219 for file_entry in new[pkg]["files"]:
220 if files[file_entry].has_key("new"):
221 del files[file_entry]["new"]
225 for s in ['stable', 'oldstable']:
226 if changes["suite"].has_key(s):
227 print "WARNING: overrides will be added for %s!" % s
228 for pkg in new.keys():
229 if new[pkg].has_key("othercomponents"):
230 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
234 ################################################################################
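# Illustrative sketch (not part of the original module): a typical call to
# determine_new() on a parsed upload; the 'changes' and 'files' dicts and the
# session are assumed to come from load_changes()/build_file_list() and DBConn().
#
#   new = determine_new("hello_1.0-1_amd64.changes", changes, files,
#                       warn=0, session=session)
#   # per the docstring above, the return value is the dict of NEW components
#   for pkg in new.keys():
#       print "NEW: %s (%s/%s, %s)" % (pkg, new[pkg]["component"],
#                                      new[pkg]["section"], new[pkg]["type"])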
236 def check_valid(new, session = None):
238 Check if section and priority for NEW packages exist in database.
239 Additionally does sanity checks:
240 - debian-installer packages have to be udeb (or source)
241 - non debian-installer packages can not be udeb
242 - source priority can only be assigned to dsc file types
245 @param new: Dict of new packages with their section, priority and type.
248 for pkg in new.keys():
249 section_name = new[pkg]["section"]
250 priority_name = new[pkg]["priority"]
251 file_type = new[pkg]["type"]
253 section = get_section(section_name, session)
255 new[pkg]["section id"] = -1
257 new[pkg]["section id"] = section.section_id
259 priority = get_priority(priority_name, session)
261 new[pkg]["priority id"] = -1
263 new[pkg]["priority id"] = priority.priority_id
266 di = section_name.find("debian-installer") != -1
268 # If d-i, we must be udeb and vice-versa
269 if (di and file_type not in ("udeb", "dsc")) or \
270 (not di and file_type == "udeb"):
271 new[pkg]["section id"] = -1
273 # If dsc we need to be source and vice-versa
274 if (priority_name == "source" and file_type != "dsc") or \
275 (priority_name != "source" and file_type == "dsc"):
276 new[pkg]["priority id"] = -1
278 ###############################################################################
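# Illustrative sketch (not part of the original module): check_valid() mutates
# the 'new' dict in place, filling in "section id"/"priority id" (-1 when the
# value is unknown or inconsistent). A minimal, hypothetical input ('session'
# assumed as elsewhere):
#
#   new = {
#       "hello": {"section": "utils", "priority": "optional", "type": "deb"},
#       "hello-installer": {"section": "debian-installer", "priority": "optional", "type": "deb"},
#   }
#   check_valid(new, session=session)
#   # "hello-installer" ends up with new[pkg]["section id"] == -1, because a
#   # non-udeb may not live in a debian-installer section.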
280 # Used by Upload.check_timestamps
281 class TarTime(object):
282 def __init__(self, future_cutoff, past_cutoff):
284 self.future_cutoff = future_cutoff
285 self.past_cutoff = past_cutoff
288 self.future_files = {}
289 self.ancient_files = {}
291 def callback(self, member, data):
292 if member.mtime > self.future_cutoff:
293 self.future_files[member.name] = member.mtime
294 if member.mtime < self.past_cutoff:
295 self.ancient_files[member.name] = member.mtime
297 ###############################################################################
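# Illustrative sketch (not part of the original module): TarTime is used as the
# callback while walking a .deb's control tarball, as check_timestamps() further
# down does; the cutoff values and .deb path here are hypothetical.
#
#   import time
#   import apt_inst
#
#   tar = TarTime(time.time() + 24 * 60 * 60,
#                 time.mktime(time.strptime("1975", "%Y")))
#   deb = apt_inst.DebFile("hello_1.0-1_amd64.deb")
#   deb.control.go(tar.callback)
#   print tar.future_files, tar.ancient_files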
299 def prod_maintainer(notes, upload):
302 # Here we prepare an editor and get them ready to prod...
303 (fd, temp_filename) = utils.temp_filename()
304 temp_file = os.fdopen(fd, 'w')
306 temp_file.write(note.comment)
308 editor = os.environ.get("EDITOR","vi")
311 os.system("%s %s" % (editor, temp_filename))
312 temp_fh = utils.open_file(temp_filename)
313 prod_message = "".join(temp_fh.readlines())
315 print "Prod message:"
316 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
317 prompt = "[P]rod, Edit, Abandon, Quit ?"
319 while prompt.find(answer) == -1:
320 answer = utils.our_raw_input(prompt)
321 m = re_default_answer.search(prompt)
324 answer = answer[:1].upper()
325 os.unlink(temp_filename)
331 # Otherwise, do the prodding...
332 user_email_address = utils.whoami() + " <%s>" % (
333 cnf["Dinstall::MyAdminAddress"])
337 Subst["__FROM_ADDRESS__"] = user_email_address
338 Subst["__PROD_MESSAGE__"] = prod_message
339 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
341 prod_mail_message = utils.TemplateSubst(
342 Subst,cnf["Dir::Templates"]+"/process-new.prod")
345 utils.send_mail(prod_mail_message)
347 print "Sent prodding message"
349 ################################################################################
351 def edit_note(note, upload, session, trainee=False):
352 # Write the current data to a temporary file
353 (fd, temp_filename) = utils.temp_filename()
354 editor = os.environ.get("EDITOR","vi")
357 os.system("%s %s" % (editor, temp_filename))
358 temp_file = utils.open_file(temp_filename)
359 newnote = temp_file.read().rstrip()
362 print utils.prefix_multi_line_string(newnote," ")
363 prompt = "[D]one, Edit, Abandon, Quit ?"
365 while prompt.find(answer) == -1:
366 answer = utils.our_raw_input(prompt)
367 m = re_default_answer.search(prompt)
370 answer = answer[:1].upper()
371 os.unlink(temp_filename)
378 comment = NewComment()
379 comment.package = upload.pkg.changes["source"]
380 comment.version = upload.pkg.changes["version"]
381 comment.comment = newnote
382 comment.author = utils.whoami()
383 comment.trainee = trainee
387 ###############################################################################
389 # suite names DMs can upload to
390 dm_suites = ['unstable', 'experimental']
392 def get_newest_source(source, session):
393 'returns the newest DBSource object in dm_suites'
394 ## the most recent version of the package uploaded to unstable or
395 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
396 ## section of its control file
397 q = session.query(DBSource).filter_by(source = source). \
398 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
399 order_by(desc('source.version'))
402 def get_suite_version_by_source(source, session):
403 'returns a list of tuples (suite_name, version) for source package'
404 q = session.query(Suite.suite_name, DBSource.version). \
405 join(Suite.sources).filter_by(source = source)
408 def get_source_by_package_and_suite(package, suite_name, session):
410 returns a DBSource query filtered by DBBinary.package and this package's
413 return session.query(DBSource). \
414 join(DBSource.binaries).filter_by(package = package). \
415 join(DBBinary.suites).filter_by(suite_name = suite_name)
417 def get_suite_version_by_package(package, arch_string, session):
419 returns a list of tuples (suite_name, version) for binary package and
422 return session.query(Suite.suite_name, DBBinary.version). \
423 join(Suite.binaries).filter_by(package = package). \
424 join(DBBinary.architecture). \
425 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
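# Illustrative sketch (not part of the original module): typical use of the
# lookup helpers above; the package/suite names are hypothetical and 'session'
# comes from DBConn().session().
#
#   src = get_newest_source("hello", session)
#   if src is not None and not src.dm_upload_allowed:
#       print "DM uploads not allowed for hello %s" % src.version
#
#   for suite_name, version in get_suite_version_by_source("hello", session):
#       print "source: %s %s" % (suite_name, version)
#
#   for suite_name, version in get_suite_version_by_package("hello", "amd64", session):
#       print "binary: %s %s" % (suite_name, version)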
427 class Upload(object):
429 Everything that has to do with processing an upload.
437 ###########################################################################
440 """ Reset a number of internal variables."""
442 # Initialize the substitution template map
445 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
446 if cnf.has_key("Dinstall::BugServer"):
447 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
448 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
449 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
455 self.later_check_files = []
459 def package_info(self):
461 Format various messages from this Upload to send to the maintainer.
465 ('Reject Reasons', self.rejects),
466 ('Warnings', self.warnings),
467 ('Notes', self.notes),
471 for title, messages in msgs:
473 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
478 ###########################################################################
479 def update_subst(self):
480 """ Set up the per-package template substitution mappings """
484 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
485 if not self.pkg.changes.has_key("architecture") or not \
486 isinstance(self.pkg.changes["architecture"], dict):
487 self.pkg.changes["architecture"] = { "Unknown" : "" }
489 # and maintainer2047 may not exist.
490 if not self.pkg.changes.has_key("maintainer2047"):
491 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
493 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
494 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
495 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
497 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
498 if self.pkg.changes["architecture"].has_key("source") and \
499 self.pkg.changes["changedby822"] != "" and \
500 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
502 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
503 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
504 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
506 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
507 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
508 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
510 # Process policy doesn't set the fingerprint field and I don't want to make it
511 # do it for now as I don't want to have to deal with the case where we accepted
512 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
513 # the meantime so the package will be remarked as rejectable. Urgh.
514 # TODO: Fix this properly
515 if self.pkg.changes.has_key('fingerprint'):
516 session = DBConn().session()
517 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
518 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
519 if self.pkg.changes.has_key("sponsoremail"):
520 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
523 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
524 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
526 # Apply any global override of the Maintainer field
527 if cnf.get("Dinstall::OverrideMaintainer"):
528 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
529 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
531 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
532 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
533 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
534 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
536 ###########################################################################
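# Illustrative sketch (not part of the original module): the Subst map filled in
# by update_subst() is combined with a mail template via utils.TemplateSubst(),
# as prod_maintainer() does above; 'upload' is a hypothetical Upload instance
# and the template name is only an example.
#
#   cnf = Config()
#   upload.update_subst()
#   mail = utils.TemplateSubst(upload.Subst,
#                              cnf["Dir::Templates"] + "/process-unchecked.new")
#   utils.send_mail(mail)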
537 def load_changes(self, filename):
539 Load a changes file and set up a dictionary around it. Also checks for mandatory
542 @type filename: string
543 @param filename: Changes filename, full path.
546 @return: whether the changes file was valid or not. We may want to
547 reject even if this is True (see what gets put in self.rejects).
548 This is simply to prevent us even trying things later which will
549 fail because we couldn't properly parse the file.
552 self.pkg.changes_file = filename
554 # Parse the .changes file into a dictionary
556 self.pkg.changes.update(parse_changes(filename))
557 except CantOpenError:
558 self.rejects.append("%s: can't read file." % (filename))
560 except ParseChangesError as line:
561 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
563 except ChangesUnicodeError:
564 self.rejects.append("%s: changes file not proper utf-8" % (filename))
567 # Parse the Files field from the .changes into another dictionary
569 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
570 except ParseChangesError as line:
571 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
573 except UnknownFormatError as format:
574 self.rejects.append("%s: unknown format '%s'." % (filename, format))
577 # Check for mandatory fields
578 for i in ("distribution", "source", "binary", "architecture",
579 "version", "maintainer", "files", "changes", "description"):
580 if not self.pkg.changes.has_key(i):
581 # Avoid undefined errors later
582 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
585 # Strip a source version in brackets from the source field
586 if re_strip_srcver.search(self.pkg.changes["source"]):
587 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
589 # Ensure the source field is a valid package name.
590 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
591 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
593 # Split multi-value fields into a lower-level dictionary
594 for i in ("architecture", "distribution", "binary", "closes"):
595 o = self.pkg.changes.get(i, "")
597 del self.pkg.changes[i]
599 self.pkg.changes[i] = {}
602 self.pkg.changes[i][j] = 1
604 # Fix the Maintainer: field to be RFC822/2047 compatible
606 (self.pkg.changes["maintainer822"],
607 self.pkg.changes["maintainer2047"],
608 self.pkg.changes["maintainername"],
609 self.pkg.changes["maintaineremail"]) = \
610 fix_maintainer (self.pkg.changes["maintainer"])
611 except ParseMaintError as msg:
612 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
613 % (filename, self.pkg.changes["maintainer"], msg))
615 # ...likewise for the Changed-By: field if it exists.
617 (self.pkg.changes["changedby822"],
618 self.pkg.changes["changedby2047"],
619 self.pkg.changes["changedbyname"],
620 self.pkg.changes["changedbyemail"]) = \
621 fix_maintainer (self.pkg.changes.get("changed-by", ""))
622 except ParseMaintError as msg:
623 self.pkg.changes["changedby822"] = ""
624 self.pkg.changes["changedby2047"] = ""
625 self.pkg.changes["changedbyname"] = ""
626 self.pkg.changes["changedbyemail"] = ""
628 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
629 % (filename, self.pkg.changes["changed-by"], msg))
631 # Ensure all the values in Closes: are numbers
632 if self.pkg.changes.has_key("closes"):
633 for i in self.pkg.changes["closes"].keys():
634 if re_isanum.match (i) == None:
635 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
637 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
638 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
639 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
641 # Check the .changes is non-empty
642 if not self.pkg.files:
643 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
646 # Changes was syntactically valid even if we'll reject
649 ###########################################################################
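# Illustrative sketch (not part of the original module): load_changes() splits
# multi-value fields such as Architecture and Distribution into dictionaries
# keyed by value, roughly equivalent to:
#
#   changes = {"architecture": "source amd64", "distribution": "unstable"}
#   for i in ("architecture", "distribution"):
#       values = changes.get(i, "")
#       changes[i] = {}
#       for j in values.split():
#           changes[i][j] = 1
#   # changes["architecture"] == {"source": 1, "amd64": 1}
#   # changes["distribution"] == {"unstable": 1}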
651 def check_distributions(self):
652 "Check and map the Distribution field"
656 # Handle suite mappings
657 for m in Cnf.ValueList("SuiteMappings"):
660 if mtype == "map" or mtype == "silent-map":
661 (source, dest) = args[1:3]
662 if self.pkg.changes["distribution"].has_key(source):
663 del self.pkg.changes["distribution"][source]
664 self.pkg.changes["distribution"][dest] = 1
665 if mtype != "silent-map":
666 self.notes.append("Mapping %s to %s." % (source, dest))
667 if self.pkg.changes.has_key("distribution-version"):
668 if self.pkg.changes["distribution-version"].has_key(source):
669 self.pkg.changes["distribution-version"][source]=dest
670 elif mtype == "map-unreleased":
671 (source, dest) = args[1:3]
672 if self.pkg.changes["distribution"].has_key(source):
673 for arch in self.pkg.changes["architecture"].keys():
674 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
675 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
676 del self.pkg.changes["distribution"][source]
677 self.pkg.changes["distribution"][dest] = 1
679 elif mtype == "ignore":
681 if self.pkg.changes["distribution"].has_key(suite):
682 del self.pkg.changes["distribution"][suite]
683 self.warnings.append("Ignoring %s as a target suite." % (suite))
684 elif mtype == "reject":
686 if self.pkg.changes["distribution"].has_key(suite):
687 self.rejects.append("Uploads to %s are not accepted." % (suite))
688 elif mtype == "propup-version":
689 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
691 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
692 if self.pkg.changes["distribution"].has_key(args[1]):
693 self.pkg.changes.setdefault("distribution-version", {})
694 for suite in args[2:]:
695 self.pkg.changes["distribution-version"][suite] = suite
697 # Ensure there is (still) a target distribution
698 if len(self.pkg.changes["distribution"].keys()) < 1:
699 self.rejects.append("No valid distribution remaining.")
701 # Ensure target distributions exist
702 for suite in self.pkg.changes["distribution"].keys():
703 if not get_suite(suite.lower()):
704 self.rejects.append("Unknown distribution `%s'." % (suite))
706 ###########################################################################
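# Illustrative sketch (not part of the original module): the SuiteMappings
# entries handled by check_distributions() are whitespace-separated strings of
# the form "<type> <args...>". Hypothetical examples of what such configuration
# entries could look like (suite names assumed):
#
#   SuiteMappings
#   {
#     "map stable proposed-updates";
#     "silent-map oldstable-security oldstable";
#     "map-unreleased stable unstable";
#     "propup-version testing-security unstable";
#     "reject unreleased";
#   };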
708 def binary_file_checks(self, f, session):
710 entry = self.pkg.files[f]
712 # Extract package control information
713 deb_file = utils.open_file(f)
715 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
717 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_info()[0]))
719 # Can't continue, none of the checks on control would work.
722 # Check for mandatory "Description:"
725 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
727 self.rejects.append("%s: Missing Description in binary package" % (f))
732 # Check for mandatory fields
733 for field in [ "Package", "Architecture", "Version" ]:
734 if control.Find(field) == None:
736 self.rejects.append("%s: No %s field in control." % (f, field))
739 # Ensure the package name matches the one given in the .changes
740 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
741 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
743 # Validate the package field
744 package = control.Find("Package")
745 if not re_valid_pkg_name.match(package):
746 self.rejects.append("%s: invalid package name '%s'." % (f, package))
748 # Validate the version field
749 version = control.Find("Version")
750 if not re_valid_version.match(version):
751 self.rejects.append("%s: invalid version number '%s'." % (f, version))
753 # Ensure the architecture of the .deb is one we know about.
754 default_suite = cnf.get("Dinstall::DefaultSuite", "unstable")
755 architecture = control.Find("Architecture")
756 upload_suite = self.pkg.changes["distribution"].keys()[0]
758 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
759 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
760 self.rejects.append("Unknown architecture '%s'." % (architecture))
762 # Ensure the architecture of the .deb is one of the ones
763 # listed in the .changes.
764 if not self.pkg.changes["architecture"].has_key(architecture):
765 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
767 # Sanity-check the Depends field
768 depends = control.Find("Depends")
770 self.rejects.append("%s: Depends field is empty." % (f))
772 # Sanity-check the Provides field
773 provides = control.Find("Provides")
775 provide = re_spacestrip.sub('', provides)
777 self.rejects.append("%s: Provides field is empty." % (f))
778 prov_list = provide.split(",")
779 for prov in prov_list:
780 if not re_valid_pkg_name.match(prov):
781 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
783 # If there is a Built-Using field, we need to check we can find the
784 # exact source version
785 built_using = control.Find("Built-Using")
788 entry["built-using"] = []
789 for dep in apt_pkg.parse_depends(built_using):
790 bu_s, bu_v, bu_e = dep[0]
791 # Check that it's an exact match dependency and we have
792 # some form of version
793 if bu_e != "=" or len(bu_v) < 1:
794 self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
796 # Find the source id for this version
797 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
799 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
801 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
803 except ValueError as e:
804 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
807 # Check the section & priority match those given in the .changes (non-fatal)
808 if control.Find("Section") and entry["section"] != "" \
809 and entry["section"] != control.Find("Section"):
810 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
811 (f, control.Find("Section", ""), entry["section"]))
812 if control.Find("Priority") and entry["priority"] != "" \
813 and entry["priority"] != control.Find("Priority"):
814 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
815 (f, control.Find("Priority", ""), entry["priority"]))
817 entry["package"] = package
818 entry["architecture"] = architecture
819 entry["version"] = version
820 entry["maintainer"] = control.Find("Maintainer", "")
822 if f.endswith(".udeb"):
823 self.pkg.files[f]["dbtype"] = "udeb"
824 elif f.endswith(".deb"):
825 self.pkg.files[f]["dbtype"] = "deb"
827 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
829 entry["source"] = control.Find("Source", entry["package"])
831 # Get the source version
832 source = entry["source"]
835 if source.find("(") != -1:
836 m = re_extract_src_version.match(source)
838 source_version = m.group(2)
840 if not source_version:
841 source_version = self.pkg.files[f]["version"]
843 entry["source package"] = source
844 entry["source version"] = source_version
846 # Ensure the filename matches the contents of the .deb
847 m = re_isadeb.match(f)
850 file_package = m.group(1)
851 if entry["package"] != file_package:
852 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
853 (f, file_package, entry["dbtype"], entry["package"]))
854 epochless_version = re_no_epoch.sub('', control.Find("Version"))
857 file_version = m.group(2)
858 if epochless_version != file_version:
859 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
860 (f, file_version, entry["dbtype"], epochless_version))
863 file_architecture = m.group(3)
864 if entry["architecture"] != file_architecture:
865 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
866 (f, file_architecture, entry["dbtype"], entry["architecture"]))
868 # Check for existent source
869 source_version = entry["source version"]
870 source_package = entry["source package"]
871 if self.pkg.changes["architecture"].has_key("source"):
872 if source_version != self.pkg.changes["version"]:
873 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
874 (source_version, f, self.pkg.changes["version"]))
876 # Check in the SQL database
877 if not source_exists(source_package, source_version, suites = \
878 self.pkg.changes["distribution"].keys(), session = session):
879 # Check in one of the other directories
880 source_epochless_version = re_no_epoch.sub('', source_version)
881 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
883 byhand_dir = get_policy_queue('byhand', session).path
884 new_dir = get_policy_queue('new', session).path
886 if os.path.exists(os.path.join(byhand_dir, dsc_filename)):
888 elif os.path.exists(os.path.join(new_dir, dsc_filename)):
891 dsc_file_exists = False
892 # TODO: Don't hardcode this list: use all relevant queues
893 # The question is how to determine what is relevant
894 for queue_name in ["embargoed", "unembargoed", "proposedupdates", "oldproposedupdates"]:
895 queue = get_policy_queue(queue_name, session)
897 if os.path.exists(os.path.join(queue.path, dsc_filename)):
898 dsc_file_exists = True
901 if not dsc_file_exists:
902 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
904 # Check the version and check for file overwrites
905 self.check_binary_against_db(f, session)
907 def source_file_checks(self, f, session):
908 entry = self.pkg.files[f]
910 m = re_issource.match(f)
914 entry["package"] = m.group(1)
915 entry["version"] = m.group(2)
916 entry["type"] = m.group(3)
918 # Ensure the source package name matches the Source field in the .changes
919 if self.pkg.changes["source"] != entry["package"]:
920 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
922 # Ensure the source version matches the version in the .changes file
923 if re_is_orig_source.match(f):
924 changes_version = self.pkg.changes["chopversion2"]
926 changes_version = self.pkg.changes["chopversion"]
928 if changes_version != entry["version"]:
929 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
931 # Ensure the .changes lists source in the Architecture field
932 if not self.pkg.changes["architecture"].has_key("source"):
933 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
935 # Check the signature of a .dsc file
936 if entry["type"] == "dsc":
937 # check_signature returns either:
938 # (None, [list, of, rejects]) or (signature, [])
939 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
941 self.rejects.append(j)
943 entry["architecture"] = "source"
945 def per_suite_file_checks(self, f, suite, session):
947 entry = self.pkg.files[f]
950 if entry.has_key("byhand"):
953 # Check we have fields we need to do these checks
955 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
956 if not entry.has_key(m):
957 self.rejects.append("file '%s' does not have field %s set" % (f, m))
963 # Handle component mappings
964 for m in cnf.ValueList("ComponentMappings"):
965 (source, dest) = m.split()
966 if entry["component"] == source:
967 entry["original component"] = source
968 entry["component"] = dest
970 # Ensure the component is valid for the target suite
971 if entry["component"] not in get_component_names(session):
972 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
975 # Validate the component
976 if not get_component(entry["component"], session):
977 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
980 # See if the package is NEW
981 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
984 # Validate the priority
985 if entry["priority"].find('/') != -1:
986 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
988 # Determine the location
989 location = cnf["Dir::Pool"]
990 l = get_location(location, entry["component"], session=session)
992 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
993 entry["location id"] = -1
995 entry["location id"] = l.location_id
997 # Check the md5sum & size against existing files (if any)
998 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1000 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1001 entry["size"], entry["md5sum"], entry["location id"])
1004 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1005 elif found is False and poolfile is not None:
1006 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1008 if poolfile is None:
1009 entry["files id"] = None
1011 entry["files id"] = poolfile.file_id
1013 # Check for packages that have moved from one component to another
1014 entry['suite'] = suite
1015 arch_list = [entry["architecture"], 'all']
1016 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1017 [suite], arch_list = arch_list, session = session)
1018 if component is not None:
1019 entry["othercomponents"] = component
1021 def check_files(self, action=True):
1022 file_keys = self.pkg.files.keys()
1028 os.chdir(self.pkg.directory)
1030 ret = holding.copy_to_holding(f)
1032 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1036 # check we already know the changes file
1037 # [NB: this check must be done post-suite mapping]
1038 base_filename = os.path.basename(self.pkg.changes_file)
1040 session = DBConn().session()
1043 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1044 # if in the pool or in a queue other than unchecked, reject
1045 if (dbc.in_queue is None) \
1046 or (dbc.in_queue is not None
1047 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1048 self.rejects.append("%s file already known to dak" % base_filename)
1049 except NoResultFound as e:
1053 has_binaries = False
1056 for f, entry in self.pkg.files.items():
1057 # Ensure the file does not already exist in one of the accepted directories
1058 # TODO: Dynamically generate this list
1059 for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
1060 queue = get_policy_queue(queue_name, session)
1061 if queue and os.path.exists(os.path.join(queue.path, f)):
1062 self.rejects.append("%s file already exists in the %s queue." % (f, queue_name))
1064 if not re_taint_free.match(f):
1065 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1067 # Check the file is readable
1068 if os.access(f, os.R_OK) == 0:
1069 # When running in -n, copy_to_holding() won't have
1070 # generated the reject_message, so we need to.
1072 if os.path.exists(f):
1073 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1075 # Don't directly reject, mark to check later to deal with orig's
1076 # we can find in the pool
1077 self.later_check_files.append(f)
1078 entry["type"] = "unreadable"
1081 # If it's byhand skip remaining checks
1082 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1084 entry["type"] = "byhand"
1086 # Checks for a binary package...
1087 elif re_isadeb.match(f):
1089 entry["type"] = "deb"
1091 # This routine appends to self.rejects/warnings as appropriate
1092 self.binary_file_checks(f, session)
1094 # Checks for a source package...
1095 elif re_issource.match(f):
1098 # This routine appends to self.rejects/warnings as appropriate
1099 self.source_file_checks(f, session)
1101 # Not a binary or source package? Assume byhand...
1104 entry["type"] = "byhand"
1106 # Per-suite file checks
1107 entry["oldfiles"] = {}
1108 for suite in self.pkg.changes["distribution"].keys():
1109 self.per_suite_file_checks(f, suite, session)
1113 # If the .changes file says it has source, it must have source.
1114 if self.pkg.changes["architecture"].has_key("source"):
1116 self.rejects.append("no source found and Architecture line in changes mentions source.")
1118 if (not has_binaries) and (not cnf.FindB("Dinstall::AllowSourceOnlyUploads")):
1119 self.rejects.append("source only uploads are not supported.")
1121 ###########################################################################
1123 def __dsc_filename(self):
1125 Returns: (Status, Dsc_Filename)
1127 Status: Boolean; True when there was no error, False otherwise
1128 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1133 for name, entry in self.pkg.files.items():
1134 if entry.has_key("type") and entry["type"] == "dsc":
1136 return False, "cannot process a .changes file with multiple .dsc's."
1140 if not dsc_filename:
1141 return False, "source uploads must contain a dsc file"
1143 return True, dsc_filename
1145 def load_dsc(self, action=True, signing_rules=1):
1147 Find and load the dsc from self.pkg.files into self.pkg.dsc
1149 Returns: (Status, Reason)
1151 Status: Boolean; True when there was no error, False otherwise
1152 Reason: String; When Status is False this describes the error
1156 (status, dsc_filename) = self.__dsc_filename()
1158 # If status is false, dsc_filename has the reason
1159 return False, dsc_filename
1162 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1163 except CantOpenError:
1165 return False, "%s: can't read file." % (dsc_filename)
1166 except ParseChangesError as line:
1167 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1168 except InvalidDscError as line:
1169 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1170 except ChangesUnicodeError:
1171 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1175 ###########################################################################
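# Illustrative sketch (not part of the original module): both __dsc_filename()
# and load_dsc() follow the (Status, Reason) convention documented above; a
# caller ('upload' is a hypothetical Upload instance) typically does:
#
#   (status, reason) = upload.load_dsc(action=False)
#   if not status:
#       upload.rejects.append(reason)
#   else:
#       print "loaded .dsc for %s" % upload.pkg.dsc["source"]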
1177 def check_dsc(self, action=True, session=None):
1178 """Returns bool indicating whether or not the source changes are valid"""
1179 # Ensure there is source to check
1180 if not self.pkg.changes["architecture"].has_key("source"):
1184 session = DBConn().session()
1186 (status, reason) = self.load_dsc(action=action)
1188 self.rejects.append(reason)
1190 (status, dsc_filename) = self.__dsc_filename()
1192 # If status is false, dsc_filename has the reason
1193 self.rejects.append(dsc_filename)
1196 # Build up the file list of files mentioned by the .dsc
1198 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1199 except NoFilesFieldError:
1200 self.rejects.append("%s: no Files: field." % (dsc_filename))
1202 except UnknownFormatError as format:
1203 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1205 except ParseChangesError as line:
1206 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1209 # Enforce mandatory fields
1210 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1211 if not self.pkg.dsc.has_key(i):
1212 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1215 # Validate the source and version fields
1216 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1217 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1218 if not re_valid_version.match(self.pkg.dsc["version"]):
1219 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1221 # Only a limited list of source formats is allowed in each suite
1222 for dist in self.pkg.changes["distribution"].keys():
1223 suite = get_suite(dist, session=session)
1225 self.rejects.append("%s: cannot find suite %s when checking source formats" % (dsc_filename, dist))
1227 allowed = [ x.format_name for x in suite.srcformats ]
1228 if self.pkg.dsc["format"] not in allowed:
1229 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1231 # Validate the Maintainer field
1233 # We ignore the return value
1234 fix_maintainer(self.pkg.dsc["maintainer"])
1235 except ParseMaintError as msg:
1236 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1237 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1239 # Validate the build-depends field(s)
1240 for field_name in [ "build-depends", "build-depends-indep" ]:
1241 field = self.pkg.dsc.get(field_name)
1243 # Have apt try to parse them...
1245 apt_pkg.ParseSrcDepends(field)
1247 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1249 # Ensure the version number in the .dsc matches the version number in the .changes
1250 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1251 changes_version = self.pkg.files[dsc_filename]["version"]
1253 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1254 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1256 # Ensure the Files field contains only what's expected
1257 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1259 # Ensure source is newer than existing source in target suites
1260 session = DBConn().session()
1261 self.check_source_against_db(dsc_filename, session)
1262 self.check_dsc_against_db(dsc_filename, session)
1264 dbchg = get_dbchange(self.pkg.changes_file, session)
1266 # Finally, check if we're missing any files
1267 for f in self.later_check_files:
1269 # If we have a dbchg object, check whether we've already processed this file
1272 for pf in dbchg.files:
1273 if pf.filename == f and pf.processed:
1274 self.notes.append('%s was already processed so we can go ahead' % f)
1276 del self.pkg.files[f]
1278 self.rejects.append("Could not find file %s referenced in changes" % f)
1282 return (len(self.rejects) == 0)
1284 ###########################################################################
1286 def get_changelog_versions(self, source_dir):
1287 """Extracts the source package and (optionally) grabs the
1288 version history out of debian/changelog for the BTS."""
1292 # Find the .dsc (again)
1294 for f in self.pkg.files.keys():
1295 if self.pkg.files[f]["type"] == "dsc":
1298 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1299 if not dsc_filename:
1302 # Create a symlink mirror of the source files in our temporary directory
1303 for f in self.pkg.files.keys():
1304 m = re_issource.match(f)
1306 src = os.path.join(source_dir, f)
1307 # If a file is missing for whatever reason, give up.
1308 if not os.path.exists(src):
1311 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1312 self.pkg.orig_files[f].has_key("path"):
1314 dest = os.path.join(os.getcwd(), f)
1315 os.symlink(src, dest)
1317 # If the orig files are not a part of the upload, create symlinks to the
1319 for orig_file in self.pkg.orig_files.keys():
1320 if not self.pkg.orig_files[orig_file].has_key("path"):
1322 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1323 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1325 # Extract the source
1327 unpacked = UnpackedSource(dsc_filename)
1328 except Exception as e:
1329 self.rejects.append("'dpkg-source -x' failed for %s. (%s)" % (dsc_filename, str(e)))
1332 if not cnf.Find("Dir::BTSVersionTrack"):
1335 # Get the upstream version
1336 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1337 if re_strip_revision.search(upstr_version):
1338 upstr_version = re_strip_revision.sub('', upstr_version)
1340 # Ensure the changelog file exists
1341 changelog_file = unpacked.get_changelog_file()
1342 if changelog_file is None:
1343 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1346 # Parse the changelog
1347 self.pkg.dsc["bts changelog"] = ""
1348 for line in changelog_file.readlines():
1349 m = re_changelog_versions.match(line)
1351 self.pkg.dsc["bts changelog"] += line
1352 changelog_file.close()
1355 # Check we found at least one revision in the changelog
1356 if not self.pkg.dsc["bts changelog"]:
1357 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1359 def check_source(self):
1361 # a) there's no source
1362 if not self.pkg.changes["architecture"].has_key("source"):
1365 tmpdir = utils.temp_dirname()
1367 # Move into the temporary directory
1371 # Get the changelog version history
1372 self.get_changelog_versions(cwd)
1374 # Move back and clean up the temporary tree
1378 shutil.rmtree(tmpdir)
1379 except OSError as e:
1380 if e.errno != errno.EACCES:
1382 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1384 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1385 # We probably have u-r or u-w directories so chmod everything
1387 cmd = "chmod -R u+rwx %s" % (tmpdir)
1388 result = os.system(cmd)
1390 utils.fubar("'%s' failed with result %s." % (cmd, result))
1391 shutil.rmtree(tmpdir)
1392 except Exception as e:
1393 print "foobar2 (%s)" % e
1394 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1396 ###########################################################################
1397 def ensure_hashes(self):
1398 # Make sure we recognise the format of the Files: field in the .changes
1399 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1400 if len(format) == 2:
1401 format = int(format[0]), int(format[1])
1403 format = int(float(format[0])), 0
1405 # We need to deal with the original changes blob, as the fields we need
1406 # might not be in the changes dict serialised into the .dak anymore.
1407 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1409 # Copy the checksums over to the current changes dict. This will keep
1410 # the existing modifications to it intact.
1411 for field in orig_changes:
1412 if field.startswith('checksums-'):
1413 self.pkg.changes[field] = orig_changes[field]
1415 # Check for unsupported hashes
1416 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1417 self.rejects.append(j)
1419 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1420 self.rejects.append(j)
1422 # We have to calculate the hash ourselves if the changes format is older than
1423 # the version the hash first appeared in, rather than requiring it to exist in the changes file
1424 for hashname, hashfunc, version in utils.known_hashes:
1425 # TODO: Move _ensure_changes_hash into this class
1426 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1427 self.rejects.append(j)
1428 if "source" in self.pkg.changes["architecture"]:
1429 # TODO: Move _ensure_dsc_hash into this class
1430 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1431 self.rejects.append(j)
1433 def check_hashes(self):
1434 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1435 self.rejects.append(m)
1437 for m in utils.check_size(".changes", self.pkg.files):
1438 self.rejects.append(m)
1440 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1441 self.rejects.append(m)
1443 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1444 self.rejects.append(m)
1446 self.ensure_hashes()
1448 ###########################################################################
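# Illustrative sketch (not part of the original module): ensure_hashes() first
# turns the .changes "Format" value into an (int, int) tuple before deciding
# which checksum fields are expected; the standalone helper below mirrors that
# logic (the helper name is hypothetical).
#
#   def parse_changes_format(value):
#       parts = value.split(".", 1)
#       if len(parts) == 2:
#           return int(parts[0]), int(parts[1])
#       return int(float(parts[0])), 0
#
#   # parse_changes_format("1.8") == (1, 8)
#   # parse_changes_format("1") == (1, 0)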
1450 def ensure_orig(self, target_dir='.', session=None):
1452 Ensures that all orig files mentioned in the changes file are present
1453 in target_dir. If they do not exist, they are symlinked into place.
1455 A list containing the symlinks that were created is returned (so they
1462 for filename, entry in self.pkg.dsc_files.iteritems():
1463 if not re_is_orig_source.match(filename):
1464 # File is not an orig; ignore
1467 if os.path.exists(filename):
1468 # File exists, no need to continue
1471 def symlink_if_valid(path):
1472 f = utils.open_file(path)
1473 md5sum = apt_pkg.md5sum(f)
1476 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1477 expected = (int(entry['size']), entry['md5sum'])
1479 if fingerprint != expected:
1482 dest = os.path.join(target_dir, filename)
1484 os.symlink(path, dest)
1485 symlinked.append(dest)
1491 session_ = DBConn().session()
1496 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1497 poolfile_path = os.path.join(
1498 poolfile.location.path, poolfile.filename
1501 if symlink_if_valid(poolfile_path):
1511 # Look in some other queues for the file
1512 queue_names = ['new', 'byhand',
1513 'proposedupdates', 'oldproposedupdates',
1514 'embargoed', 'unembargoed']
1516 for queue_name in queue_names:
1517 queue = get_policy_queue(queue_name, session)
1521 queuefile_path = os.path.join(queue.path, filename)
1523 if not os.path.exists(queuefile_path):
1524 # Does not exist in this queue
1527 if symlink_if_valid(queuefile_path):
1532 ###########################################################################
1534 def check_lintian(self):
1536 Extends self.rejects by checking the output of lintian against tags
1537 specified in Dinstall::LintianTags.
1542 # Don't reject binary uploads
1543 if not self.pkg.changes['architecture'].has_key('source'):
1546 # Only check some distributions
1547 for dist in ('unstable', 'experimental'):
1548 if dist in self.pkg.changes['distribution']:
1553 # If we do not have a tagfile, don't do anything
1554 tagfile = cnf.get("Dinstall::LintianTags")
1558 # Parse the yaml file
1559 sourcefile = file(tagfile, 'r')
1560 sourcecontent = sourcefile.read()
1564 lintiantags = yaml.load(sourcecontent)['lintian']
1565 except yaml.YAMLError as msg:
1566 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1569 # Try and find all orig mentioned in the .dsc
1570 symlinked = self.ensure_orig()
1572 # Setup the input file for lintian
1573 fd, temp_filename = utils.temp_filename()
1574 temptagfile = os.fdopen(fd, 'w')
1575 for tags in lintiantags.values():
1576 temptagfile.writelines(['%s\n' % x for x in tags])
1580 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1581 (temp_filename, self.pkg.changes_file)
1583 result, output = commands.getstatusoutput(cmd)
1585 # Remove our tempfile and any symlinks we created
1586 os.unlink(temp_filename)
1588 for symlink in symlinked:
1592 utils.warn("lintian failed for %s [return code: %s]." % \
1593 (self.pkg.changes_file, result))
1594 utils.warn(utils.prefix_multi_line_string(output, \
1595 " [possible output:] "))
1600 [self.pkg.changes_file, "check_lintian"] + list(txt)
1604 parsed_tags = parse_lintian_output(output)
1605 self.rejects.extend(
1606 generate_reject_messages(parsed_tags, lintiantags, log=log)
1609 ###########################################################################
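# Illustrative sketch (not part of the original module): the file named by
# Dinstall::LintianTags is YAML whose top-level "lintian" key maps tag
# categories to lists of lintian tags (cf. yaml.load(...)['lintian'] above);
# the category and tag names below are only examples.
#
#   lintian:
#     fatal:
#       - binary-in-etc
#       - usr-share-doc-symlink-to-foreign-package
#     nonfatal:
#       - wrong-file-owner-uid-or-gid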
1610 def check_urgency(self):
1612 if self.pkg.changes["architecture"].has_key("source"):
1613 if not self.pkg.changes.has_key("urgency"):
1614 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1615 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1616 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1617 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1618 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1619 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1621 ###########################################################################
1623 # Sanity check the time stamps of files inside debs.
1624 # [Files in the near future cause ugly warnings and extreme time
1625 # travel can cause errors on extraction]
1627 def check_timestamps(self):
1630 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1631 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1632 tar = TarTime(future_cutoff, past_cutoff)
1634 for filename, entry in self.pkg.files.items():
1635 if entry["type"] == "deb":
1638 deb = apt_inst.DebFile(filename)
1639 deb.control.go(tar.callback)
1641 future_files = tar.future_files.keys()
1643 num_future_files = len(future_files)
1644 future_file = future_files[0]
1645 future_date = tar.future_files[future_file]
1646 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1647 % (filename, num_future_files, future_file, time.ctime(future_date)))
1649 ancient_files = tar.ancient_files.keys()
1651 num_ancient_files = len(ancient_files)
1652 ancient_file = ancient_files[0]
1653 ancient_date = tar.ancient_files[ancient_file]
1654 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1655 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1657 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_info()[0], sys.exc_info()[1]))
1659 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1660 for key in "maintaineremail", "changedbyemail", "maintainername", "changedbyname":
1661 if not self.pkg.changes.has_key(key):
1663 uid_email = '@'.join(uid_email.split('@')[:2])
1664 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1666 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1672 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1673 debian_emails = filter(lambda addr: addr.endswith('@debian.org'), sponsor_addresses)
1674 if uid_email not in debian_emails:
1676 uid_email = debian_emails[0]
1677 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1678 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1679 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1680 self.pkg.changes["sponsoremail"] = uid_email
1685 ###########################################################################
1686 # check_signed_by_key checks
1687 ###########################################################################
1689 def check_signed_by_key(self):
1690 """Ensure the .changes is signed by an authorized uploader."""
1691 session = DBConn().session()
1693 # First of all we check that the person has proper upload permissions
1694 # and that this upload isn't blocked
1695 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1698 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1701 # TODO: Check that import-keyring adds UIDs properly
1703 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1706 # Check that the fingerprint which uploaded has permission to do so
1707 self.check_upload_permissions(fpr, session)
1709 # Check that this package is not in a transition
1710 self.check_transition(session)
1715 def check_upload_permissions(self, fpr, session):
1716 # Check any one-off upload blocks
1717 self.check_upload_blocks(fpr, session)
1719 # If the source_acl is None, source is never allowed
1720 if fpr.source_acl is None:
1721 if self.pkg.changes["architecture"].has_key("source"):
1722 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1723 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1724 self.rejects.append(rej)
1726 # Do DM as a special case
1727 # DM is a special case unfortunately, so we check it first
1728 # (keys with no source access get more access than DMs in one
1729 # way; DMs can only upload for their packages whether source
1730 # or binary, whereas keys with no access might be able to
1731 # upload some binaries)
1732 elif fpr.source_acl.access_level == 'dm':
1733 self.check_dm_upload(fpr, session)
1735 # If not a DM, we allow full upload rights
1736 uid_email = "%s@debian.org" % (fpr.uid.uid)
1737 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1740 # Check binary upload permissions
1741 # By this point we know that DMs can't have got here unless they
1742 # are allowed to deal with the package concerned so just apply
1744 if fpr.binary_acl.access_level == 'full':
1747 # Otherwise we're in the map case
1748 tmparches = self.pkg.changes["architecture"].copy()
1749 tmparches.pop('source', None)
1751 for bam in fpr.binary_acl_map:
1752 tmparches.pop(bam.architecture.arch_string, None)
1754 if len(tmparches.keys()) > 0:
1755 if fpr.binary_reject:
1756 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1757 if len(tmparches.keys()) == 1:
1758 rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1760 rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1761 self.rejects.append(rej)
1763 # TODO: This is where we'll implement reject vs throw away binaries later
1764 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1765 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1766 rej += "\nFingerprint: %s" % (fpr.fingerprint)
1767 self.rejects.append(rej)
1770 def check_upload_blocks(self, fpr, session):
1771 """Check whether any upload blocks apply to this source, source
1772 version, uid / fpr combination"""
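# For reference, a one-off block is simply an UploadBlock row; a rough,
# hypothetical sketch of creating one (only the attributes read below --
# source, version, fpr, uid, reason -- are known here) could look like:
#
#   block = UploadBlock()
#   block.source = 'somepackage'   # hypothetical package name
#   block.version = None           # None blocks every version
#   block.reason = 'pending discussion with the maintainer'
#   session.add(block)
#   session.commit()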
1774 def block_rej_template(fb):
1775 rej = 'Manual upload block in place for package %s' % fb.source
1776 if fb.version is not None:
1777 rej += ', version %s' % fb.version
1780 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1781 # version is None if the block applies to all versions
1782 if fb.version is None or fb.version == self.pkg.changes['version']:
1783 # Check both fpr and uid - either is enough to cause a reject
1784 if fb.fpr is not None:
1785 if fb.fpr.fingerprint == fpr.fingerprint:
1786 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1787 if fb.uid is not None:
1788 if fb.uid == fpr.uid:
1789 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1792 def check_dm_upload(self, fpr, session):
1793 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1794 ## none of the uploaded packages are NEW
1796 for f in self.pkg.files.keys():
1797 if self.pkg.files[f].has_key("byhand"):
1798 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1800 if self.pkg.files[f].has_key("new"):
1801 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1807 r = get_newest_source(self.pkg.changes["source"], session)
1810 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1811 self.rejects.append(rej)
1814 if not r.dm_upload_allowed:
1815 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1816 self.rejects.append(rej)
1819 ## the Maintainer: field of the uploaded .changes file corresponds with
1820 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1822 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1823 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1825 ## the most recent version of the package uploaded to unstable or
1826 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1827 ## non-developer maintainers cannot NMU or hijack packages)
1829 # uploader includes the maintainer
1831 for uploader in r.uploaders:
1832 (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1833 # Eww - I hope we never have two people with the same name in Debian
1834 if email == fpr.uid.uid or name == fpr.uid.name:
1839 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1842 ## none of the packages are being taken over from other source packages
1843 for b in self.pkg.changes["binary"].keys():
1844 for suite in self.pkg.changes["distribution"].keys():
1845 for s in get_source_by_package_and_suite(b, suite, session):
1846 if s.source != self.pkg.changes["source"]:
1847 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s.source, suite))
1851 def check_transition(self, session):
1854 sourcepkg = self.pkg.changes["source"]
1856 # No sourceful upload -> no need to do anything else, direct return
1857 # We also only check uploads to unstable, not experimental or those going to some
1858 # proposed-updates queue
1859 if "source" not in self.pkg.changes["architecture"] or \
1860 "unstable" not in self.pkg.changes["distribution"]:
1863 # Also only check if there is a file defined (and existent) with
1865 transpath = cnf.get("Dinstall::ReleaseTransitions", "")
1866 if transpath == "" or not os.path.exists(transpath):
1869 # Parse the yaml file
1870 sourcefile = file(transpath, 'r')
1871 sourcecontent = sourcefile.read()
1873 transitions = yaml.load(sourcecontent)
1874 except yaml.YAMLError as msg:
1875 # This shouldn't happen, there is a wrapper to edit the file which
1876 # checks it, but we would rather be safe than end up rejecting
1878 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
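# The transitions file is a YAML mapping of transition name -> details.  The
# keys read below are "source", "reason", "rm" and "packages" (plus whichever
# key populates 'expected').  A rough, hypothetical example:
#
#   apache2-transition:
#     reason: "apache2 2.4 needs all reverse dependencies rebuilt"
#     source: apache2
#     rm: "Some Release Team Member"
#     packages:
#       - apache2
#       - libapache2-mod-foo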
1881 # Now look through all defined transitions
1882 for trans in transitions:
1883 t = transitions[trans]
1884 source = t["source"]
1887 # Will be None if nothing is in testing.
1888 current = get_source_in_suite(source, "testing", session)
1889 if current is not None:
1890 compare = apt_pkg.VersionCompare(current.version, expected)
1892 if current is None or compare < 0:
1893 # This is still valid, the current version in testing is older than
1894 # the new version we wait for, or there is none in testing yet
1896 # Check if the source we look at is affected by this.
1897 if sourcepkg in t['packages']:
1898 # The source is affected, lets reject it.
1900 rejectmsg = "%s: part of the %s transition.\n\n" % (
1903 if current is not None:
1904 currentlymsg = "at version %s" % (current.version)
1906 currentlymsg = "not present in testing"
1908 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1910 rejectmsg += "\n".join(textwrap.wrap("""Your package
1911 is part of a testing transition designed to get %s migrated (it is
1912 currently %s, we need version %s). This transition is managed by the
1913 Release Team, and %s is the Release-Team member responsible for it.
1914 Please mail debian-release@lists.debian.org or contact %s directly if you
1915 need further assistance. You might want to upload to experimental until this
1916 transition is done."""
1917 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1919 self.rejects.append(rejectmsg)
1922 ###########################################################################
1923 # End check_signed_by_key checks
1924 ###########################################################################
1926 def build_summaries(self):
1927 """ Build a summary of changes the upload introduces. """
1929 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1931 short_summary = summary
1933 # This is for direport's benefit...
1934 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1936 summary += "\n\nChanges:\n" + f
1938 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1940 summary += self.announce(short_summary, 0)
1942 return (summary, short_summary)
1944 ###########################################################################
1946 def close_bugs(self, summary, action):
1948 Send mail to close bugs as instructed by the closes field in the changes file.
1949 Also add a line to summary if any work was done.
1951 @type summary: string
1952 @param summary: summary text, as given by L{build_summaries}
1955 @param action: If set to false, no real action will be done.
1958 @return: summary. If action was taken, extended by the list of closed bugs.
1962 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1964 bugs = self.pkg.changes["closes"].keys()
1970 summary += "Closing bugs: "
1972 summary += "%s " % (bug)
1975 self.Subst["__BUG_NUMBER__"] = bug
1976 if self.pkg.changes["distribution"].has_key("stable"):
1977 self.Subst["__STABLE_WARNING__"] = """
1978 Note that this package is not part of the released stable Debian
1979 distribution. It may have dependencies on other unreleased software,
1980 or other instabilities. Please take care if you wish to install it.
1981 The update will eventually make its way into the next released Debian
1984 self.Subst["__STABLE_WARNING__"] = ""
1985 mail_message = utils.TemplateSubst(self.Subst, template)
1986 utils.send_mail(mail_message)
1988 # Clear up after ourselves
1989 del self.Subst["__BUG_NUMBER__"]
1990 del self.Subst["__STABLE_WARNING__"]
1992 if action and self.logger:
1993 self.logger.log(["closing bugs"] + bugs)
1999 ###########################################################################
2001 def announce(self, short_summary, action):
2003 Send an announce mail about a new upload.
2005 @type short_summary: string
2006 @param short_summary: Short summary text to include in the mail
2009 @param action: If set to false, no real action will be done.
2012 @return: Text string describing the action taken.
2018 # Skip all of this if not sending mail to avoid confusing people
2019 if cnf.has_key("Dinstall::Options::No-Mail") and cnf["Dinstall::Options::No-Mail"]:
2022 # Only do announcements for source uploads with a recent dpkg-dev installed
2023 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2024 self.pkg.changes["architecture"].has_key("source"):
2027 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2032 # Get a unique list of target lists
2033 for dist in self.pkg.changes["distribution"].keys():
2034 suite = get_suite(dist)
2035 if suite is None: continue
2036 for tgt in suite.announce:
2039 self.Subst["__SHORT_SUMMARY__"] = short_summary
2041 for announce_list in lists_todo.keys():
2042 summary += "Announcing to %s\n" % (announce_list)
2046 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2047 if cnf.get("Dinstall::TrackingServer") and \
2048 self.pkg.changes["architecture"].has_key("source"):
2049 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2050 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
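# Dinstall::TrackingServer names the package tracking host (typically
# packages.qa.debian.org), so sourceful uploads also get Bcc'd to the
# per-source tracking address built above.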
2052 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2053 utils.send_mail(mail_message)
2055 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2057 if cnf.FindB("Dinstall::CloseBugs") and cnf.has_key("Dinstall::BugServer"):
2058 summary = self.close_bugs(summary, action)
2060 del self.Subst["__SHORT_SUMMARY__"]
2064 ###########################################################################
2066 def accept (self, summary, short_summary, session=None):
2070 This moves all files referenced from the .changes into the pool,
2071 sends the accepted mail, announces to lists, closes bugs and
2072 also checks for override disparities. If enabled it will write out
2073 the version history for the BTS Version Tracking and will finally call
2076 @type summary: string
2077 @param summary: Summary text
2079 @type short_summary: string
2080 @param short_summary: Short summary
2084 stats = SummaryStats()
2087 self.logger.log(["installing changes", self.pkg.changes_file])
2092 # Add the .dsc file to the DB first
2093 for newfile, entry in self.pkg.files.items():
2094 if entry["type"] == "dsc":
2095 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2099 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2100 for newfile, entry in self.pkg.files.items():
2101 if entry["type"] == "deb":
2102 b, pf = add_deb_to_db(self, newfile, session)
2104 poolfiles.append(pf)
2106 # If this is a sourceful diff only upload that is moving
2107 # cross-component we need to copy the .orig files into the new
2108 # component too for the same reasons as above.
2109 # XXX: mhy: I think this should be in add_dsc_to_db
2110 if self.pkg.changes["architecture"].has_key("source"):
2111 for orig_file in self.pkg.orig_files.keys():
2112 if not self.pkg.orig_files[orig_file].has_key("id"):
2113 continue # Skip if it's not in the pool
2114 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2115 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2116 continue # Skip if the location didn't change
2119 oldf = get_poolfile_by_id(orig_file_id, session)
2120 old_filename = os.path.join(oldf.location.path, oldf.filename)
2121 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2122 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
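# utils.poolify() builds the per-source pool subdirectory (the familiar
# component/h/hello/ style layout for a source called "hello"), so the copied
# .orig lands next to the rest of the package in its new component.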
2124 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
2126 # TODO: Care about size/md5sum collisions etc
2127 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2129 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2131 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2132 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2136 # Don't reference the old file from this .changes file
2138 if p.file_id == oldf.file_id:
2141 poolfiles.append(newf)
2143 # Fix up the DSC references
2146 for df in source.srcfiles:
2147 if df.poolfile.file_id == oldf.file_id:
2148 # Add a new DSC entry and mark the old one for deletion
2149 # Don't do it in the loop so we don't change the thing we're iterating over
2151 newdscf.source_id = source.source_id
2152 newdscf.poolfile_id = newf.file_id
2153 session.add(newdscf)
2163 # Make sure that our source object is up-to-date
2164 session.expire(source)
2166 # Add changelog information to the database
2167 self.store_changelog()
2169 # Install the files into the pool
2170 for newfile, entry in self.pkg.files.items():
2171 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2172 utils.move(newfile, destination)
2173 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2174 stats.accept_bytes += float(entry["size"])
2176 # Copy the .changes file across for suites which need it.
2177 copy_changes = dict([(x.copychanges, '')
2178 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2179 if x.copychanges is not None])
2181 for dest in copy_changes.keys():
2182 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2184 # We're done - commit the database changes
2186 # Our SQL session will automatically start a new transaction after
2189 # Now ensure that the metadata has been added
2190 # This has to be done after we copy the files into the pool
2191 # For source if we have it:
2192 if self.pkg.changes["architecture"].has_key("source"):
2193 import_metadata_into_db(source, session)
2195 # Now for any of our binaries
2197 import_metadata_into_db(b, session)
2201 # Move the .changes into the 'done' directory
2202 ye, mo, da = time.gmtime()[0:3]
2203 donedir = os.path.join(cnf["Dir::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
2204 if not os.path.isdir(donedir):
2205 os.makedirs(donedir)
2207 utils.move(self.pkg.changes_file,
2208 os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2210 if self.pkg.changes["architecture"].has_key("source"):
2211 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
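# The urgency log is later fed to the testing migration machinery, which uses
# the urgency to decide how long an upload must age before it can migrate.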
2214 self.Subst["__SUMMARY__"] = summary
2215 mail_message = utils.TemplateSubst(self.Subst,
2216 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2217 utils.send_mail(mail_message)
2218 self.announce(short_summary, 1)
2220 ## Helper stuff for DebBugs Version Tracking
2221 if cnf.Find("Dir::BTSVersionTrack"):
2222 if self.pkg.changes["architecture"].has_key("source"):
2223 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2224 version_history = os.fdopen(fd, 'w')
2225 version_history.write(self.pkg.dsc["bts changelog"])
2226 version_history.close()
2227 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2228 self.pkg.changes_file[:-8]+".versions")
2229 os.rename(temp_filename, filename)
2230 os.chmod(filename, 0o644)
2232 # Write out the binary -> source mapping.
2233 (fd, temp_filename) = utils.temp_filename(cnf["Dir::BTSVersionTrack"], prefix=".")
2234 debinfo = os.fdopen(fd, 'w')
2235 for name, entry in sorted(self.pkg.files.items()):
2236 if entry["type"] == "deb":
2237 line = " ".join([entry["package"], entry["version"],
2238 entry["architecture"], entry["source package"],
2239 entry["source version"]])
2240 debinfo.write(line+"\n")
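# e.g. a hypothetical line: "hello 2.10-1 amd64 hello 2.10-1"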
2242 filename = "%s/%s" % (cnf["Dir::BTSVersionTrack"],
2243 self.pkg.changes_file[:-8]+".debinfo")
2244 os.rename(temp_filename, filename)
2245 os.chmod(filename, 0o644)
2249 # Set up our copy queues (e.g. buildd queues)
2250 for suite_name in self.pkg.changes["distribution"].keys():
2251 suite = get_suite(suite_name, session)
2252 for q in suite.copy_queues:
2254 q.add_file_from_pool(f)
2259 stats.accept_count += 1
2261 def check_override(self):
2263 Checks override entries for validity. Mails "Override disparity" warnings,
2264 if that feature is enabled.
2266 Abandons the check if
2267 - override disparity checks are disabled
2268 - mail sending is disabled
2273 # Abandon the check if override disparity checks have been disabled
2274 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2277 summary = self.pkg.check_override()
2282 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2285 self.Subst["__SUMMARY__"] = summary
2286 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2287 utils.send_mail(mail_message)
2288 del self.Subst["__SUMMARY__"]
2290 ###########################################################################
2292 def remove(self, from_dir=None):
2294 Used (for instance) in p-u to remove the package from unchecked
2296 Also removes the package from the holding area.
2298 if from_dir is None:
2299 from_dir = self.pkg.directory
2302 for f in self.pkg.files.keys():
2303 os.unlink(os.path.join(from_dir, f))
2304 if os.path.exists(os.path.join(h.holding_dir, f)):
2305 os.unlink(os.path.join(h.holding_dir, f))
2307 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2308 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2309 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2311 ###########################################################################
2313 def move_to_queue (self, queue):
2315 Move files to a destination queue using the permissions in the table
2318 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2319 queue.path, perms=int(queue.change_perms, 8))
2320 for f in self.pkg.files.keys():
2321 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2323 ###########################################################################
2325 def force_reject(self, reject_files):
2327 Forcefully move files from the current directory to the
2328 reject directory. If any file already exists in the reject
2329 directory it will be moved to the morgue to make way for
2332 @type reject_files: dict
2333 @param reject_files: file dictionary
2339 for file_entry in reject_files:
2340 # Skip any files which don't exist or which we don't have permission to copy.
2341 if os.access(file_entry, os.R_OK) == 0:
2344 dest_file = os.path.join(cnf["Dir::Reject"], file_entry)
2347 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o644)
2348 except OSError as e:
2349 # File exists? Let's find a new name by adding a number
2350 if e.errno == errno.EEXIST:
2352 dest_file = utils.find_next_free(dest_file, 255)
2353 except NoFreeFilenameError:
2354 # Something's either gone badly Pete Tong, or
2355 # someone is trying to exploit us.
2356 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Reject"]))
2359 # Make sure we really got it
2361 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2362 except OSError as e:
2364 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2368 # If we got here, we own the destination file, so we can
2369 # safely overwrite it.
2370 utils.move(file_entry, dest_file, 1, perms=0o660)
2373 ###########################################################################
2374 def do_reject (self, manual=0, reject_message="", notes=""):
2376 Reject an upload. If called without a reject message or C{manual} is
2377 true, spawn an editor so the user can write one.
2380 @param manual: manual or automated rejection
2382 @type reject_message: string
2383 @param reject_message: A reject message
2388 # If we weren't given a manual rejection message, spawn an
2389 # editor so the user can add one in...
2390 if manual and not reject_message:
2391 (fd, temp_filename) = utils.temp_filename()
2392 temp_file = os.fdopen(fd, 'w')
2395 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2396 % (note.author, note.version, note.notedate, note.comment))
2398 editor = os.environ.get("EDITOR","vi")
2400 while answer == 'E':
2401 os.system("%s %s" % (editor, temp_filename))
2402 temp_fh = utils.open_file(temp_filename)
2403 reject_message = "".join(temp_fh.readlines())
2405 print "Reject message:"
2406 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2407 prompt = "[R]eject, Edit, Abandon, Quit ?"
2409 while prompt.find(answer) == -1:
2410 answer = utils.our_raw_input(prompt)
2411 m = re_default_answer.search(prompt)
2414 answer = answer[:1].upper()
2415 os.unlink(temp_filename)
2421 print "Rejecting.\n"
2425 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2426 reason_filename = os.path.join(cnf["Dir::Reject"], reason_filename)
2427 changesfile = os.path.join(cnf["Dir::Reject"], self.pkg.changes_file)
2429 # Move all the files into the reject directory
2430 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2431 self.force_reject(reject_files)
2433 # Change permissions of the .changes file to be world readable
2434 os.chmod(changesfile, os.stat(changesfile).st_mode | stat.S_IROTH)
2436 # If we fail here someone is probably trying to exploit the race
2437 # so let's just raise an exception ...
2438 if os.path.exists(reason_filename):
2439 os.unlink(reason_filename)
2440 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o644)
2442 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2446 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2447 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2448 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2449 os.write(reason_fd, reject_message)
2450 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2452 # Build up the rejection email
2453 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2454 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2455 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2456 self.Subst["__REJECT_MESSAGE__"] = ""
2457 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2458 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2459 # Write the rejection email out as the <foo>.reason file
2460 os.write(reason_fd, reject_mail_message)
2462 del self.Subst["__REJECTOR_ADDRESS__"]
2463 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2464 del self.Subst["__CC__"]
2468 # Send the rejection mail
2469 utils.send_mail(reject_mail_message)
2472 self.logger.log(["rejected", self.pkg.changes_file])
2474 stats = SummaryStats()
2475 stats.reject_count += 1
2478 ################################################################################
2479 def in_override_p(self, package, component, suite, binary_type, filename, session):
2481 Check if a package already has override entries in the DB
2483 @type package: string
2484 @param package: package name
2486 @type component: string
2487 @param component: component name
2490 @param suite: suite name
2492 @type binary_type: string
2493 @param binary_type: type of the package
2495 @type filename: string
2496 @param filename: filename we check
2498 @return: the database result. But no one cares anyway.
2504 if binary_type == "": # must be source
2507 file_type = binary_type
2509 # Override suite name; used for example with proposed-updates
2510 oldsuite = get_suite(suite, session)
2511 if (not oldsuite is None) and oldsuite.overridesuite:
2512 suite = oldsuite.overridesuite
2514 result = get_override(package, suite, component, file_type, session)
2516 # If checking for a source package fall back on the binary override type
2517 if file_type == "dsc" and len(result) < 1:
2518 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2520 # Remember the section and priority so we can check them later if appropriate
2523 self.pkg.files[filename]["override section"] = result.section.section
2524 self.pkg.files[filename]["override priority"] = result.priority.priority
2529 ################################################################################
2530 def get_anyversion(self, sv_list, suite):
2533 @param sv_list: list of (suite, version) tuples to check
2536 @param suite: suite name
2542 anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
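# i.e. consider the requested suite plus every suite it "Enhances" according
# to the version checks configuration, and return the highest version seen in
# any of them.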
2543 for (s, v) in sv_list:
2544 if s in [ x.lower() for x in anysuite ]:
2545 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2550 ################################################################################
2552 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2555 @param sv_list: list of (suite, version) tuples to check
2557 @type filename: string
2558 @param filename: filename of the file being checked (used in reject messages)
2560 @type new_version: string
2561 @param new_version: version of the package being uploaded
2563 Ensure versions are newer than existing packages in target
2564 suites and that cross-suite version checking rules as
2565 set out in the conf file are satisfied.
2570 # Check versions for each target suite
2571 for target_suite in self.pkg.changes["distribution"].keys():
2572 # Check we can find the target suite
2573 ts = get_suite(target_suite)
2575 self.rejects.append("Cannot find target suite %s to perform version checks" % target_suite)
2578 must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2579 must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2581 # Enforce "must be newer than target suite" even if conffile omits it
2582 if target_suite not in must_be_newer_than:
2583 must_be_newer_than.append(target_suite)
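# Reminder: apt_pkg.VersionCompare(a, b) is strcmp-like (negative if a < b,
# zero if equal, positive if a > b).  Below, "vercmp < 1" therefore means the
# new version is <= what a must-be-newer-than suite already has, while
# "vercmp > -1" means it is >= what a must-be-older-than suite has.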
2585 for (suite, existent_version) in sv_list:
2586 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
2588 if suite in must_be_newer_than and sourceful and vercmp < 1:
2589 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2591 if suite in must_be_older_than and vercmp > -1:
2594 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2595 # we really use the other suite, ignoring the conflicting one ...
2596 addsuite = self.pkg.changes["distribution-version"][suite]
2598 add_version = self.get_anyversion(sv_list, addsuite)
2599 target_version = self.get_anyversion(sv_list, target_suite)
2602 # not add_version can only happen if we map to a suite
2603 # that doesn't enhance the suite we're propup'ing from.
2604 # so "propup-ver x a b c; map a d" is a problem only if
2605 # d doesn't enhance a.
2607 # i think we could always propagate in this case, rather
2608 # than complaining. either way, this isn't a REJECT issue
2610 # And - we really should complain to the dorks who configured dak
2611 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyway" % (suite, addsuite))
2612 self.pkg.changes.setdefault("propdistribution", {})
2613 self.pkg.changes["propdistribution"][addsuite] = 1
2615 elif not target_version:
2616 # not target_version is true when the package is NEW
2617 # we could just stick with the "...old version..." REJECT
2618 # for this, I think.
2619 self.rejects.append("Won't propagate NEW packages.")
2620 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2621 # propagation would be redundant. no need to reject though.
2622 self.warnings.append("ignoring versionconflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2624 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2625 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2627 self.warnings.append("Propagating upload to %s" % (addsuite))
2628 self.pkg.changes.setdefault("propdistribution", {})
2629 self.pkg.changes["propdistribution"][addsuite] = 1
2633 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2635 ################################################################################
2636 def check_binary_against_db(self, filename, session):
2637 # Ensure version is sane
2638 self.cross_suite_version_check( \
2639 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2640 self.pkg.files[filename]["architecture"], session),
2641 filename, self.pkg.files[filename]["version"], sourceful=False)
2643 # Check for any existing copies of the file
2644 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2645 q = q.filter_by(version=self.pkg.files[filename]["version"])
2646 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2649 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2651 ################################################################################
2653 def check_source_against_db(self, filename, session):
2654 source = self.pkg.dsc.get("source")
2655 version = self.pkg.dsc.get("version")
2657 # Ensure version is sane
2658 self.cross_suite_version_check( \
2659 get_suite_version_by_source(source, session), filename, version,
2662 ################################################################################
2663 def check_dsc_against_db(self, filename, session):
2666 @warning: NB: this function can remove entries from the 'files' index [if
2667 the orig tarball is a duplicate of the one in the archive]; if
2668 you're iterating over 'files' and call this function as part of
2669 the loop, be sure to add a check to the top of the loop to
2670 ensure you haven't just tried to dereference the deleted entry.
2675 self.pkg.orig_files = {} # XXX: do we need to clear it?
2676 orig_files = self.pkg.orig_files
2678 # Try and find all files mentioned in the .dsc. This has
2679 # to work harder to cope with the multiple possible
2680 # locations of an .orig.tar.gz.
2681 # The ordering on the select is needed to pick the newest orig
2682 # when it exists in multiple places.
2683 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2685 if self.pkg.files.has_key(dsc_name):
2686 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2687 actual_size = int(self.pkg.files[dsc_name]["size"])
2688 found = "%s in incoming" % (dsc_name)
2690 # Check the file does not already exist in the archive
2691 ql = get_poolfile_like_name(dsc_name, session)
2693 # Strip out anything that isn't '%s' or '/%s$'
2695 if not i.filename.endswith(dsc_name):
2698 # "[dak] has not broken them. [dak] has fixed a
2699 # brokenness. Your crappy hack exploited a bug in
2702 # "(Come on! I thought it was always obvious that
2703 # one just doesn't release different files with
2704 # the same name and version.)"
2705 # -- ajk@ on d-devel@l.d.o
2708 # Ignore exact matches for .orig.tar.gz
2710 if re_is_orig_source.match(dsc_name):
2712 if self.pkg.files.has_key(dsc_name) and \
2713 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2714 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2715 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2716 # TODO: Don't delete the entry, just mark it as not needed
2717 # This would fix the stupidity of changing something we often iterate over
2718 # whilst we're doing it
2719 del self.pkg.files[dsc_name]
2720 dsc_entry["files id"] = i.file_id
2721 if not orig_files.has_key(dsc_name):
2722 orig_files[dsc_name] = {}
2723 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2726 # Don't bitch that we couldn't find this file later
2728 self.later_check_files.remove(dsc_name)
2734 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2736 elif re_is_orig_source.match(dsc_name):
2738 ql = get_poolfile_like_name(dsc_name, session)
2740 # Strip out anything that isn't '%s' or '/%s$'
2741 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2743 if not i.filename.endswith(dsc_name):
2747 # Unfortunately, we may get more than one match here if,
2748 # for example, the package was in potato but had an -sa
2749 # upload in woody. So we need to choose the right one.
2751 # default to something sane in case we don't match any or have only one
2756 old_file = os.path.join(i.location.path, i.filename)
2757 old_file_fh = utils.open_file(old_file)
2758 actual_md5 = apt_pkg.md5sum(old_file_fh)
2760 actual_size = os.stat(old_file)[stat.ST_SIZE]
2761 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2764 old_file = os.path.join(i.location.path, i.filename)
2765 old_file_fh = utils.open_file(old_file)
2766 actual_md5 = apt_pkg.md5sum(old_file_fh)
2768 actual_size = os.stat(old_file)[stat.ST_SIZE]
2770 suite_type = x.location.archive_type
2771 # need this for updating dsc_files in install()
2772 dsc_entry["files id"] = x.file_id
2773 # See install() in process-accepted...
2774 if not orig_files.has_key(dsc_name):
2775 orig_files[dsc_name] = {}
2776 orig_files[dsc_name]["id"] = x.file_id
2777 orig_files[dsc_name]["path"] = old_file
2778 orig_files[dsc_name]["location"] = x.location.location_id
2780 # TODO: Determine queue list dynamically
2781 # Not there? Check the queue directories...
2782 for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates", "embargoed", "unembargoed" ]:
2783 queue = get_policy_queue(queue_name, session)
2787 in_otherdir = os.path.join(queue.path, dsc_name)
2789 if os.path.exists(in_otherdir):
2790 in_otherdir_fh = utils.open_file(in_otherdir)
2791 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2792 in_otherdir_fh.close()
2793 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2795 if not orig_files.has_key(dsc_name):
2796 orig_files[dsc_name] = {}
2797 orig_files[dsc_name]["path"] = in_otherdir
2800 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2803 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2805 if actual_md5 != dsc_entry["md5sum"]:
2806 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2807 if actual_size != int(dsc_entry["size"]):
2808 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2810 ################################################################################
2811 # This is used by process-new and process-holding to recheck a changes file
2812 # at the time we're running. It mainly wraps various other internal functions
2813 # and is similar to accepted_checks - these should probably be tidied up
2815 def recheck(self, session):
2817 for f in self.pkg.files.keys():
2818 # The .orig.tar.gz can disappear out from under us if it's a
2819 # duplicate of one in the archive.
2820 if not self.pkg.files.has_key(f):
2823 entry = self.pkg.files[f]
2825 # Check that the source still exists
2826 if entry["type"] == "deb":
2827 source_version = entry["source version"]
2828 source_package = entry["source package"]
2829 if not self.pkg.changes["architecture"].has_key("source") \
2830 and not source_exists(source_package, source_version, \
2831 suites = self.pkg.changes["distribution"].keys(), session = session):
2832 source_epochless_version = re_no_epoch.sub('', source_version)
2833 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2835 for queue_name in ["embargoed", "unembargoed", "newstage"]:
2836 queue = get_policy_queue(queue_name, session)
2837 if queue and os.path.exists(os.path.join(queue.path, dsc_filename)):
2840 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2842 # Version and file overwrite checks
2843 if entry["type"] == "deb":
2844 self.check_binary_against_db(f, session)
2845 elif entry["type"] == "dsc":
2846 self.check_source_against_db(f, session)
2847 self.check_dsc_against_db(f, session)
2849 ################################################################################
2850 def accepted_checks(self, overwrite_checks, session):
2851 # Recheck anything that relies on the database; since that's not
2852 # frozen between accept and our run time when called from p-a.
2854 # overwrite_checks is set to False when installing to stable/oldstable
2859 # Find the .dsc (again)
2861 for f in self.pkg.files.keys():
2862 if self.pkg.files[f]["type"] == "dsc":
2865 for checkfile in self.pkg.files.keys():
2866 # The .orig.tar.gz can disappear out from under us if it's a
2867 # duplicate of one in the archive.
2868 if not self.pkg.files.has_key(checkfile):
2871 entry = self.pkg.files[checkfile]
2873 # Check that the source still exists
2874 if entry["type"] == "deb":
2875 source_version = entry["source version"]
2876 source_package = entry["source package"]
2877 if not self.pkg.changes["architecture"].has_key("source") \
2878 and not source_exists(source_package, source_version, \
2879 suites = self.pkg.changes["distribution"].keys(), \
2881 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2883 # Version and file overwrite checks
2884 if overwrite_checks:
2885 if entry["type"] == "deb":
2886 self.check_binary_against_db(checkfile, session)
2887 elif entry["type"] == "dsc":
2888 self.check_source_against_db(checkfile, session)
2889 self.check_dsc_against_db(dsc_filename, session)
2891 # Propagate in the case it is in the override tables:
2892 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2893 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2894 propogate[suite] = 1
2896 nopropogate[suite] = 1
2898 for suite in propogate.keys():
2899 if suite in nopropogate:
2901 self.pkg.changes["distribution"][suite] = 1
2903 for checkfile in self.pkg.files.keys():
2904 # Check the package is still in the override tables
2905 for suite in self.pkg.changes["distribution"].keys():
2906 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2907 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2909 ################################################################################
2910 # If any file of an upload has a recent mtime then chances are good
2911 # the file is still being uploaded.
2913 def upload_too_new(self):
2916 # Move back to the original directory to get accurate time stamps
2918 os.chdir(self.pkg.directory)
2919 file_list = self.pkg.files.keys()
2920 file_list.extend(self.pkg.dsc_files.keys())
2921 file_list.append(self.pkg.changes_file)
2924 last_modified = time.time()-os.path.getmtime(f)
2925 if last_modified < int(cnf["Dinstall::SkipTime"]):
2934 def store_changelog(self):
2936 # Skip binary-only upload if it is not a bin-NMU
2937 if not self.pkg.changes['architecture'].has_key('source'):
2938 from daklib.regexes import re_bin_only_nmu
2939 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2942 session = DBConn().session()
2944 # Check if upload already has a changelog entry
2945 query = """SELECT changelog_id FROM changes WHERE source = :source
2946 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2947 if session.execute(query, {'source': self.pkg.changes['source'], \
2948 'version': self.pkg.changes['version'], \
2949 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2953 # Add current changelog text into changelogs_text table, return created ID
2954 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2955 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
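# Note: INSERT ... RETURNING is PostgreSQL syntax; that is fine here, as dak's
# projectb database runs on PostgreSQL.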
2957 # Link ID to the upload available in changes table
2958 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2959 AND version = :version AND architecture = :architecture"""
2960 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2961 'version': self.pkg.changes['version'], \
2962 'architecture': " ".join(self.pkg.changes['architecture'].keys())})