5 Queue utility functions for dak
7 @contact: Debian FTP Master <ftpmaster@debian.org>
8 @copyright: 2001 - 2006 James Troup <james@nocrew.org>
9 @copyright: 2009, 2010 Joerg Jaspert <joerg@debian.org>
10 @license: GNU General Public License version 2 or later
13 # This program is free software; you can redistribute it and/or modify
14 # it under the terms of the GNU General Public License as published by
15 # the Free Software Foundation; either version 2 of the License, or
16 # (at your option) any later version.
18 # This program is distributed in the hope that it will be useful,
19 # but WITHOUT ANY WARRANTY; without even the implied warranty of
20 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 # GNU General Public License for more details.
23 # You should have received a copy of the GNU General Public License
24 # along with this program; if not, write to the Free Software
25 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 ###############################################################################
41 from sqlalchemy.sql.expression import desc
42 from sqlalchemy.orm.exc import NoResultFound
46 from dak_exceptions import *
49 from config import Config
50 from holding import Holding
51 from urgencylog import UrgencyLog
53 from summarystats import SummaryStats
54 from utils import parse_changes, check_dsc_files, build_package_set
55 from textutils import fix_maintainer
56 from lintian import parse_lintian_output, generate_reject_messages
57 from contents import UnpackedSource
59 # suppress some deprecation warnings in squeeze related to apt_pkg
62 warnings.filterwarnings('ignore', \
63 "apt_pkg.ParseSection\(\) is deprecated. Please see apt_pkg\.TagSection\(\) for the replacement\.", \
65 warnings.filterwarnings('ignore', \
66 "Attribute 'Find' of the 'apt_pkg\.TagSection' object is deprecated, use 'find' instead\.", \
69 ###############################################################################
71 def get_type(f, session):
73 Get the file type of C{f}
76 @param f: file entry from Changes object
78 @type session: SQLA Session
79 @param session: SQL Alchemy session object
86 if f.has_key("dbtype"):
87 file_type = f["dbtype"]
88 elif re_source_ext.match(f["type"]):
90 elif f['architecture'] == 'source' and f["type"] == 'unreadable':
91 utils.warn('unreadable source file (will continue and hope for the best)')
95 utils.fubar("invalid type (%s) for new. Dazed, confused and sure as heck not continuing." % (file_type))
97 # Validate the override type
98 type_id = get_override_type(file_type, session)
100 utils.fubar("invalid type (%s) for new. Say wha?" % (file_type))
104 ################################################################################
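# Illustrative sketch (not part of dak): the decision get_type() above makes,
# reduced to plain dictionaries.  The entries below are hypothetical; real
# ones come from Upload.pkg.files, and the result is then validated against
# the override types in the database.
def _example_file_type(entry):
    # binary entries already carry an explicit "dbtype" ("deb" or "udeb")
    if "dbtype" in entry:
        return entry["dbtype"]
    # source files are recognised by extension (re_source_ext) and all map to "dsc"
    if entry.get("type") in ("dsc", "diff.gz", "tar.gz"):
        return "dsc"
    return None

assert _example_file_type({"dbtype": "udeb"}) == "udeb"
assert _example_file_type({"type": "dsc"}) == "dsc"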
106 # Determine what parts in a .changes are NEW
108 def determine_new(filename, changes, files, warn=1, session = None, dsc = None, new = None):
110 Determine what parts in a C{changes} file are NEW.
113 @param filename: changes filename
115 @type changes: Upload.Pkg.changes dict
116 @param changes: Changes dictionary
118 @type files: Upload.Pkg.files dict
119 @param files: Files dictionary
122 @param warn: Warn if overrides are added for (old)stable
124 @type dsc: Upload.Pkg.dsc dict
125 @param dsc: (optional); Dsc dictionary
128 @param new: new packages as returned by a previous call to this function, but override information may have changed
131 @return: dictionary of NEW components.
134 # TODO: This should all use the database instead of parsing the changes
140 dbchg = get_dbchange(filename, session)
142 print "Warning: cannot find changes file in database; won't check byhand"
144 # Try to get the Package-Set field from an included .dsc file (if possible).
146 for package, entry in build_package_set(dsc, session).items():
147 if not new.has_key(package):
150 # Build up a list of potentially new things
151 for name, f in files.items():
152 # Keep a record of byhand elements
153 if f["section"] == "byhand":
158 priority = f["priority"]
159 section = f["section"]
160 file_type = get_type(f, session)
161 component = f["component"]
163 if file_type == "dsc":
166 if not new.has_key(pkg):
168 new[pkg]["priority"] = priority
169 new[pkg]["section"] = section
170 new[pkg]["type"] = file_type
171 new[pkg]["component"] = component
172 new[pkg]["files"] = []
174 old_type = new[pkg]["type"]
175 if old_type != file_type:
176 # source gets trumped by deb or udeb
177 if old_type == "dsc":
178 new[pkg]["priority"] = priority
179 new[pkg]["section"] = section
180 new[pkg]["type"] = file_type
181 new[pkg]["component"] = component
183 new[pkg]["files"].append(name)
185 if f.has_key("othercomponents"):
186 new[pkg]["othercomponents"] = f["othercomponents"]
188 # Fix up the list of target suites
190 for suite in changes["suite"].keys():
191 oldsuite = get_suite(suite, session)
193 print "WARNING: Invalid suite %s found" % suite
196 if oldsuite.overridesuite:
197 newsuite = get_suite(oldsuite.overridesuite, session)
200 print "INFORMATION: Using overrides from suite %s instead of suite %s" % (
201 oldsuite.overridesuite, suite)
202 del changes["suite"][suite]
203 changes["suite"][oldsuite.overridesuite] = 1
205 print "WARNING: Told to use overridesuite %s for %s but it doesn't exist. Bugger" % (
206 oldsuite.overridesuite, suite)
208 # Check for unprocessed byhand files
209 if dbchg is not None:
210 for b in byhand.keys():
211 # Find the file entry in the database
213 for f in dbchg.files:
216 # If it's processed, we can ignore it
222 print "Warning: Couldn't find BYHAND item %s in the database; assuming unprocessed" % b
224 # Check for new stuff
225 for suite in changes["suite"].keys():
226 for pkg in new.keys():
227 ql = get_override(pkg, suite, new[pkg]["component"], new[pkg]["type"], session)
229 for file_entry in new[pkg]["files"]:
230 if files[file_entry].has_key("new"):
231 del files[file_entry]["new"]
235 for s in ['stable', 'oldstable']:
236 if changes["suite"].has_key(s):
237 print "WARNING: overrides will be added for %s!" % s
238 for pkg in new.keys():
239 if new[pkg].has_key("othercomponents"):
240 print "WARNING: %s already present in %s distribution." % (pkg, new[pkg]["othercomponents"])
244 ################################################################################
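# Illustrative sketch (not part of dak): the shape of the dictionary
# determine_new() above builds and returns.  Keys are package names; each
# value records the override data and the files in the upload that belong to
# that package.  The concrete values here are hypothetical.
_example_new = {
    "foo": {
        "priority": "optional",
        "section": "utils",
        "type": "deb",
        "component": "main",
        "files": ["foo_1.0-1_amd64.deb"],
        # only present when the package already exists in another component
        "othercomponents": "contrib",
    },
}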
246 def check_valid(new, session = None):
248 Check if section and priority for NEW packages exist in database.
249 Additionally does sanity checks:
250 - debian-installer packages have to be udeb (or source)
251 - non-debian-installer packages cannot be udeb
252 - source priority can only be assigned to dsc file types
255 @param new: Dict of new packages with their section, priority and type.
258 for pkg in new.keys():
259 section_name = new[pkg]["section"]
260 priority_name = new[pkg]["priority"]
261 file_type = new[pkg]["type"]
263 section = get_section(section_name, session)
265 new[pkg]["section id"] = -1
267 new[pkg]["section id"] = section.section_id
269 priority = get_priority(priority_name, session)
271 new[pkg]["priority id"] = -1
273 new[pkg]["priority id"] = priority.priority_id
276 di = section_name.find("debian-installer") != -1
278 # If d-i, we must be udeb and vice-versa
279 if (di and file_type not in ("udeb", "dsc")) or \
280 (not di and file_type == "udeb"):
281 new[pkg]["section id"] = -1
283 # Only .dsc files may carry the "source" priority and vice-versa
284 if (priority_name == "source" and file_type != "dsc") or \
285 (priority_name != "source" and file_type == "dsc"):
286 new[pkg]["priority id"] = -1
288 ###############################################################################
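# Illustrative sketch (not part of dak): the two sanity rules check_valid()
# above applies, written out as plain predicates.  A failing rule sets
# "section id" / "priority id" to -1, which marks the entry as invalid.
def _example_di_rule_ok(section, file_type):
    di = "debian-installer" in section
    # d-i sections may only contain udebs (or the source package itself)
    return not ((di and file_type not in ("udeb", "dsc")) or
                (not di and file_type == "udeb"))

def _example_source_priority_ok(priority, file_type):
    # only .dsc entries may carry the "source" priority, and vice versa
    return not ((priority == "source" and file_type != "dsc") or
                (priority != "source" and file_type == "dsc"))

assert _example_di_rule_ok("debian-installer", "udeb")
assert not _example_di_rule_ok("utils", "udeb")
assert _example_source_priority_ok("source", "dsc")
assert not _example_source_priority_ok("optional", "dsc")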
290 # Used by Upload.check_timestamps
291 class TarTime(object):
292 def __init__(self, future_cutoff, past_cutoff):
294 self.future_cutoff = future_cutoff
295 self.past_cutoff = past_cutoff
298 self.future_files = {}
299 self.ancient_files = {}
301 def callback(self, Kind, Name, Link, Mode, UID, GID, Size, MTime, Major, Minor):
302 if MTime > self.future_cutoff:
303 self.future_files[Name] = MTime
304 if MTime < self.past_cutoff:
305 self.ancient_files[Name] = MTime
307 ###############################################################################
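# Illustrative sketch (not part of dak): how TarTime above is driven.
# check_timestamps() further down feeds it every member of a .deb's control
# and data tarballs via apt_inst.debExtract(); afterwards future_files and
# ancient_files hold anything outside the allowed window.  The path and the
# 24-hour grace period here are hypothetical.
import time
import apt_inst

def _example_scan_deb(path):
    tar = TarTime(future_cutoff=time.time() + 24 * 3600, past_cutoff=0)
    deb_file = open(path)
    apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
    deb_file.close()
    return tar.future_files, tar.ancient_files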
309 def prod_maintainer(notes, upload):
312 # Here we prepare an editor and get them ready to prod...
313 (fd, temp_filename) = utils.temp_filename()
314 temp_file = os.fdopen(fd, 'w')
316 temp_file.write(note.comment)
318 editor = os.environ.get("EDITOR","vi")
321 os.system("%s %s" % (editor, temp_filename))
322 temp_fh = utils.open_file(temp_filename)
323 prod_message = "".join(temp_fh.readlines())
325 print "Prod message:"
326 print utils.prefix_multi_line_string(prod_message," ",include_blank_lines=1)
327 prompt = "[P]rod, Edit, Abandon, Quit ?"
329 while prompt.find(answer) == -1:
330 answer = utils.our_raw_input(prompt)
331 m = re_default_answer.search(prompt)
334 answer = answer[:1].upper()
335 os.unlink(temp_filename)
341 # Otherwise, do the prodding...
342 user_email_address = utils.whoami() + " <%s>" % (
343 cnf["Dinstall::MyAdminAddress"])
347 Subst["__FROM_ADDRESS__"] = user_email_address
348 Subst["__PROD_MESSAGE__"] = prod_message
349 Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
351 prod_mail_message = utils.TemplateSubst(
352 Subst,cnf["Dir::Templates"]+"/process-new.prod")
355 utils.send_mail(prod_mail_message)
357 print "Sent prodding message"
359 ################################################################################
361 def edit_note(note, upload, session, trainee=False):
362 # Write the current data to a temporary file
363 (fd, temp_filename) = utils.temp_filename()
364 editor = os.environ.get("EDITOR","vi")
367 os.system("%s %s" % (editor, temp_filename))
368 temp_file = utils.open_file(temp_filename)
369 newnote = temp_file.read().rstrip()
372 print utils.prefix_multi_line_string(newnote," ")
373 prompt = "[D]one, Edit, Abandon, Quit ?"
375 while prompt.find(answer) == -1:
376 answer = utils.our_raw_input(prompt)
377 m = re_default_answer.search(prompt)
380 answer = answer[:1].upper()
381 os.unlink(temp_filename)
388 comment = NewComment()
389 comment.package = upload.pkg.changes["source"]
390 comment.version = upload.pkg.changes["version"]
391 comment.comment = newnote
392 comment.author = utils.whoami()
393 comment.trainee = trainee
397 ###############################################################################
399 # suite names DMs can upload to
400 dm_suites = ['unstable', 'experimental']
402 def get_newest_source(source, session):
403 'returns the newest DBSource object in dm_suites'
404 ## the most recent version of the package uploaded to unstable or
405 ## experimental includes the field "DM-Upload-Allowed: yes" in the source
406 ## section of its control file
407 q = session.query(DBSource).filter_by(source = source). \
408 filter(DBSource.suites.any(Suite.suite_name.in_(dm_suites))). \
409 order_by(desc('source.version'))
412 def get_suite_version_by_source(source, session):
413 'returns a list of tuples (suite_name, version) for source package'
414 q = session.query(Suite.suite_name, DBSource.version). \
415 join(Suite.sources).filter_by(source = source)
418 def get_source_by_package_and_suite(package, suite_name, session):
420 returns a DBSource query filtered by DBBinary.package and this package's
423 return session.query(DBSource). \
424 join(DBSource.binaries).filter_by(package = package). \
425 join(DBBinary.suites).filter_by(suite_name = suite_name)
427 def get_suite_version_by_package(package, arch_string, session):
429 returns a list of tuples (suite_name, version) for binary package and
432 return session.query(Suite.suite_name, DBBinary.version). \
433 join(Suite.binaries).filter_by(package = package). \
434 join(DBBinary.architecture). \
435 filter(Architecture.arch_string.in_([arch_string, 'all'])).all()
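# Illustrative sketch (not part of dak): typical use of the query helpers
# above.  The package name and architecture are hypothetical; a real session
# comes from DBConn().session().
def _example_source_queries(session):
    # newest source in unstable/experimental (what DM checks are judged against)
    newest = get_newest_source("hello", session)
    # (suite_name, version) pairs for a source package
    src_pairs = get_suite_version_by_source("hello", session)
    # (suite_name, version) pairs for a binary package on amd64 (or arch "all")
    bin_pairs = get_suite_version_by_package("hello", "amd64", session)
    return newest, src_pairs, bin_pairs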
437 class Upload(object):
439 Everything that has to do with processing an upload.
447 ###########################################################################
450 """ Reset a number of internal variables."""
452 # Initialize the substitution template map
455 self.Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
456 self.Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
457 self.Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
458 self.Subst["__DAK_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
464 self.later_check_files = []
468 def package_info(self):
470 Format various messages from this Upload to send to the maintainer.
474 ('Reject Reasons', self.rejects),
475 ('Warnings', self.warnings),
476 ('Notes', self.notes),
480 for title, messages in msgs:
482 msg += '\n\n%s:\n%s' % (title, '\n'.join(messages))
487 ###########################################################################
488 def update_subst(self):
489 """ Set up the per-package template substitution mappings """
493 # If 'dak process-unchecked' crashed out in the right place, architecture may still be a string.
494 if not self.pkg.changes.has_key("architecture") or not \
495 isinstance(self.pkg.changes["architecture"], dict):
496 self.pkg.changes["architecture"] = { "Unknown" : "" }
498 # and maintainer2047 may not exist.
499 if not self.pkg.changes.has_key("maintainer2047"):
500 self.pkg.changes["maintainer2047"] = cnf["Dinstall::MyEmailAddress"]
502 self.Subst["__ARCHITECTURE__"] = " ".join(self.pkg.changes["architecture"].keys())
503 self.Subst["__CHANGES_FILENAME__"] = os.path.basename(self.pkg.changes_file)
504 self.Subst["__FILE_CONTENTS__"] = self.pkg.changes.get("filecontents", "")
506 # For source uploads the Changed-By field wins; otherwise Maintainer wins.
507 if self.pkg.changes["architecture"].has_key("source") and \
508 self.pkg.changes["changedby822"] != "" and \
509 (self.pkg.changes["changedby822"] != self.pkg.changes["maintainer822"]):
511 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["changedby2047"]
512 self.Subst["__MAINTAINER_TO__"] = "%s, %s" % (self.pkg.changes["changedby2047"], self.pkg.changes["maintainer2047"])
513 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("changed-by", "Unknown")
515 self.Subst["__MAINTAINER_FROM__"] = self.pkg.changes["maintainer2047"]
516 self.Subst["__MAINTAINER_TO__"] = self.pkg.changes["maintainer2047"]
517 self.Subst["__MAINTAINER__"] = self.pkg.changes.get("maintainer", "Unknown")
519 # Process policy doesn't set the fingerprint field and I don't want to make it
520 # do it for now as I don't want to have to deal with the case where we accepted
521 # the package into PU-NEW, but the fingerprint has gone away from the keyring in
522 # the meantime so the package will be remarked as rejectable. Urgh.
523 # TODO: Fix this properly
524 if self.pkg.changes.has_key('fingerprint'):
525 session = DBConn().session()
526 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session)
527 if fpr and self.check_if_upload_is_sponsored("%s@debian.org" % fpr.uid.uid, fpr.uid.name):
528 if self.pkg.changes.has_key("sponsoremail"):
529 self.Subst["__MAINTAINER_TO__"] += ", %s" % self.pkg.changes["sponsoremail"]
532 if cnf.has_key("Dinstall::TrackingServer") and self.pkg.changes.has_key("source"):
533 self.Subst["__MAINTAINER_TO__"] += "\nBcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
535 # Apply any global override of the Maintainer field
536 if cnf.get("Dinstall::OverrideMaintainer"):
537 self.Subst["__MAINTAINER_TO__"] = cnf["Dinstall::OverrideMaintainer"]
538 self.Subst["__MAINTAINER_FROM__"] = cnf["Dinstall::OverrideMaintainer"]
540 self.Subst["__REJECT_MESSAGE__"] = self.package_info()
541 self.Subst["__SOURCE__"] = self.pkg.changes.get("source", "Unknown")
542 self.Subst["__VERSION__"] = self.pkg.changes.get("version", "Unknown")
543 self.Subst["__SUITE__"] = ", ".join(self.pkg.changes["distribution"])
545 ###########################################################################
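# Illustrative sketch (not part of dak): the per-package substitution map that
# update_subst() above fills in is later applied to a mail template with
# utils.TemplateSubst(), as prod_maintainer() earlier in this file does.  The
# values here are hypothetical.
_example_subst = {
    "__SOURCE__": "hello",
    "__VERSION__": "1.0-1",
    "__SUITE__": "unstable",
    "__MAINTAINER_TO__": "Joe Maintainer <joe@example.org>",
}
# utils.TemplateSubst(_example_subst, cnf["Dir::Templates"] + "/process-new.prod")
# returns the template text with every __KEY__ placeholder replaced.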
546 def load_changes(self, filename):
548 Load a changes file and set up a dictionary around it. Also checks for mandatory
551 @type filename: string
552 @param filename: Changes filename, full path.
555 @return: whether the changes file was valid or not. We may want to
556 reject even if this is True (see what gets put in self.rejects).
557 This is simply to prevent us even trying things later which will
558 fail because we couldn't properly parse the file.
561 self.pkg.changes_file = filename
563 # Parse the .changes file into a dictionary
565 self.pkg.changes.update(parse_changes(filename))
566 except CantOpenError:
567 self.rejects.append("%s: can't read file." % (filename))
569 except ParseChangesError, line:
570 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
572 except ChangesUnicodeError:
573 self.rejects.append("%s: changes file not proper utf-8" % (filename))
576 # Parse the Files field from the .changes into another dictionary
578 self.pkg.files.update(utils.build_file_list(self.pkg.changes))
579 except ParseChangesError, line:
580 self.rejects.append("%s: parse error, can't grok: %s." % (filename, line))
582 except UnknownFormatError, format:
583 self.rejects.append("%s: unknown format '%s'." % (filename, format))
586 # Check for mandatory fields
587 for i in ("distribution", "source", "binary", "architecture",
588 "version", "maintainer", "files", "changes", "description"):
589 if not self.pkg.changes.has_key(i):
590 # Avoid undefined errors later
591 self.rejects.append("%s: Missing mandatory field `%s'." % (filename, i))
594 # Strip a source version in brackets from the source field
595 if re_strip_srcver.search(self.pkg.changes["source"]):
596 self.pkg.changes["source"] = re_strip_srcver.sub('', self.pkg.changes["source"])
598 # Ensure the source field is a valid package name.
599 if not re_valid_pkg_name.match(self.pkg.changes["source"]):
600 self.rejects.append("%s: invalid source name '%s'." % (filename, self.pkg.changes["source"]))
602 # Split multi-value fields into a lower-level dictionary
603 for i in ("architecture", "distribution", "binary", "closes"):
604 o = self.pkg.changes.get(i, "")
606 del self.pkg.changes[i]
608 self.pkg.changes[i] = {}
611 self.pkg.changes[i][j] = 1
613 # Fix the Maintainer: field to be RFC822/2047 compatible
615 (self.pkg.changes["maintainer822"],
616 self.pkg.changes["maintainer2047"],
617 self.pkg.changes["maintainername"],
618 self.pkg.changes["maintaineremail"]) = \
619 fix_maintainer (self.pkg.changes["maintainer"])
620 except ParseMaintError, msg:
621 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
622 % (filename, self.pkg.changes["maintainer"], msg))
624 # ...likewise for the Changed-By: field if it exists.
626 (self.pkg.changes["changedby822"],
627 self.pkg.changes["changedby2047"],
628 self.pkg.changes["changedbyname"],
629 self.pkg.changes["changedbyemail"]) = \
630 fix_maintainer (self.pkg.changes.get("changed-by", ""))
631 except ParseMaintError, msg:
632 self.pkg.changes["changedby822"] = ""
633 self.pkg.changes["changedby2047"] = ""
634 self.pkg.changes["changedbyname"] = ""
635 self.pkg.changes["changedbyemail"] = ""
637 self.rejects.append("%s: Changed-By field ('%s') failed to parse: %s" \
638 % (filename, self.pkg.changes["changed-by"], msg))
640 # Ensure all the values in Closes: are numbers
641 if self.pkg.changes.has_key("closes"):
642 for i in self.pkg.changes["closes"].keys():
643 if re_isanum.match (i) == None:
644 self.rejects.append(("%s: `%s' from Closes field isn't a number." % (filename, i)))
646 # chopversion = no epoch; chopversion2 = no epoch and no revision (e.g. for .orig.tar.gz comparison)
647 self.pkg.changes["chopversion"] = re_no_epoch.sub('', self.pkg.changes["version"])
648 self.pkg.changes["chopversion2"] = re_no_revision.sub('', self.pkg.changes["chopversion"])
650 # Check the .changes is non-empty
651 if not self.pkg.files:
652 self.rejects.append("%s: nothing to do (Files field is empty)." % (os.path.basename(self.pkg.changes_file)))
655 # Changes was syntactically valid even if we'll reject
658 ###########################################################################
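# Illustrative sketch (not part of dak): two of the transformations
# load_changes() above performs.  Multi-value fields such as Architecture are
# split into a dict keyed by value, and fix_maintainer() yields the
# RFC822/RFC2047 forms plus the split name and email.  The address used here
# is hypothetical.
def _example_split_field(value):
    # "source amd64 i386" -> {"source": 1, "amd64": 1, "i386": 1}
    return dict((v, 1) for v in value.split())

def _example_maintainer_forms():
    (rfc822, rfc2047, name, email) = fix_maintainer("Joe Maintainer <joe@example.org>")
    return name, email   # expected: ("Joe Maintainer", "joe@example.org")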
660 def check_distributions(self):
661 "Check and map the Distribution field"
665 # Handle suite mappings
666 for m in Cnf.ValueList("SuiteMappings"):
669 if mtype == "map" or mtype == "silent-map":
670 (source, dest) = args[1:3]
671 if self.pkg.changes["distribution"].has_key(source):
672 del self.pkg.changes["distribution"][source]
673 self.pkg.changes["distribution"][dest] = 1
674 if mtype != "silent-map":
675 self.notes.append("Mapping %s to %s." % (source, dest))
676 if self.pkg.changes.has_key("distribution-version"):
677 if self.pkg.changes["distribution-version"].has_key(source):
678 self.pkg.changes["distribution-version"][source]=dest
679 elif mtype == "map-unreleased":
680 (source, dest) = args[1:3]
681 if self.pkg.changes["distribution"].has_key(source):
682 for arch in self.pkg.changes["architecture"].keys():
683 if arch not in [ a.arch_string for a in get_suite_architectures(source) ]:
684 self.notes.append("Mapping %s to %s for unreleased architecture %s." % (source, dest, arch))
685 del self.pkg.changes["distribution"][source]
686 self.pkg.changes["distribution"][dest] = 1
688 elif mtype == "ignore":
690 if self.pkg.changes["distribution"].has_key(suite):
691 del self.pkg.changes["distribution"][suite]
692 self.warnings.append("Ignoring %s as a target suite." % (suite))
693 elif mtype == "reject":
695 if self.pkg.changes["distribution"].has_key(suite):
696 self.rejects.append("Uploads to %s are not accepted." % (suite))
697 elif mtype == "propup-version":
698 # give these as "uploaded-to(non-mapped) suites-to-add-when-upload-obsoletes"
700 # changes["distribution-version"] looks like: {'testing': 'testing-proposed-updates'}
701 if self.pkg.changes["distribution"].has_key(args[1]):
702 self.pkg.changes.setdefault("distribution-version", {})
703 for suite in args[2:]:
704 self.pkg.changes["distribution-version"][suite] = suite
706 # Ensure there is (still) a target distribution
707 if len(self.pkg.changes["distribution"].keys()) < 1:
708 self.rejects.append("No valid distribution remaining.")
710 # Ensure target distributions exist
711 for suite in self.pkg.changes["distribution"].keys():
712 if not Cnf.has_key("Suite::%s" % (suite)):
713 self.rejects.append("Unknown distribution `%s'." % (suite))
715 ###########################################################################
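# Illustrative sketch (not part of dak): the kind of SuiteMappings entries
# check_distributions() above walks, and how a plain "map" entry rewrites the
# target suites.  The suite names in this list are hypothetical.
_example_suite_mappings = [
    "silent-map stable-security stable-proposed-updates",   # rename, no note
    "map testing-security testing-proposed-updates",        # rename and note it
    "ignore stable",                                         # drop the target suite
    "reject oldstable",                                      # refuse the upload
    "propup-version testing-security testing",               # record a version prop-up
]

def _example_apply_map(distribution, mapping):
    args = mapping.split()
    if args[0] in ("map", "silent-map"):
        source, dest = args[1:3]
        if source in distribution:
            del distribution[source]
            distribution[dest] = 1
    return distribution

# _example_apply_map({"testing-security": 1}, _example_suite_mappings[1])
#   -> {"testing-proposed-updates": 1}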
717 def binary_file_checks(self, f, session):
719 entry = self.pkg.files[f]
721 # Extract package control information
722 deb_file = utils.open_file(f)
724 control = apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))
726 self.rejects.append("%s: debExtractControl() raised %s." % (f, sys.exc_type))
728 # Can't continue, none of the checks on control would work.
731 # Check for mandatory "Description:"
734 apt_pkg.ParseSection(apt_inst.debExtractControl(deb_file))["Description"] + '\n'
736 self.rejects.append("%s: Missing Description in binary package" % (f))
741 # Check for mandatory fields
742 for field in [ "Package", "Architecture", "Version" ]:
743 if control.Find(field) == None:
745 self.rejects.append("%s: No %s field in control." % (f, field))
748 # Ensure the package name matches the one given in the .changes
749 if not self.pkg.changes["binary"].has_key(control.Find("Package", "")):
750 self.rejects.append("%s: control file lists name as `%s', which isn't in changes file." % (f, control.Find("Package", "")))
752 # Validate the package field
753 package = control.Find("Package")
754 if not re_valid_pkg_name.match(package):
755 self.rejects.append("%s: invalid package name '%s'." % (f, package))
757 # Validate the version field
758 version = control.Find("Version")
759 if not re_valid_version.match(version):
760 self.rejects.append("%s: invalid version number '%s'." % (f, version))
762 # Ensure the architecture of the .deb is one we know about.
763 default_suite = cnf.get("Dinstall::DefaultSuite", "Unstable")
764 architecture = control.Find("Architecture")
765 upload_suite = self.pkg.changes["distribution"].keys()[0]
767 if architecture not in [a.arch_string for a in get_suite_architectures(default_suite, session = session)] \
768 and architecture not in [a.arch_string for a in get_suite_architectures(upload_suite, session = session)]:
769 self.rejects.append("Unknown architecture '%s'." % (architecture))
771 # Ensure the architecture of the .deb is one of the ones
772 # listed in the .changes.
773 if not self.pkg.changes["architecture"].has_key(architecture):
774 self.rejects.append("%s: control file lists arch as `%s', which isn't in changes file." % (f, architecture))
776 # Sanity-check the Depends field
777 depends = control.Find("Depends")
779 self.rejects.append("%s: Depends field is empty." % (f))
781 # Sanity-check the Provides field
782 provides = control.Find("Provides")
784 provide = re_spacestrip.sub('', provides)
786 self.rejects.append("%s: Provides field is empty." % (f))
787 prov_list = provide.split(",")
788 for prov in prov_list:
789 if not re_valid_pkg_name.match(prov):
790 self.rejects.append("%s: Invalid Provides field content %s." % (f, prov))
792 # If there is a Built-Using field, we need to check we can find the
793 # exact source version
794 built_using = control.Find("Built-Using")
797 entry["built-using"] = []
798 for dep in apt_pkg.parse_depends(built_using):
799 bu_s, bu_v, bu_e = dep[0]
800 # Check that it's an exact match dependency and we have
801 # some form of version
802 if bu_e != "=" or len(bu_v) < 1:
803 self.rejects.append("%s: Built-Using contains non strict dependency (%s %s %s)" % (f, bu_s, bu_e, bu_v))
805 # Find the source id for this version
806 bu_so = get_sources_from_name(bu_s, version=bu_v, session = session)
808 self.rejects.append("%s: Built-Using (%s = %s): Cannot find source package" % (f, bu_s, bu_v))
810 entry["built-using"].append( (bu_so[0].source, bu_so[0].version, ) )
812 except ValueError, e:
813 self.rejects.append("%s: Cannot parse Built-Using field: %s" % (f, str(e)))
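# Illustrative sketch (not part of dak): the structure apt_pkg.parse_depends()
# returns for a Built-Using value, which the loop above unpacks.  Each outer
# element is a group of "|" alternatives; Built-Using must use strict "="
# relations, so every group holds a single (name, version, "=") tuple.  The
# package names and versions are hypothetical.
import apt_pkg

def _example_parse_built_using():
    parsed = apt_pkg.parse_depends("gcc-10 (= 10.2.1-6), binutils (= 2.35.2-2)")
    # parsed == [[("gcc-10", "10.2.1-6", "=")], [("binutils", "2.35.2-2", "=")]]
    return [group[0] for group in parsed]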
816 # Check the section & priority match those given in the .changes (non-fatal)
817 if control.Find("Section") and entry["section"] != "" \
818 and entry["section"] != control.Find("Section"):
819 self.warnings.append("%s control file lists section as `%s', but changes file has `%s'." % \
820 (f, control.Find("Section", ""), entry["section"]))
821 if control.Find("Priority") and entry["priority"] != "" \
822 and entry["priority"] != control.Find("Priority"):
823 self.warnings.append("%s control file lists priority as `%s', but changes file has `%s'." % \
824 (f, control.Find("Priority", ""), entry["priority"]))
826 entry["package"] = package
827 entry["architecture"] = architecture
828 entry["version"] = version
829 entry["maintainer"] = control.Find("Maintainer", "")
831 if f.endswith(".udeb"):
832 self.pkg.files[f]["dbtype"] = "udeb"
833 elif f.endswith(".deb"):
834 self.pkg.files[f]["dbtype"] = "deb"
836 self.rejects.append("%s is neither a .deb nor a .udeb." % (f))
838 entry["source"] = control.Find("Source", entry["package"])
840 # Get the source version
841 source = entry["source"]
844 if source.find("(") != -1:
845 m = re_extract_src_version.match(source)
847 source_version = m.group(2)
849 if not source_version:
850 source_version = self.pkg.files[f]["version"]
852 entry["source package"] = source
853 entry["source version"] = source_version
855 # Ensure the filename matches the contents of the .deb
856 m = re_isadeb.match(f)
859 file_package = m.group(1)
860 if entry["package"] != file_package:
861 self.rejects.append("%s: package part of filename (%s) does not match package name in the %s (%s)." % \
862 (f, file_package, entry["dbtype"], entry["package"]))
863 epochless_version = re_no_epoch.sub('', control.Find("Version"))
866 file_version = m.group(2)
867 if epochless_version != file_version:
868 self.rejects.append("%s: version part of filename (%s) does not match package version in the %s (%s)." % \
869 (f, file_version, entry["dbtype"], epochless_version))
872 file_architecture = m.group(3)
873 if entry["architecture"] != file_architecture:
874 self.rejects.append("%s: architecture part of filename (%s) does not match package architecture in the %s (%s)." % \
875 (f, file_architecture, entry["dbtype"], entry["architecture"]))
877 # Check for existing source
878 source_version = entry["source version"]
879 source_package = entry["source package"]
880 if self.pkg.changes["architecture"].has_key("source"):
881 if source_version != self.pkg.changes["version"]:
882 self.rejects.append("source version (%s) for %s doesn't match changes version %s." % \
883 (source_version, f, self.pkg.changes["version"]))
885 # Check in the SQL database
886 if not source_exists(source_package, source_version, suites = \
887 self.pkg.changes["distribution"].keys(), session = session):
888 # Check in one of the other directories
889 source_epochless_version = re_no_epoch.sub('', source_version)
890 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
891 if os.path.exists(os.path.join(cnf["Dir::Queue::Byhand"], dsc_filename)):
893 elif os.path.exists(os.path.join(cnf["Dir::Queue::New"], dsc_filename)):
896 dsc_file_exists = False
897 for myq in ["Embargoed", "Unembargoed", "ProposedUpdates", "OldProposedUpdates"]:
898 if cnf.has_key("Dir::Queue::%s" % (myq)):
899 if os.path.exists(os.path.join(cnf["Dir::Queue::" + myq], dsc_filename)):
900 dsc_file_exists = True
903 if not dsc_file_exists:
904 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
906 # Check the version and check for file overwrites
907 self.check_binary_against_db(f, session)
909 def source_file_checks(self, f, session):
910 entry = self.pkg.files[f]
912 m = re_issource.match(f)
916 entry["package"] = m.group(1)
917 entry["version"] = m.group(2)
918 entry["type"] = m.group(3)
920 # Ensure the source package name matches the Source field in the .changes
921 if self.pkg.changes["source"] != entry["package"]:
922 self.rejects.append("%s: changes file doesn't say %s for Source" % (f, entry["package"]))
924 # Ensure the source version matches the version in the .changes file
925 if re_is_orig_source.match(f):
926 changes_version = self.pkg.changes["chopversion2"]
928 changes_version = self.pkg.changes["chopversion"]
930 if changes_version != entry["version"]:
931 self.rejects.append("%s: should be %s according to changes file." % (f, changes_version))
933 # Ensure the .changes lists source in the Architecture field
934 if not self.pkg.changes["architecture"].has_key("source"):
935 self.rejects.append("%s: changes file doesn't list `source' in Architecture field." % (f))
937 # Check the signature of a .dsc file
938 if entry["type"] == "dsc":
939 # check_signature returns either:
940 # (None, [list, of, rejects]) or (signature, [])
941 (self.pkg.dsc["fingerprint"], rejects) = utils.check_signature(f)
943 self.rejects.append(j)
945 entry["architecture"] = "source"
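# Illustrative sketch (not part of dak): the three groups source_file_checks()
# above pulls out of a source filename with re_issource -- package, version
# and file type.  The filename is hypothetical.
def _example_source_filename_parts():
    m = re_issource.match("hello_1.0-1.dsc")
    if m:
        return m.group(1), m.group(2), m.group(3)   # expected: ("hello", "1.0-1", "dsc")
    return None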
947 def per_suite_file_checks(self, f, suite, session):
949 entry = self.pkg.files[f]
952 if entry.has_key("byhand"):
955 # Check we have fields we need to do these checks
957 for m in ['component', 'package', 'priority', 'size', 'md5sum']:
958 if not entry.has_key(m):
959 self.rejects.append("file '%s' does not have field %s set" % (f, m))
965 # Handle component mappings
966 for m in cnf.ValueList("ComponentMappings"):
967 (source, dest) = m.split()
968 if entry["component"] == source:
969 entry["original component"] = source
970 entry["component"] = dest
972 # Ensure the component is valid for the target suite
973 if cnf.has_key("Suite::%s::Components" % (suite)) and \
974 entry["component"] not in cnf.ValueList("Suite::%s::Components" % (suite)):
975 self.rejects.append("unknown component `%s' for suite `%s'." % (entry["component"], suite))
978 # Validate the component
979 if not get_component(entry["component"], session):
980 self.rejects.append("file '%s' has unknown component '%s'." % (f, entry["component"]))
983 # See if the package is NEW
984 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), f, session):
987 # Validate the priority
988 if entry["priority"].find('/') != -1:
989 self.rejects.append("file '%s' has invalid priority '%s' [contains '/']." % (f, entry["priority"]))
991 # Determine the location
992 location = cnf["Dir::Pool"]
993 l = get_location(location, entry["component"], session=session)
995 self.rejects.append("[INTERNAL ERROR] couldn't determine location (Component: %s)" % entry["component"])
996 entry["location id"] = -1
998 entry["location id"] = l.location_id
1000 # Check the md5sum & size against existing files (if any)
1001 entry["pool name"] = utils.poolify(self.pkg.changes["source"], entry["component"])
1003 found, poolfile = check_poolfile(os.path.join(entry["pool name"], f),
1004 entry["size"], entry["md5sum"], entry["location id"])
1007 self.rejects.append("INTERNAL ERROR, get_files_id() returned multiple matches for %s." % (f))
1008 elif found is False and poolfile is not None:
1009 self.rejects.append("md5sum and/or size mismatch on existing copy of %s." % (f))
1011 if poolfile is None:
1012 entry["files id"] = None
1014 entry["files id"] = poolfile.file_id
1016 # Check for packages that have moved from one component to another
1017 entry['suite'] = suite
1018 arch_list = [entry["architecture"], 'all']
1019 component = get_component_by_package_suite(self.pkg.files[f]['package'], \
1020 [suite], arch_list = arch_list, session = session)
1021 if component is not None:
1022 entry["othercomponents"] = component
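# Illustrative sketch (not part of dak): the pool directory naming assumed for
# the "pool name" check above -- the usual Debian pool layout with a
# one-letter prefix (four letters for "lib*" sources).  This is a hypothetical
# re-implementation for illustration; the real value comes from utils.poolify().
def _example_pool_name(source, component):
    prefix = source[:4] if source.startswith("lib") else source[:1]
    return "%s/%s/%s/" % (component, prefix, source)

# _example_pool_name("hello", "main")      -> "main/h/hello/"
# _example_pool_name("libfoo", "contrib")  -> "contrib/libf/libfoo/"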
1024 def check_files(self, action=True):
1025 file_keys = self.pkg.files.keys()
1031 os.chdir(self.pkg.directory)
1033 ret = holding.copy_to_holding(f)
1035 self.warnings.append('Could not copy %s to holding; will attempt to find in DB later' % f)
1039 # check whether we already know the changes file
1040 # [NB: this check must be done post-suite mapping]
1041 base_filename = os.path.basename(self.pkg.changes_file)
1043 session = DBConn().session()
1046 dbc = session.query(DBChange).filter_by(changesname=base_filename).one()
1047 # if in the pool or in a queue other than unchecked, reject
1048 if (dbc.in_queue is None) \
1049 or (dbc.in_queue is not None
1050 and dbc.in_queue.queue_name not in ["unchecked", "newstage"]):
1051 self.rejects.append("%s file already known to dak" % base_filename)
1052 except NoResultFound, e:
1056 has_binaries = False
1059 for f, entry in self.pkg.files.items():
1060 # Ensure the file does not already exist in one of the accepted directories
1061 for d in [ "Byhand", "New", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
1062 if not cnf.has_key("Dir::Queue::%s" % (d)): continue
1063 if os.path.exists(os.path.join(cnf["Dir::Queue::%s" % (d) ], f)):
1064 self.rejects.append("%s file already exists in the %s directory." % (f, d))
1066 if not re_taint_free.match(f):
1067 self.rejects.append("!!WARNING!! tainted filename: '%s'." % (f))
1069 # Check the file is readable
1070 if os.access(f, os.R_OK) == 0:
1071 # When running in -n, copy_to_holding() won't have
1072 # generated the reject_message, so we need to.
1074 if os.path.exists(f):
1075 self.rejects.append("Can't read `%s'. [permission denied]" % (f))
1077 # Don't directly reject, mark to check later to deal with orig's
1078 # we can find in the pool
1079 self.later_check_files.append(f)
1080 entry["type"] = "unreadable"
1083 # If it's byhand skip remaining checks
1084 if entry["section"] == "byhand" or entry["section"][:4] == "raw-":
1086 entry["type"] = "byhand"
1088 # Checks for a binary package...
1089 elif re_isadeb.match(f):
1091 entry["type"] = "deb"
1093 # This routine appends to self.rejects/warnings as appropriate
1094 self.binary_file_checks(f, session)
1096 # Checks for a source package...
1097 elif re_issource.match(f):
1100 # This routine appends to self.rejects/warnings as appropriate
1101 self.source_file_checks(f, session)
1103 # Not a binary or source package? Assume byhand...
1106 entry["type"] = "byhand"
1108 # Per-suite file checks
1109 entry["oldfiles"] = {}
1110 for suite in self.pkg.changes["distribution"].keys():
1111 self.per_suite_file_checks(f, suite, session)
1115 # If the .changes file says it has source, it must have source.
1116 if self.pkg.changes["architecture"].has_key("source"):
1118 self.rejects.append("no source found and Architecture line in changes mentions source.")
1120 if not has_binaries and cnf.FindB("Dinstall::Reject::NoSourceOnly"):
1121 self.rejects.append("source only uploads are not supported.")
1123 ###########################################################################
1125 def __dsc_filename(self):
1127 Returns: (Status, Dsc_Filename)
1129 Status: Boolean; True when there was no error, False otherwise
1130 Dsc_Filename: String; name of the dsc file if Status is True, reason for the error otherwise
1135 for name, entry in self.pkg.files.items():
1136 if entry.has_key("type") and entry["type"] == "dsc":
1138 return False, "cannot process a .changes file with multiple .dsc's."
1142 if not dsc_filename:
1143 return False, "source uploads must contain a dsc file"
1145 return True, dsc_filename
1147 def load_dsc(self, action=True, signing_rules=1):
1149 Find and load the dsc from self.pkg.files into self.pkg.dsc
1151 Returns: (Status, Reason)
1153 Status: Boolean; True when there was no error, False otherwise
1154 Reason: String; When Status is False this describes the error
1158 (status, dsc_filename) = self.__dsc_filename()
1160 # If status is false, dsc_filename has the reason
1161 return False, dsc_filename
1164 self.pkg.dsc.update(utils.parse_changes(dsc_filename, signing_rules=signing_rules, dsc_file=1))
1165 except CantOpenError:
1167 return False, "%s: can't read file." % (dsc_filename)
1168 except ParseChangesError, line:
1169 return False, "%s: parse error, can't grok: %s." % (dsc_filename, line)
1170 except InvalidDscError, line:
1171 return False, "%s: syntax error on line %s." % (dsc_filename, line)
1172 except ChangesUnicodeError:
1173 return False, "%s: dsc file not proper utf-8." % (dsc_filename)
1177 ###########################################################################
1179 def check_dsc(self, action=True, session=None):
1180 """Returns bool indicating whether or not the source changes are valid"""
1181 # Ensure there is source to check
1182 if not self.pkg.changes["architecture"].has_key("source"):
1185 (status, reason) = self.load_dsc(action=action)
1187 self.rejects.append(reason)
1189 (status, dsc_filename) = self.__dsc_filename()
1191 # If status is false, dsc_filename has the reason
1192 self.rejects.append(dsc_filename)
1195 # Build up the file list of files mentioned by the .dsc
1197 self.pkg.dsc_files.update(utils.build_file_list(self.pkg.dsc, is_a_dsc=1))
1198 except NoFilesFieldError:
1199 self.rejects.append("%s: no Files: field." % (dsc_filename))
1201 except UnknownFormatError, format:
1202 self.rejects.append("%s: unknown format '%s'." % (dsc_filename, format))
1204 except ParseChangesError, line:
1205 self.rejects.append("%s: parse error, can't grok: %s." % (dsc_filename, line))
1208 # Enforce mandatory fields
1209 for i in ("format", "source", "version", "binary", "maintainer", "architecture", "files"):
1210 if not self.pkg.dsc.has_key(i):
1211 self.rejects.append("%s: missing mandatory field `%s'." % (dsc_filename, i))
1214 # Validate the source and version fields
1215 if not re_valid_pkg_name.match(self.pkg.dsc["source"]):
1216 self.rejects.append("%s: invalid source name '%s'." % (dsc_filename, self.pkg.dsc["source"]))
1217 if not re_valid_version.match(self.pkg.dsc["version"]):
1218 self.rejects.append("%s: invalid version number '%s'." % (dsc_filename, self.pkg.dsc["version"]))
1220 # Only a limited set of source formats is allowed in each suite
1221 for dist in self.pkg.changes["distribution"].keys():
1222 allowed = [ x.format_name for x in get_suite_src_formats(dist, session) ]
1223 if self.pkg.dsc["format"] not in allowed:
1224 self.rejects.append("%s: source format '%s' not allowed in %s (accepted: %s) " % (dsc_filename, self.pkg.dsc["format"], dist, ", ".join(allowed)))
1226 # Validate the Maintainer field
1228 # We ignore the return value
1229 fix_maintainer(self.pkg.dsc["maintainer"])
1230 except ParseMaintError, msg:
1231 self.rejects.append("%s: Maintainer field ('%s') failed to parse: %s" \
1232 % (dsc_filename, self.pkg.dsc["maintainer"], msg))
1234 # Validate the build-depends field(s)
1235 for field_name in [ "build-depends", "build-depends-indep" ]:
1236 field = self.pkg.dsc.get(field_name)
1238 # Have apt try to parse them...
1240 apt_pkg.ParseSrcDepends(field)
1242 self.rejects.append("%s: invalid %s field (can not be parsed by apt)." % (dsc_filename, field_name.title()))
1244 # Ensure the version number in the .dsc matches the version number in the .changes
1245 epochless_dsc_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1246 changes_version = self.pkg.files[dsc_filename]["version"]
1248 if epochless_dsc_version != self.pkg.files[dsc_filename]["version"]:
1249 self.rejects.append("version ('%s') in .dsc does not match version ('%s') in .changes." % (epochless_dsc_version, changes_version))
1252 # Ensure the Files field contains only what's expected
1252 self.rejects.extend(check_dsc_files(dsc_filename, self.pkg.dsc, self.pkg.dsc_files))
1254 # Ensure source is newer than existing source in target suites
1255 session = DBConn().session()
1256 self.check_source_against_db(dsc_filename, session)
1257 self.check_dsc_against_db(dsc_filename, session)
1259 dbchg = get_dbchange(self.pkg.changes_file, session)
1261 # Finally, check if we're missing any files
1262 for f in self.later_check_files:
1264 # Check if we've already processed this file if we have a dbchg object
1267 for pf in dbchg.files:
1268 if pf.filename == f and pf.processed:
1269 self.notes.append('%s was already processed so we can go ahead' % f)
1271 del self.pkg.files[f]
1273 self.rejects.append("Could not find file %s references in changes" % f)
1279 ###########################################################################
1281 def get_changelog_versions(self, source_dir):
1282 """Extracts the source package and (optionally) grabs the
1283 version history out of debian/changelog for the BTS."""
1287 # Find the .dsc (again)
1289 for f in self.pkg.files.keys():
1290 if self.pkg.files[f]["type"] == "dsc":
1293 # If there isn't one, we have nothing to do. (We have reject()ed the upload already)
1294 if not dsc_filename:
1297 # Create a symlink mirror of the source files in our temporary directory
1298 for f in self.pkg.files.keys():
1299 m = re_issource.match(f)
1301 src = os.path.join(source_dir, f)
1302 # If a file is missing for whatever reason, give up.
1303 if not os.path.exists(src):
1306 if re_is_orig_source.match(f) and self.pkg.orig_files.has_key(f) and \
1307 self.pkg.orig_files[f].has_key("path"):
1309 dest = os.path.join(os.getcwd(), f)
1310 os.symlink(src, dest)
1312 # If the orig files are not a part of the upload, create symlinks to the
1314 for orig_file in self.pkg.orig_files.keys():
1315 if not self.pkg.orig_files[orig_file].has_key("path"):
1317 dest = os.path.join(os.getcwd(), os.path.basename(orig_file))
1318 os.symlink(self.pkg.orig_files[orig_file]["path"], dest)
1320 # Extract the source
1322 unpacked = UnpackedSource(dsc_filename)
1324 self.rejects.append("'dpkg-source -x' failed for %s." % dsc_filename)
1327 if not cnf.Find("Dir::Queue::BTSVersionTrack"):
1330 # Get the upstream version
1331 upstr_version = re_no_epoch.sub('', self.pkg.dsc["version"])
1332 if re_strip_revision.search(upstr_version):
1333 upstr_version = re_strip_revision.sub('', upstr_version)
1335 # Ensure the changelog file exists
1336 changelog_file = unpacked.get_changelog_file()
1337 if changelog_file is None:
1338 self.rejects.append("%s: debian/changelog not found in extracted source." % (dsc_filename))
1341 # Parse the changelog
1342 self.pkg.dsc["bts changelog"] = ""
1343 for line in changelog_file.readlines():
1344 m = re_changelog_versions.match(line)
1346 self.pkg.dsc["bts changelog"] += line
1347 changelog_file.close()
1350 # Check we found at least one revision in the changelog
1351 if not self.pkg.dsc["bts changelog"]:
1352 self.rejects.append("%s: changelog format not recognised (empty version tree)." % (dsc_filename))
1354 def check_source(self):
1356 # a) there's no source
1357 if not self.pkg.changes["architecture"].has_key("source"):
1360 tmpdir = utils.temp_dirname()
1362 # Move into the temporary directory
1366 # Get the changelog version history
1367 self.get_changelog_versions(cwd)
1369 # Move back and cleanup the temporary tree
1373 shutil.rmtree(tmpdir)
1375 if e.errno != errno.EACCES:
1377 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1379 self.rejects.append("%s: source tree could not be cleanly removed." % (self.pkg.dsc["source"]))
1380 # We probably have u-r or u-w directories so chmod everything
1382 cmd = "chmod -R u+rwx %s" % (tmpdir)
1383 result = os.system(cmd)
1385 utils.fubar("'%s' failed with result %s." % (cmd, result))
1386 shutil.rmtree(tmpdir)
1387 except Exception, e:
1388 print "foobar2 (%s)" % e
1389 utils.fubar("%s: couldn't remove tmp dir for source tree." % (self.pkg.dsc["source"]))
1391 ###########################################################################
1392 def ensure_hashes(self):
1393 # Make sure we recognise the format of the Files: field in the .changes
1394 format = self.pkg.changes.get("format", "0.0").split(".", 1)
1395 if len(format) == 2:
1396 format = int(format[0]), int(format[1])
1398 format = int(float(format[0])), 0
1400 # We need to deal with the original changes blob, as the fields we need
1401 # might not be in the changes dict serialised into the .dak anymore.
1402 orig_changes = utils.parse_deb822(self.pkg.changes['filecontents'])
1404 # Copy the checksums over to the current changes dict. This will keep
1405 # the existing modifications to it intact.
1406 for field in orig_changes:
1407 if field.startswith('checksums-'):
1408 self.pkg.changes[field] = orig_changes[field]
1410 # Check for unsupported hashes
1411 for j in utils.check_hash_fields(".changes", self.pkg.changes):
1412 self.rejects.append(j)
1414 for j in utils.check_hash_fields(".dsc", self.pkg.dsc):
1415 self.rejects.append(j)
1417 # We have to calculate the hash ourselves if the changes format version is older than
1418 # the one the hash was introduced in, rather than requiring it to be present in the changes file
1419 for hashname, hashfunc, version in utils.known_hashes:
1420 # TODO: Move _ensure_changes_hash into this class
1421 for j in utils._ensure_changes_hash(self.pkg.changes, format, version, self.pkg.files, hashname, hashfunc):
1422 self.rejects.append(j)
1423 if "source" in self.pkg.changes["architecture"]:
1424 # TODO: Move _ensure_dsc_hash into this class
1425 for j in utils._ensure_dsc_hash(self.pkg.dsc, self.pkg.dsc_files, hashname, hashfunc):
1426 self.rejects.append(j)
1428 def check_hashes(self):
1429 for m in utils.check_hash(".changes", self.pkg.files, "md5", apt_pkg.md5sum):
1430 self.rejects.append(m)
1432 for m in utils.check_size(".changes", self.pkg.files):
1433 self.rejects.append(m)
1435 for m in utils.check_hash(".dsc", self.pkg.dsc_files, "md5", apt_pkg.md5sum):
1436 self.rejects.append(m)
1438 for m in utils.check_size(".dsc", self.pkg.dsc_files):
1439 self.rejects.append(m)
1441 self.ensure_hashes()
1443 ###########################################################################
1445 def ensure_orig(self, target_dir='.', session=None):
1447 Ensures that all orig files mentioned in the changes file are present
1448 in target_dir. If they do not exist, they are symlinked into place.
1450 A list containing the symlinks that were created is returned (so they
1457 for filename, entry in self.pkg.dsc_files.iteritems():
1458 if not re_is_orig_source.match(filename):
1459 # File is not an orig; ignore
1462 if os.path.exists(filename):
1463 # File exists, no need to continue
1466 def symlink_if_valid(path):
1467 f = utils.open_file(path)
1468 md5sum = apt_pkg.md5sum(f)
1471 fingerprint = (os.stat(path)[stat.ST_SIZE], md5sum)
1472 expected = (int(entry['size']), entry['md5sum'])
1474 if fingerprint != expected:
1477 dest = os.path.join(target_dir, filename)
1479 os.symlink(path, dest)
1480 symlinked.append(dest)
1486 session_ = DBConn().session()
1491 for poolfile in get_poolfile_like_name('%s' % filename, session_):
1492 poolfile_path = os.path.join(
1493 poolfile.location.path, poolfile.filename
1496 if symlink_if_valid(poolfile_path):
1506 # Look in some other queues for the file
1507 queues = ('New', 'Byhand', 'ProposedUpdates',
1508 'OldProposedUpdates', 'Embargoed', 'Unembargoed')
1510 for queue in queues:
1511 if not cnf.get('Dir::Queue::%s' % queue):
1514 queuefile_path = os.path.join(
1515 cnf['Dir::Queue::%s' % queue], filename
1518 if not os.path.exists(queuefile_path):
1519 # Does not exist in this queue
1522 if symlink_if_valid(queuefile_path):
1527 ###########################################################################
1529 def check_lintian(self):
1531 Extends self.rejects by checking the output of lintian against tags
1532 specified in Dinstall::LintianTags.
1537 # Don't reject binary uploads
1538 if not self.pkg.changes['architecture'].has_key('source'):
1541 # Only check some distributions
1542 for dist in ('unstable', 'experimental'):
1543 if dist in self.pkg.changes['distribution']:
1548 # If we do not have a tagfile, don't do anything
1549 tagfile = cnf.get("Dinstall::LintianTags")
1553 # Parse the yaml file
1554 sourcefile = file(tagfile, 'r')
1555 sourcecontent = sourcefile.read()
1559 lintiantags = yaml.load(sourcecontent)['lintian']
1560 except yaml.YAMLError, msg:
1561 utils.fubar("Can not read the lintian tags file %s, YAML error: %s." % (tagfile, msg))
1564 # Try and find all orig mentioned in the .dsc
1565 symlinked = self.ensure_orig()
1567 # Set up the input file for lintian
1568 fd, temp_filename = utils.temp_filename()
1569 temptagfile = os.fdopen(fd, 'w')
1570 for tags in lintiantags.values():
1571 temptagfile.writelines(['%s\n' % x for x in tags])
1575 cmd = "lintian --show-overrides --tags-from-file %s %s" % \
1576 (temp_filename, self.pkg.changes_file)
1578 result, output = commands.getstatusoutput(cmd)
1580 # Remove our tempfile and any symlinks we created
1581 os.unlink(temp_filename)
1583 for symlink in symlinked:
1587 utils.warn("lintian failed for %s [return code: %s]." % \
1588 (self.pkg.changes_file, result))
1589 utils.warn(utils.prefix_multi_line_string(output, \
1590 " [possible output:] "))
1595 [self.pkg.changes_file, "check_lintian"] + list(txt)
1599 parsed_tags = parse_lintian_output(output)
1600 self.rejects.extend(
1601 generate_reject_messages(parsed_tags, lintiantags, log=log)
1604 ###########################################################################
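# Illustrative sketch (not part of dak): the shape of the Dinstall::LintianTags
# file that check_lintian() above reads with yaml.load().  The grouping keys
# and the tag names shown here are illustrative only; the code simply walks
# lintiantags.values() and hands the grouping to generate_reject_messages().
import yaml

_example_tagfile = """
lintian:
  fatal:
    - debian-changelog-file-missing
  nonfatal:
    - maintainer-script-empty
"""
# yaml.load(_example_tagfile)['lintian'] ->
#   {'fatal': ['debian-changelog-file-missing'], 'nonfatal': ['maintainer-script-empty']}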
1605 def check_urgency(self):
1607 if self.pkg.changes["architecture"].has_key("source"):
1608 if not self.pkg.changes.has_key("urgency"):
1609 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1610 self.pkg.changes["urgency"] = self.pkg.changes["urgency"].lower()
1611 if self.pkg.changes["urgency"] not in cnf.ValueList("Urgency::Valid"):
1612 self.warnings.append("%s is not a valid urgency; it will be treated as %s by testing." % \
1613 (self.pkg.changes["urgency"], cnf["Urgency::Default"]))
1614 self.pkg.changes["urgency"] = cnf["Urgency::Default"]
1616 ###########################################################################
1618 # Sanity check the time stamps of files inside debs.
1619 # [Files in the near future cause ugly warnings and extreme time
1620 # travel can cause errors on extraction]
1622 def check_timestamps(self):
1625 future_cutoff = time.time() + int(Cnf["Dinstall::FutureTimeTravelGrace"])
1626 past_cutoff = time.mktime(time.strptime(Cnf["Dinstall::PastCutoffYear"],"%Y"))
1627 tar = TarTime(future_cutoff, past_cutoff)
1629 for filename, entry in self.pkg.files.items():
1630 if entry["type"] == "deb":
1633 deb_file = utils.open_file(filename)
1634 apt_inst.debExtract(deb_file, tar.callback, "control.tar.gz")
1637 apt_inst.debExtract(deb_file, tar.callback, "data.tar.gz")
1638 except SystemError, e:
1639 # If we can't find a data.tar.gz, look for data.tar.bz2 instead.
1640 if not re.search(r"Cannot f[ui]nd chunk data.tar.gz$", str(e)):
1643 apt_inst.debExtract(deb_file,tar.callback,"data.tar.bz2")
1647 future_files = tar.future_files.keys()
1649 num_future_files = len(future_files)
1650 future_file = future_files[0]
1651 future_date = tar.future_files[future_file]
1652 self.rejects.append("%s: has %s file(s) with a time stamp too far into the future (e.g. %s [%s])."
1653 % (filename, num_future_files, future_file, time.ctime(future_date)))
1655 ancient_files = tar.ancient_files.keys()
1657 num_ancient_files = len(ancient_files)
1658 ancient_file = ancient_files[0]
1659 ancient_date = tar.ancient_files[ancient_file]
1660 self.rejects.append("%s: has %s file(s) with a time stamp too ancient (e.g. %s [%s])."
1661 % (filename, num_ancient_files, ancient_file, time.ctime(ancient_date)))
1663 self.rejects.append("%s: deb contents timestamp check failed [%s: %s]" % (filename, sys.exc_type, sys.exc_value))
1665 def check_if_upload_is_sponsored(self, uid_email, uid_name):
1666 if uid_email in [self.pkg.changes["maintaineremail"], self.pkg.changes["changedbyemail"]]:
1668 elif uid_name in [self.pkg.changes["maintainername"], self.pkg.changes["changedbyname"]]:
1674 if ("source" in self.pkg.changes["architecture"] and uid_email and utils.is_email_alias(uid_email)):
1675 sponsor_addresses = utils.gpg_get_key_addresses(self.pkg.changes["fingerprint"])
1676 if (self.pkg.changes["maintaineremail"] not in sponsor_addresses and
1677 self.pkg.changes["changedbyemail"] not in sponsor_addresses):
1678 self.pkg.changes["sponsoremail"] = uid_email
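# Illustrative sketch (not part of dak): the decision check_if_upload_is_sponsored()
# above encodes, reduced to plain arguments.  The addresses and names are
# hypothetical.
def _example_is_sponsored(uid_email, uid_name, maintainer_email, changedby_email,
                          maintainer_name="", changedby_name=""):
    if uid_email in (maintainer_email, changedby_email):
        return False          # the signer is the maintainer or the changer
    if uid_name in (maintainer_name, changedby_name):
        return False          # matched on the real name instead of the address
    return True               # otherwise treat it as a sponsored upload

assert _example_is_sponsored("dd@debian.org", "Some DD",
                             "joe@example.org", "joe@example.org")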
1683 ###########################################################################
1684 # check_signed_by_key checks
1685 ###########################################################################
1687 def check_signed_by_key(self):
1688 """Ensure the .changes is signed by an authorized uploader."""
1689 session = DBConn().session()
1691 # First of all we check that the person has proper upload permissions
1692 # and that this upload isn't blocked
1693 fpr = get_fingerprint(self.pkg.changes['fingerprint'], session=session)
1696 self.rejects.append("Cannot find fingerprint %s" % self.pkg.changes["fingerprint"])
1699 # TODO: Check that import-keyring adds UIDs properly
1701 self.rejects.append("Cannot find uid for fingerprint %s. Please contact ftpmaster@debian.org" % fpr.fingerprint)
1704 # Check that the fingerprint which uploaded has permission to do so
1705 self.check_upload_permissions(fpr, session)
1707 # Check that this package is not in a transition
1708 self.check_transition(session)
1713 def check_upload_permissions(self, fpr, session):
1714 # Check any one-off upload blocks
1715 self.check_upload_blocks(fpr, session)
1717 # If the source_acl is None, source is never allowed
1718 if fpr.source_acl is None:
1719 if self.pkg.changes["architecture"].has_key("source"):
1720 rej = 'Fingerprint %s may not upload source' % fpr.fingerprint
1721 rej += '\nPlease contact ftpmaster if you think this is incorrect'
1722 self.rejects.append(rej)
1724 # Do DM as a special case
1725 # DM is a special case unfortunately, so we check it first
1726 # (keys with no source access get more access than DMs in one
1727 # way; DMs can only upload for their packages whether source
1728 # or binary, whereas keys with no access might be able to
1729 # upload some binaries)
1730 elif fpr.source_acl.access_level == 'dm':
1731 self.check_dm_upload(fpr, session)
1733 # If not a DM, we allow full upload rights
1734 uid_email = "%s@debian.org" % (fpr.uid.uid)
1735 self.check_if_upload_is_sponsored(uid_email, fpr.uid.name)
1738 # Check binary upload permissions
1739 # By this point we know that DMs can't have got here unless they
1740 # are allowed to deal with the package concerned so just apply
1742 if fpr.binary_acl.access_level == 'full':
1745 # Otherwise we're in the map case
1746 tmparches = self.pkg.changes["architecture"].copy()
1747 tmparches.pop('source', None)
1749 for bam in fpr.binary_acl_map:
1750 tmparches.pop(bam.architecture.arch_string, None)
1752 if len(tmparches.keys()) > 0:
1753 if fpr.binary_reject:
1754 rej = "changes file contains files of architectures not permitted for fingerprint %s" % fpr.fingerprint
1755 if len(tmparches.keys()) == 1:
1756 rej += "\n\narchitecture involved is: %s" % ",".join(tmparches.keys())
1758 rej += "\n\narchitectures involved are: %s" % ",".join(tmparches.keys())
1759 self.rejects.append(rej)
1761 # TODO: This is where we'll implement reject vs throw away binaries later
1762 rej = "Uhm. I'm meant to throw away the binaries now but that's not implemented yet"
1763 rej += "\nPlease complain to ftpmaster@debian.org as this shouldn't have been turned on"
1764 rej += "\nFingerprint: %s", (fpr.fingerprint)
1765 self.rejects.append(rej)
1768 def check_upload_blocks(self, fpr, session):
1769 """Check whether any upload blocks apply to this source, source
1770 version, uid / fpr combination"""
1772 def block_rej_template(fb):
1773 rej = 'Manual upload block in place for package %s' % fb.source
1774 if fb.version is not None:
1775 rej += ', version %s' % fb.version
1778 for fb in session.query(UploadBlock).filter_by(source = self.pkg.changes['source']).all():
1779 # version is None if the block applies to all versions
1780 if fb.version is None or fb.version == self.pkg.changes['version']:
1781 # Check both fpr and uid - either is enough to cause a reject
1782 if fb.fpr is not None:
1783 if fb.fpr.fingerprint == fpr.fingerprint:
1784 self.rejects.append(block_rej_template(fb) + ' for fingerprint %s\nReason: %s' % (fpr.fingerprint, fb.reason))
1785 if fb.uid is not None:
1786 if fb.uid == fpr.uid:
1787 self.rejects.append(block_rej_template(fb) + ' for uid %s\nReason: %s' % (fb.uid.uid, fb.reason))
1790 def check_dm_upload(self, fpr, session):
1791 # Quoth the GR (http://www.debian.org/vote/2007/vote_003):
1792 ## none of the uploaded packages are NEW
1794 for f in self.pkg.files.keys():
1795 if self.pkg.files[f].has_key("byhand"):
1796 self.rejects.append("%s may not upload BYHAND file %s" % (fpr.uid.uid, f))
1798 if self.pkg.files[f].has_key("new"):
1799 self.rejects.append("%s may not upload NEW file %s" % (fpr.uid.uid, f))
1805 r = get_newest_source(self.pkg.changes["source"], session)
1808 rej = "Could not find existing source package %s in unstable or experimental and this is a DM upload" % self.pkg.changes["source"]
1809 self.rejects.append(rej)
1812 if not r.dm_upload_allowed:
1813 rej = "Source package %s does not have 'DM-Upload-Allowed: yes' in its most recent version (%s)" % (self.pkg.changes["source"], r.version)
1814 self.rejects.append(rej)
1817 ## the Maintainer: field of the uploaded .changes file corresponds with
1818 ## the owner of the key used (ie, non-developer maintainers may not sponsor
1820 if self.check_if_upload_is_sponsored(fpr.uid.uid, fpr.uid.name):
1821 self.rejects.append("%s (%s) is not authorised to sponsor uploads" % (fpr.uid.uid, fpr.fingerprint))
1823 ## the most recent version of the package uploaded to unstable or
1824 ## experimental lists the uploader in the Maintainer: or Uploaders: fields (ie,
1825 ## non-developer maintainers cannot NMU or hijack packages)
1827 # uploader includes the maintainer
1829 for uploader in r.uploaders:
1830 (rfc822, rfc2047, name, email) = uploader.get_split_maintainer()
1831 # Eww - I hope we never have two people with the same name in Debian
1832 if email == fpr.uid.uid or name == fpr.uid.name:
1837 self.rejects.append("%s is not in Maintainer or Uploaders of source package %s" % (fpr.uid.uid, self.pkg.changes["source"]))
1840 ## none of the packages are being taken over from other source packages
1841 for b in self.pkg.changes["binary"].keys():
1842 for suite in self.pkg.changes["distribution"].keys():
1843 for s in get_source_by_package_and_suite(b, suite, session):
1844 if s.source != self.pkg.changes["source"]:
1845 self.rejects.append("%s may not hijack %s from source package %s in suite %s" % (fpr.uid.uid, b, s, suite))
1849 def check_transition(self, session):
1852 sourcepkg = self.pkg.changes["source"]
1854 # No sourceful upload -> no need to do anything else, direct return
1855 # We also only check uploads targeting unstable, not experimental or those
1856 # going to some proposed-updates queue
1857 if "source" not in self.pkg.changes["architecture"] or \
1858 "unstable" not in self.pkg.changes["distribution"]:
1861 # Also only check if there is a file defined (and existent) with
1863 transpath = cnf.get("Dinstall::Reject::ReleaseTransitions", "")
1864 if transpath == "" or not os.path.exists(transpath):
1867 # Parse the yaml file
1868 sourcefile = file(transpath, 'r')
1869 sourcecontent = sourcefile.read()
1871 transitions = yaml.load(sourcecontent)
1872 except yaml.YAMLError, msg:
1873 # This shouldn't happen, there is a wrapper to edit the file which
1874 # checks it, but we would rather be safe than end up rejecting
1876 utils.warn("Not checking transitions, the transitions file is broken: %s." % (msg))
1879 # Now look through all defined transitions
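# A minimal illustrative sketch of one transitions-file entry, inferred
# from the keys this loop reads (source, packages, reason, rm); the
# expected-version key is read as well, and the real schema is owned by
# the edit-transitions wrapper mentioned above.  Values are made up:
#
#   apache2-transition:
#     reason: "rebuild the mpm packages against the new apache2"
#     source: apache2
#     rm: Some Release Team Member
#     packages:
#       - apache2-mpm-worker
#       - apache2-mpm-prefork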
1880 for trans in transitions:
1881 t = transitions[trans]
1882 source = t["source"]
1885 # Will be None if nothing is in testing.
1886 current = get_source_in_suite(source, "testing", session)
1887 if current is not None:
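# apt_pkg.VersionCompare() follows strcmp() conventions (<0 / 0 / >0);
# a negative result means testing still carries an older version than
# the one this transition is waiting for.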
1888 compare = apt_pkg.VersionCompare(current.version, expected)
1890 if current is None or compare < 0:
1891 # This is still valid, the current version in testing is older than
1892 # the new version we wait for, or there is none in testing yet
1894 # Check if the source we look at is affected by this.
1895 if sourcepkg in t['packages']:
1896 # The source is affected, let's reject it.
1898 rejectmsg = "%s: part of the %s transition.\n\n" % (
1901 if current is not None:
1902 currentlymsg = "at version %s" % (current.version)
1904 currentlymsg = "not present in testing"
1906 rejectmsg += "Transition description: %s\n\n" % (t["reason"])
1908 rejectmsg += "\n".join(textwrap.wrap("""Your package
1909 is part of a testing transition designed to get %s migrated (it is
1910 currently %s, we need version %s). This transition is managed by the
1911 Release Team, and %s is the Release-Team member responsible for it.
1912 Please mail debian-release@lists.debian.org or contact %s directly if you
1913 need further assistance. You might want to upload to experimental until this
1914 transition is done."""
1915 % (source, currentlymsg, expected, t["rm"], t["rm"])))
1917 self.rejects.append(rejectmsg)
1920 ###########################################################################
1921 # End check_signed_by_key checks
1922 ###########################################################################
1924 def build_summaries(self):
1925 """ Build a summary of changes the upload introduces. """
1927 (byhand, new, summary, override_summary) = self.pkg.file_summary()
1929 short_summary = summary
1931 # This is for direport's benefit...
1932 f = re_fdnic.sub("\n .\n", self.pkg.changes.get("changes", ""))
1935 summary += "Changes: " + f
1937 summary += "\n\nOverride entries for your package:\n" + override_summary + "\n"
1939 summary += self.announce(short_summary, 0)
1941 return (summary, short_summary)
1943 ###########################################################################
1945 def close_bugs(self, summary, action):
1947 Send mail to close bugs as instructed by the closes field in the changes file.
1948 Also add a line to summary if any work was done.
1950 @type summary: string
1951 @param summary: summary text, as given by L{build_summaries}
1954 @param action: If set to false, no real action will be done.
1957 @return: summary. If action was taken, extended by the list of closed bugs.
1961 template = os.path.join(Config()["Dir::Templates"], 'process-unchecked.bug-close')
1963 bugs = self.pkg.changes["closes"].keys()
1969 summary += "Closing bugs: "
1971 summary += "%s " % (bug)
1974 self.Subst["__BUG_NUMBER__"] = bug
1975 if self.pkg.changes["distribution"].has_key("stable"):
1976 self.Subst["__STABLE_WARNING__"] = """
1977 Note that this package is not part of the released stable Debian
1978 distribution. It may have dependencies on other unreleased software,
1979 or other instabilities. Please take care if you wish to install it.
1980 The update will eventually make its way into the next released Debian
1983 self.Subst["__STABLE_WARNING__"] = ""
1984 mail_message = utils.TemplateSubst(self.Subst, template)
1985 utils.send_mail(mail_message)
1987 # Clear up after ourselves
1988 del self.Subst["__BUG_NUMBER__"]
1989 del self.Subst["__STABLE_WARNING__"]
1991 if action and self.logger:
1992 self.logger.log(["closing bugs"] + bugs)
1998 ###########################################################################
2000 def announce(self, short_summary, action):
2002 Send an announce mail about a new upload.
2004 @type short_summary: string
2005 @param short_summary: Short summary text to include in the mail
2008 @param action: If set to false, no real action will be done.
2011 @return: Textstring about action taken.
2016 announcetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.announce')
2018 # Only do announcements for source uploads with a recent dpkg-dev installed
2019 if float(self.pkg.changes.get("format", 0)) < 1.6 or not \
2020 self.pkg.changes["architecture"].has_key("source"):
2026 self.Subst["__SHORT_SUMMARY__"] = short_summary
2028 for dist in self.pkg.changes["distribution"].keys():
2029 suite = get_suite(dist)
2030 if suite is None: continue
2031 announce_list = suite.announce
2032 if announce_list == "" or lists_done.has_key(announce_list):
2035 lists_done[announce_list] = 1
2036 summary += "Announcing to %s\n" % (announce_list)
2040 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] = announce_list
2041 if cnf.get("Dinstall::TrackingServer") and \
2042 self.pkg.changes["architecture"].has_key("source"):
2043 trackingsendto = "Bcc: %s@%s" % (self.pkg.changes["source"], cnf["Dinstall::TrackingServer"])
2044 self.Subst["__ANNOUNCE_LIST_ADDRESS__"] += "\n" + trackingsendto
2046 mail_message = utils.TemplateSubst(self.Subst, announcetemplate)
2047 utils.send_mail(mail_message)
2049 del self.Subst["__ANNOUNCE_LIST_ADDRESS__"]
2051 if cnf.FindB("Dinstall::CloseBugs"):
2052 summary = self.close_bugs(summary, action)
2054 del self.Subst["__SHORT_SUMMARY__"]
2058 ###########################################################################
2060 def accept (self, summary, short_summary, session=None):
2064 This moves all files referenced from the .changes into the pool,
2065 sends the accepted mail, announces to lists, closes bugs and
2066 also checks for override disparities. If enabled it will write out
2067 the version history for the BTS Version Tracking and will finally call
2070 @type summary: string
2071 @param summary: Summary text
2073 @type short_summary: string
2074 @param short_summary: Short summary
2078 stats = SummaryStats()
2081 self.logger.log(["installing changes", self.pkg.changes_file])
2086 # Add the .dsc file to the DB first
2087 for newfile, entry in self.pkg.files.items():
2088 if entry["type"] == "dsc":
2089 source, dsc_component, dsc_location_id, pfs = add_dsc_to_db(self, newfile, session)
2093 # Add .deb / .udeb files to the DB (type is always deb, dbtype is udeb/deb)
2094 for newfile, entry in self.pkg.files.items():
2095 if entry["type"] == "deb":
2096 b, pf = add_deb_to_db(self, newfile, session)
2098 poolfiles.append(pf)
2100 # If this is a sourceful diff-only upload that is moving
2101 # cross-component, we need to copy the .orig files into the new
2102 # component too for the same reasons as above.
2103 # XXX: mhy: I think this should be in add_dsc_to_db
2104 if self.pkg.changes["architecture"].has_key("source"):
2105 for orig_file in self.pkg.orig_files.keys():
2106 if not self.pkg.orig_files[orig_file].has_key("id"):
2107 continue # Skip if it's not in the pool
2108 orig_file_id = self.pkg.orig_files[orig_file]["id"]
2109 if self.pkg.orig_files[orig_file]["location"] == dsc_location_id:
2110 continue # Skip if the location didn't change
2113 oldf = get_poolfile_by_id(orig_file_id, session)
2114 old_filename = os.path.join(oldf.location.path, oldf.filename)
2115 old_dat = {'size': oldf.filesize, 'md5sum': oldf.md5sum,
2116 'sha1sum': oldf.sha1sum, 'sha256sum': oldf.sha256sum}
2118 new_filename = os.path.join(utils.poolify(self.pkg.changes["source"], dsc_component), os.path.basename(old_filename))
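# e.g. (illustrative) "main/h/hello/hello_1.0.orig.tar.gz" -- poolify()
# yields the per-source pool subdirectory for the target component,
# relative to Dir::Pool.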
2120 # TODO: Care about size/md5sum collisions etc
2121 (found, newf) = check_poolfile(new_filename, old_dat['size'], old_dat['md5sum'], dsc_location_id, session)
2123 # TODO: Uhm, what happens if newf isn't None - something has gone badly and we should cope
2125 utils.copy(old_filename, os.path.join(cnf["Dir::Pool"], new_filename))
2126 newf = add_poolfile(new_filename, old_dat, dsc_location_id, session)
2130 # Don't reference the old file from this changes
2132 if p.file_id == oldf.file_id:
2135 poolfiles.append(newf)
2137 # Fix up the DSC references
2140 for df in source.srcfiles:
2141 if df.poolfile.file_id == oldf.file_id:
2142 # Add a new DSC entry and mark the old one for deletion
2143 # Don't do it in the loop so we don't change the thing we're iterating over
2145 newdscf.source_id = source.source_id
2146 newdscf.poolfile_id = newf.file_id
2147 session.add(newdscf)
2157 # Make sure that our source object is up-to-date
2158 session.expire(source)
2160 # Add changelog information to the database
2161 self.store_changelog()
2163 # Install the files into the pool
2164 for newfile, entry in self.pkg.files.items():
2165 destination = os.path.join(cnf["Dir::Pool"], entry["pool name"], newfile)
2166 utils.move(newfile, destination)
2167 self.logger.log(["installed", newfile, entry["type"], entry["size"], entry["architecture"]])
2168 stats.accept_bytes += float(entry["size"])
2170 # Copy the .changes file across for suites which need it.
2171 copy_changes = dict([(x.copychanges, '')
2172 for x in session.query(Suite).filter(Suite.suite_name.in_(self.pkg.changes["distribution"].keys())).all()
2173 if x.copychanges is not None])
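# (the dict is used purely as a set here, de-duplicating copychanges
# destinations shared by several suites)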
2175 for dest in copy_changes.keys():
2176 utils.copy(self.pkg.changes_file, os.path.join(cnf["Dir::Root"], dest))
2178 # We're done - commit the database changes
2180 # Our SQL session will automatically start a new transaction after
2183 # Now ensure that the metadata has been added
2184 # This has to be done after we copy the files into the pool
2185 # For source if we have it:
2186 if self.pkg.changes["architecture"].has_key("source"):
2187 import_metadata_into_db(source, session)
2189 # Now for any of our binaries
2191 import_metadata_into_db(b, session)
2195 # Move the .changes into the 'done' directory
2196 ye, mo, da = time.gmtime()[0:3]
2197 donedir = os.path.join(cnf["Dir::Queue::Done"], str(ye), "%0.2d" % mo, "%0.2d" % da)
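# e.g. <Dir::Queue::Done>/2010/05/21 for an upload accepted on 21 May 2010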
2198 if not os.path.isdir(donedir):
2199 os.makedirs(donedir)
2201 utils.move(self.pkg.changes_file,
2202 os.path.join(donedir, os.path.basename(self.pkg.changes_file)))
2204 if self.pkg.changes["architecture"].has_key("source") and cnf.get("Dir::UrgencyLog"):
2205 UrgencyLog().log(self.pkg.dsc["source"], self.pkg.dsc["version"], self.pkg.changes["urgency"])
2208 self.Subst["__SUMMARY__"] = summary
2209 mail_message = utils.TemplateSubst(self.Subst,
2210 os.path.join(cnf["Dir::Templates"], 'process-unchecked.accepted'))
2211 utils.send_mail(mail_message)
2212 self.announce(short_summary, 1)
2214 ## Helper stuff for DebBugs Version Tracking
2215 if cnf.Find("Dir::Queue::BTSVersionTrack"):
2216 if self.pkg.changes["architecture"].has_key("source"):
2217 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2218 version_history = os.fdopen(fd, 'w')
2219 version_history.write(self.pkg.dsc["bts changelog"])
2220 version_history.close()
2221 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2222 self.pkg.changes_file[:-8]+".versions")
2223 os.rename(temp_filename, filename)
2224 os.chmod(filename, 0644)
2226 # Write out the binary -> source mapping.
2227 (fd, temp_filename) = utils.temp_filename(cnf["Dir::Queue::BTSVersionTrack"], prefix=".")
2228 debinfo = os.fdopen(fd, 'w')
2229 for name, entry in sorted(self.pkg.files.items()):
2230 if entry["type"] == "deb":
2231 line = " ".join([entry["package"], entry["version"],
2232 entry["architecture"], entry["source package"],
2233 entry["source version"]])
2234 debinfo.write(line+"\n")
2236 filename = "%s/%s" % (cnf["Dir::Queue::BTSVersionTrack"],
2237 self.pkg.changes_file[:-8]+".debinfo")
2238 os.rename(temp_filename, filename)
2239 os.chmod(filename, 0644)
2243 # Set up our copy queues (e.g. buildd queues)
2244 for suite_name in self.pkg.changes["distribution"].keys():
2245 suite = get_suite(suite_name, session)
2246 for q in suite.copy_queues:
2248 q.add_file_from_pool(f)
2253 stats.accept_count += 1
2255 def check_override(self):
2257 Checks override entries for validity. Mails "Override disparity" warnings,
2258 if that feature is enabled.
2260 Abandons the check if
2261 - override disparity checks are disabled
2262 - mail sending is disabled
2267 # Abandon the check if override disparity checks have been disabled
2268 if not cnf.FindB("Dinstall::OverrideDisparityCheck"):
2271 summary = self.pkg.check_override()
2276 overridetemplate = os.path.join(cnf["Dir::Templates"], 'process-unchecked.override-disparity')
2279 self.Subst["__SUMMARY__"] = summary
2280 mail_message = utils.TemplateSubst(self.Subst, overridetemplate)
2281 utils.send_mail(mail_message)
2282 del self.Subst["__SUMMARY__"]
2284 ###########################################################################
2286 def remove(self, from_dir=None):
2288 Used (for instance) in p-u to remove the package from unchecked
2290 Also removes the package from holding area.
2292 if from_dir is None:
2293 from_dir = self.pkg.directory
2296 for f in self.pkg.files.keys():
2297 os.unlink(os.path.join(from_dir, f))
2298 if os.path.exists(os.path.join(h.holding_dir, f)):
2299 os.unlink(os.path.join(h.holding_dir, f))
2301 os.unlink(os.path.join(from_dir, self.pkg.changes_file))
2302 if os.path.exists(os.path.join(h.holding_dir, self.pkg.changes_file)):
2303 os.unlink(os.path.join(h.holding_dir, self.pkg.changes_file))
2305 ###########################################################################
2307 def move_to_queue (self, queue):
2309 Move files to a destination queue using the permissions in the table
2312 utils.move(os.path.join(h.holding_dir, self.pkg.changes_file),
2313 queue.path, perms=int(queue.change_perms, 8))
2314 for f in self.pkg.files.keys():
2315 utils.move(os.path.join(h.holding_dir, f), queue.path, perms=int(queue.perms, 8))
2317 ###########################################################################
2319 def force_reject(self, reject_files):
2321 Forcefully move files from the current directory to the
2322 reject directory. If any file already exists in the reject
2323 directory it will be moved to the morgue to make way for
2326 @type reject_files: dict
2327 @param reject_files: file dictionary
2333 for file_entry in reject_files:
2334 # Skip any files which don't exist or which we don't have permission to copy.
2335 if os.access(file_entry, os.R_OK) == 0:
2338 dest_file = os.path.join(cnf["Dir::Queue::Reject"], file_entry)
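# Opening with O_CREAT|O_EXCL atomically claims the destination name:
# it fails with EEXIST rather than silently reusing (or following) a
# file someone else put there first.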
2341 dest_fd = os.open(dest_file, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0644)
2343 # File exists? Let's find a new name by adding a number
2344 if e.errno == errno.EEXIST:
2346 dest_file = utils.find_next_free(dest_file, 255)
2347 except NoFreeFilenameError:
2348 # Something's either gone badly Pete Tong, or
2349 # someone is trying to exploit us.
2350 utils.warn("**WARNING** failed to find a free filename for %s in %s." % (file_entry, cnf["Dir::Queue::Reject"]))
2353 # Make sure we really got it
2355 dest_fd = os.open(dest_file, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2358 utils.warn("**WARNING** failed to claim %s in the reject directory." % (file_entry))
2362 # If we got here, we own the destination file, so we can
2363 # safely overwrite it.
2364 utils.move(file_entry, dest_file, 1, perms=0660)
2367 ###########################################################################
2368 def do_reject (self, manual=0, reject_message="", notes=""):
2370 Reject an upload. If called without a reject message or C{manual} is
2371 true, spawn an editor so the user can write one.
2374 @param manual: manual or automated rejection
2376 @type reject_message: string
2377 @param reject_message: A reject message
2382 # If we weren't given a manual rejection message, spawn an
2383 # editor so the user can add one in...
2384 if manual and not reject_message:
2385 (fd, temp_filename) = utils.temp_filename()
2386 temp_file = os.fdopen(fd, 'w')
2389 temp_file.write("\nAuthor: %s\nVersion: %s\nTimestamp: %s\n\n%s" \
2390 % (note.author, note.version, note.notedate, note.comment))
2392 editor = os.environ.get("EDITOR", "vi")
2394 while answer == 'E':
2395 os.system("%s %s" % (editor, temp_filename))
2396 temp_fh = utils.open_file(temp_filename)
2397 reject_message = "".join(temp_fh.readlines())
2399 print "Reject message:"
2400 print utils.prefix_multi_line_string(reject_message, " ", include_blank_lines=1)
2401 prompt = "[R]eject, Edit, Abandon, Quit ?"
2403 while prompt.find(answer) == -1:
2404 answer = utils.our_raw_input(prompt)
2405 m = re_default_answer.search(prompt)
2408 answer = answer[:1].upper()
2409 os.unlink(temp_filename)
2415 print "Rejecting.\n"
2419 reason_filename = self.pkg.changes_file[:-8] + ".reason"
2420 reason_filename = os.path.join(cnf["Dir::Queue::Reject"], reason_filename)
2422 # Move all the files into the reject directory
2423 reject_files = self.pkg.files.keys() + [self.pkg.changes_file]
2424 self.force_reject(reject_files)
2426 # If we fail here someone is probably trying to exploit the race
2427 # so let's just raise an exception ...
2428 if os.path.exists(reason_filename):
2429 os.unlink(reason_filename)
2430 reason_fd = os.open(reason_filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0644)
2432 rej_template = os.path.join(cnf["Dir::Templates"], "queue.rejected")
2436 self.Subst["__REJECTOR_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
2437 self.Subst["__MANUAL_REJECT_MESSAGE__"] = ""
2438 self.Subst["__CC__"] = "X-DAK-Rejection: automatic (moo)"
2439 os.write(reason_fd, reject_message)
2440 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2442 # Build up the rejection email
2443 user_email_address = utils.whoami() + " <%s>" % (cnf["Dinstall::MyAdminAddress"])
2444 self.Subst["__REJECTOR_ADDRESS__"] = user_email_address
2445 self.Subst["__MANUAL_REJECT_MESSAGE__"] = reject_message
2446 self.Subst["__REJECT_MESSAGE__"] = ""
2447 self.Subst["__CC__"] = "Cc: " + cnf["Dinstall::MyEmailAddress"]
2448 reject_mail_message = utils.TemplateSubst(self.Subst, rej_template)
2449 # Write the rejection email out as the <foo>.reason file
2450 os.write(reason_fd, reject_mail_message)
2452 del self.Subst["__REJECTOR_ADDRESS__"]
2453 del self.Subst["__MANUAL_REJECT_MESSAGE__"]
2454 del self.Subst["__CC__"]
2458 # Send the rejection mail
2459 utils.send_mail(reject_mail_message)
2462 self.logger.log(["rejected", self.pkg.changes_file])
2466 ################################################################################
2467 def in_override_p(self, package, component, suite, binary_type, filename, session):
2469 Check if a package already has override entries in the DB
2471 @type package: string
2472 @param package: package name
2474 @type component: string
2475 @param component: database id of the component
2478 @param suite: database id of the suite
2480 @type binary_type: string
2481 @param binary_type: type of the package
2483 @type filename: string
2484 @param filename: filename we check
2486 @return: the database result. But no one cares anyway.
2492 if binary_type == "": # must be source
2495 file_type = binary_type
2497 # Override suite name; used for example with proposed-updates
2498 oldsuite = get_suite(suite, session)
2499 if oldsuite is not None and oldsuite.overridesuite:
2500 suite = oldsuite.overridesuite
2502 result = get_override(package, suite, component, file_type, session)
2504 # If checking for a source package fall back on the binary override type
2505 if file_type == "dsc" and len(result) < 1:
2506 result = get_override(package, suite, component, ['deb', 'udeb'], session)
2508 # Remember the section and priority so we can check them later if appropriate
2511 self.pkg.files[filename]["override section"] = result.section.section
2512 self.pkg.files[filename]["override priority"] = result.priority.priority
2517 ################################################################################
2518 def get_anyversion(self, sv_list, suite):
2521 @param sv_list: list of (suite, version) tuples to check
2524 @param suite: suite name
2530 anysuite = [suite] + [ vc.reference.suite_name for vc in get_version_checks(suite, "Enhances") ]
2531 for (s, v) in sv_list:
2532 if s in [ x.lower() for x in anysuite ]:
2533 if not anyversion or apt_pkg.VersionCompare(anyversion, v) <= 0:
2538 ################################################################################
2540 def cross_suite_version_check(self, sv_list, filename, new_version, sourceful=False):
2543 @param sv_list: list of (suite, version) tuples to check
2545 @type filename: string
2546 @param filename: name of the file being checked
2548 @type new_version: string
2549 @param new_version: version of the package being uploaded
2551 Ensure versions are newer than existing packages in target
2552 suites and that cross-suite version checking rules as
2553 set out in the conf file are satisfied.
2558 # Check versions for each target suite
2559 for target_suite in self.pkg.changes["distribution"].keys():
2560 must_be_newer_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan") ]
2561 must_be_older_than = [ vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan") ]
2563 # Enforce "must be newer than target suite" even if conffile omits it
2564 if target_suite not in must_be_newer_than:
2565 must_be_newer_than.append(target_suite)
2567 for (suite, existent_version) in sv_list:
2568 vercmp = apt_pkg.VersionCompare(new_version, existent_version)
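# VersionCompare() follows strcmp() semantics, so below:
#   vercmp < 1  means new_version <= existent_version
#   vercmp > -1 means new_version >= existent_version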
2570 if suite in must_be_newer_than and sourceful and vercmp < 1:
2571 self.rejects.append("%s: old version (%s) in %s >= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2573 if suite in must_be_older_than and vercmp > -1:
2576 if self.pkg.changes.get('distribution-version', {}).has_key(suite):
2577 # we really use the other suite, ignoring the conflicting one ...
2578 addsuite = self.pkg.changes["distribution-version"][suite]
2580 add_version = self.get_anyversion(sv_list, addsuite)
2581 target_version = self.get_anyversion(sv_list, target_suite)
2584 # not add_version can only happen if we map to a suite
2585 # that doesn't enhance the suite we're propup'ing from.
2586 # so "propup-ver x a b c; map a d" is a problem only if
2587 # d doesn't enhance a.
2589 # i think we could always propagate in this case, rather
2590 # than complaining. either way, this isn't a REJECT issue
2592 # And - we really should complain to the dorks who configured dak
2593 self.warnings.append("%s is mapped to, but not enhanced by %s - adding anyways" % (suite, addsuite))
2594 self.pkg.changes.setdefault("propdistribution", {})
2595 self.pkg.changes["propdistribution"][addsuite] = 1
2597 elif not target_version:
2598 # not target_version is true when the package is NEW
2599 # we could just stick with the "...old version..." REJECT
2600 # for this, I think.
2601 self.rejects.append("Won't propogate NEW packages.")
2602 elif apt_pkg.VersionCompare(new_version, add_version) < 0:
2603 # propagation would be redundant; no need to reject though.
2604 self.warnings.append("ignoring version conflict: %s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2606 elif apt_pkg.VersionCompare(new_version, add_version) > 0 and \
2607 apt_pkg.VersionCompare(add_version, target_version) >= 0:
2609 self.warnings.append("Propogating upload to %s" % (addsuite))
2610 self.pkg.changes.setdefault("propdistribution", {})
2611 self.pkg.changes["propdistribution"][addsuite] = 1
2615 self.rejects.append("%s: old version (%s) in %s <= new version (%s) targeted at %s." % (filename, existent_version, suite, new_version, target_suite))
2617 ################################################################################
2618 def check_binary_against_db(self, filename, session):
2619 # Ensure version is sane
2620 self.cross_suite_version_check( \
2621 get_suite_version_by_package(self.pkg.files[filename]["package"], \
2622 self.pkg.files[filename]["architecture"], session),
2623 filename, self.pkg.files[filename]["version"], sourceful=False)
2625 # Check for any existing copies of the file
2626 q = session.query(DBBinary).filter_by(package=self.pkg.files[filename]["package"])
2627 q = q.filter_by(version=self.pkg.files[filename]["version"])
2628 q = q.join(Architecture).filter_by(arch_string=self.pkg.files[filename]["architecture"])
2631 self.rejects.append("%s: can not overwrite existing copy already in the archive." % filename)
2633 ################################################################################
2635 def check_source_against_db(self, filename, session):
2636 source = self.pkg.dsc.get("source")
2637 version = self.pkg.dsc.get("version")
2639 # Ensure version is sane
2640 self.cross_suite_version_check( \
2641 get_suite_version_by_source(source, session), filename, version,
2644 ################################################################################
2645 def check_dsc_against_db(self, filename, session):
2648 @warning: NB: this function can remove entries from the 'files' index [if
2649 the orig tarball is a duplicate of the one in the archive]; if
2650 you're iterating over 'files' and call this function as part of
2651 the loop, be sure to add a check to the top of the loop to
2652 ensure you haven't just tried to dereference the deleted entry.
2657 self.pkg.orig_files = {} # XXX: do we need to clear it?
2658 orig_files = self.pkg.orig_files
2660 # Try and find all files mentioned in the .dsc. This has
2661 # to work harder to cope with the multiple possible
2662 # locations of an .orig.tar.gz.
2663 # The ordering on the select is needed to pick the newest orig
2664 # when it exists in multiple places.
2665 for dsc_name, dsc_entry in self.pkg.dsc_files.items():
2667 if self.pkg.files.has_key(dsc_name):
2668 actual_md5 = self.pkg.files[dsc_name]["md5sum"]
2669 actual_size = int(self.pkg.files[dsc_name]["size"])
2670 found = "%s in incoming" % (dsc_name)
2672 # Check the file does not already exist in the archive
2673 ql = get_poolfile_like_name(dsc_name, session)
2675 # Strip out anything that isn't '%s' or '/%s$'
2677 if not i.filename.endswith(dsc_name):
2680 # "[dak] has not broken them. [dak] has fixed a
2681 # brokenness. Your crappy hack exploited a bug in
2684 # "(Come on! I thought it was always obvious that
2685 # one just doesn't release different files with
2686 # the same name and version.)"
2687 # -- ajk@ on d-devel@l.d.o
2690 # Ignore exact matches for .orig.tar.gz
2692 if re_is_orig_source.match(dsc_name):
2694 if self.pkg.files.has_key(dsc_name) and \
2695 int(self.pkg.files[dsc_name]["size"]) == int(i.filesize) and \
2696 self.pkg.files[dsc_name]["md5sum"] == i.md5sum:
2697 self.warnings.append("ignoring %s, since it's already in the archive." % (dsc_name))
2698 # TODO: Don't delete the entry, just mark it as not needed
2699 # This would fix the stupidity of changing something we often iterate over
2700 # whilst we're doing it
2701 del self.pkg.files[dsc_name]
2702 dsc_entry["files id"] = i.file_id
2703 if not orig_files.has_key(dsc_name):
2704 orig_files[dsc_name] = {}
2705 orig_files[dsc_name]["path"] = os.path.join(i.location.path, i.filename)
2708 # Don't bitch that we couldn't find this file later
2710 self.later_check_files.remove(dsc_name)
2716 self.rejects.append("can not overwrite existing copy of '%s' already in the archive." % (dsc_name))
2718 elif re_is_orig_source.match(dsc_name):
2720 ql = get_poolfile_like_name(dsc_name, session)
2722 # Strip out anything that isn't '%s' or '/%s$'
2723 # TODO: Shouldn't we just search for things which end with our string explicitly in the SQL?
2725 if not i.filename.endswith(dsc_name):
2729 # Unfortunately, we may get more than one match here if,
2730 # for example, the package was in potato but had an -sa
2731 # upload in woody. So we need to choose the right one.
2733 # default to something sane in case we don't match any or have only one
2738 old_file = os.path.join(i.location.path, i.filename)
2739 old_file_fh = utils.open_file(old_file)
2740 actual_md5 = apt_pkg.md5sum(old_file_fh)
2742 actual_size = os.stat(old_file)[stat.ST_SIZE]
2743 if actual_md5 == dsc_entry["md5sum"] and actual_size == int(dsc_entry["size"]):
2746 old_file = os.path.join(i.location.path, i.filename)
2747 old_file_fh = utils.open_file(old_file)
2748 actual_md5 = apt_pkg.md5sum(old_file_fh)
2750 actual_size = os.stat(old_file)[stat.ST_SIZE]
2752 suite_type = x.location.archive_type
2753 # need this for updating dsc_files in install()
2754 dsc_entry["files id"] = x.file_id
2755 # See install() in process-accepted...
2756 if not orig_files.has_key(dsc_name):
2757 orig_files[dsc_name] = {}
2758 orig_files[dsc_name]["id"] = x.file_id
2759 orig_files[dsc_name]["path"] = old_file
2760 orig_files[dsc_name]["location"] = x.location.location_id
2762 # TODO: Record the queues and info in the DB so we don't hardcode all this crap
2763 # Not there? Check the queue directories...
2764 for directory in [ "New", "Byhand", "ProposedUpdates", "OldProposedUpdates", "Embargoed", "Unembargoed" ]:
2765 if not Cnf.has_key("Dir::Queue::%s" % (directory)):
2767 in_otherdir = os.path.join(Cnf["Dir::Queue::%s" % (directory)], dsc_name)
2768 if os.path.exists(in_otherdir):
2769 in_otherdir_fh = utils.open_file(in_otherdir)
2770 actual_md5 = apt_pkg.md5sum(in_otherdir_fh)
2771 in_otherdir_fh.close()
2772 actual_size = os.stat(in_otherdir)[stat.ST_SIZE]
2774 if not orig_files.has_key(dsc_name):
2775 orig_files[dsc_name] = {}
2776 orig_files[dsc_name]["path"] = in_otherdir
2779 self.rejects.append("%s refers to %s, but I can't find it in the queue or in the pool." % (filename, dsc_name))
2782 self.rejects.append("%s refers to %s, but I can't find it in the queue." % (filename, dsc_name))
2784 if actual_md5 != dsc_entry["md5sum"]:
2785 self.rejects.append("md5sum for %s doesn't match %s." % (found, filename))
2786 if actual_size != int(dsc_entry["size"]):
2787 self.rejects.append("size for %s doesn't match %s." % (found, filename))
2789 ################################################################################
2790 # This is used by process-new and process-holding to recheck a changes file
2791 # at the time we're running. It mainly wraps various other internal functions
2792 # and is similar to accepted_checks - these should probably be tidied up
2794 def recheck(self, session):
2796 for f in self.pkg.files.keys():
2797 # The .orig.tar.gz can disappear out from under us if it's a
2798 # duplicate of one in the archive.
2799 if not self.pkg.files.has_key(f):
2802 entry = self.pkg.files[f]
2804 # Check that the source still exists
2805 if entry["type"] == "deb":
2806 source_version = entry["source version"]
2807 source_package = entry["source package"]
2808 if not self.pkg.changes["architecture"].has_key("source") \
2809 and not source_exists(source_package, source_version, \
2810 suites = self.pkg.changes["distribution"].keys(), session = session):
2811 source_epochless_version = re_no_epoch.sub('', source_version)
2812 dsc_filename = "%s_%s.dsc" % (source_package, source_epochless_version)
2814 for q in ["Embargoed", "Unembargoed", "Newstage"]:
2815 if cnf.has_key("Dir::Queue::%s" % (q)):
2816 if os.path.exists(cnf["Dir::Queue::%s" % (q)] + '/' + dsc_filename):
2819 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, f))
2821 # Version and file overwrite checks
2822 if entry["type"] == "deb":
2823 self.check_binary_against_db(f, session)
2824 elif entry["type"] == "dsc":
2825 self.check_source_against_db(f, session)
2826 self.check_dsc_against_db(f, session)
2828 ################################################################################
2829 def accepted_checks(self, overwrite_checks, session):
2830 # Recheck anything that relies on the database, since that's not
2831 # frozen between accept and our run time when called from p-a.
2833 # overwrite_checks is set to False when installing to stable/oldstable
2838 # Find the .dsc (again)
2840 for f in self.pkg.files.keys():
2841 if self.pkg.files[f]["type"] == "dsc":
2844 for checkfile in self.pkg.files.keys():
2845 # The .orig.tar.gz can disappear out from under us if it's a
2846 # duplicate of one in the archive.
2847 if not self.pkg.files.has_key(checkfile):
2850 entry = self.pkg.files[checkfile]
2852 # Check that the source still exists
2853 if entry["type"] == "deb":
2854 source_version = entry["source version"]
2855 source_package = entry["source package"]
2856 if not self.pkg.changes["architecture"].has_key("source") \
2857 and not source_exists(source_package, source_version, \
2858 suites = self.pkg.changes["distribution"].keys(), \
2860 self.rejects.append("no source found for %s %s (%s)." % (source_package, source_version, checkfile))
2862 # Version and file overwrite checks
2863 if overwrite_checks:
2864 if entry["type"] == "deb":
2865 self.check_binary_against_db(checkfile, session)
2866 elif entry["type"] == "dsc":
2867 self.check_source_against_db(checkfile, session)
2868 self.check_dsc_against_db(dsc_filename, session)
2870 # propagate in the case it is in the override tables:
2871 for suite in self.pkg.changes.get("propdistribution", {}).keys():
2872 if self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2873 propogate[suite] = 1
2875 nopropogate[suite] = 1
2877 for suite in propogate.keys():
2878 if suite in nopropogate:
2880 self.pkg.changes["distribution"][suite] = 1
2882 for checkfile in self.pkg.files.keys():
2883 # Check the package is still in the override tables
2884 for suite in self.pkg.changes["distribution"].keys():
2885 if not self.in_override_p(entry["package"], entry["component"], suite, entry.get("dbtype",""), checkfile, session):
2886 self.rejects.append("%s is NEW for %s." % (checkfile, suite))
2888 ################################################################################
2889 # If any file of an upload has a recent mtime then chances are good
2890 # the file is still being uploaded.
2892 def upload_too_new(self):
2895 # Move back to the original directory to get accurate time stamps
2897 os.chdir(self.pkg.directory)
2898 file_list = self.pkg.files.keys()
2899 file_list.extend(self.pkg.dsc_files.keys())
2900 file_list.append(self.pkg.changes_file)
2903 last_modified = time.time()-os.path.getmtime(f)
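# Anything modified less than Dinstall::SkipTime seconds ago is assumed
# to still be in transit, so the whole upload is treated as too new.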
2904 if last_modified < int(cnf["Dinstall::SkipTime"]):
2913 def store_changelog(self):
2915 # Skip binary-only upload if it is not a bin-NMU
2916 if not self.pkg.changes['architecture'].has_key('source'):
2917 from daklib.regexes import re_bin_only_nmu
2918 if not re_bin_only_nmu.search(self.pkg.changes['version']):
2921 session = DBConn().session()
2923 # Check if upload already has a changelog entry
2924 query = """SELECT changelog_id FROM changes WHERE source = :source
2925 AND version = :version AND architecture = :architecture AND changelog_id != 0"""
2926 if session.execute(query, {'source': self.pkg.changes['source'], \
2927 'version': self.pkg.changes['version'], \
2928 'architecture': " ".join(self.pkg.changes['architecture'].keys())}).rowcount:
2932 # Add current changelog text into changelogs_text table, return created ID
2933 query = "INSERT INTO changelogs_text (changelog) VALUES (:changelog) RETURNING id"
2934 ID = session.execute(query, {'changelog': self.pkg.changes['changes']}).fetchone()[0]
2936 # Link ID to the upload available in changes table
2937 query = """UPDATE changes SET changelog_id = :id WHERE source = :source
2938 AND version = :version AND architecture = :architecture"""
2939 session.execute(query, {'id': ID, 'source': self.pkg.changes['source'], \
2940 'version': self.pkg.changes['version'], \
2941 'architecture': " ".join(self.pkg.changes['architecture'].keys())})